file_name (stringlengths 3–137) | prefix (stringlengths 0–918k) | suffix (stringlengths 0–962k) | middle (stringlengths 0–812k)
---|---|---|---
create_annotation_details.go
|
// Copyright (c) 2016, 2018, 2021, Oracle and/or its affiliates. All rights reserved.
// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
// Code generated. DO NOT EDIT.
// DlsDataPlane API
//
// A description of the DlsDataPlane API.
//
package datalabelingservicedataplane
import (
"encoding/json"
"github.com/oracle/oci-go-sdk/v49/common"
)
// CreateAnnotationDetails This is the payload sent in the CreateAnnotation operation. It contains all the information required for a user to create an Annotation for a record.
type CreateAnnotationDetails struct {
// The OCID of the record annotated
RecordId *string `mandatory:"true" json:"recordId"`
// The OCID of the compartment for the annotation
CompartmentId *string `mandatory:"true" json:"compartmentId"`
// The entity types will be validated against the dataset to ensure consistency.
Entities []Entity `mandatory:"true" json:"entities"`
// Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only.
// Example: `{"bar-key": "value"}`
FreeformTags map[string]string `mandatory:"false" json:"freeformTags"`
// Defined tags for this resource. Each key is predefined and scoped to a namespace.
// Example: `{"foo-namespace": {"bar-key": "value"}}`
DefinedTags map[string]map[string]interface{} `mandatory:"false" json:"definedTags"`
}
func (m CreateAnnotationDetails) String() string {
return common.PointerString(m)
}
// UnmarshalJSON unmarshals from json
func (m *CreateAnnotationDetails) UnmarshalJSON(data []byte) (e error) {
model := struct {
FreeformTags map[string]string `json:"freeformTags"`
DefinedTags map[string]map[string]interface{} `json:"definedTags"`
RecordId *string `json:"recordId"`
CompartmentId *string `json:"compartmentId"`
Entities []entity `json:"entities"`
}{}
e = json.Unmarshal(data, &model)
if e != nil {
return
}
var nn interface{}
m.FreeformTags = model.FreeformTags
m.DefinedTags = model.DefinedTags
m.RecordId = model.RecordId
m.CompartmentId = model.CompartmentId
m.Entities = make([]Entity, len(model.Entities))
for i, n := range model.Entities {
nn, e = n.UnmarshalPolymorphicJSON(n.JsonData)
if e != nil {
return e
}
if nn != nil {
m.Entities[i] = nn.(Entity)
} else {
m.Entities[i] = nil
}
}
return
}
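// A minimal usage sketch (hypothetical OCIDs; GenericEntity is assumed here as
// one of the concrete Entity implementations in this package):
//
//	details := CreateAnnotationDetails{
//		RecordId:      common.String("ocid1.datalabelingrecord.oc1..example"),
//		CompartmentId: common.String("ocid1.compartment.oc1..example"),
//		Entities:      []Entity{GenericEntity{}},
//	}
//	payload, _ := json.Marshal(details)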
|
_tickprefix.py
|
import _plotly_utils.basevalidators

class TickprefixValidator(_plotly_utils.basevalidators.StringValidator):
    def __init__(
        self,
        plotly_name="tickprefix",
        parent_name="scattergl.marker.colorbar",
        **kwargs
    ):
        super(TickprefixValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=kwargs.pop("edit_type", "calc"),
            **kwargs
        )
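# A minimal usage sketch (hypothetical prefix value; validate_coerce is the
# standard entry point on plotly's validator classes):
#
# validator = TickprefixValidator()
# validator.validate_coerce("$")  # -> "$"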
|
clear_paths.py
|
from __future__ import division
from libtbx.clear_paths \
import remove_or_rename_files_and_directories_if_possible
import sys
def run(args):
    remaining = remove_or_rename_files_and_directories_if_possible(paths=args)
    for path in remaining:
        print "WARNING: unable to remove or rename:", path

if (__name__ == "__main__"):
    run(args=sys.argv[1:])
|
api.go
|
// Copyright 2020 John McKenzie
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"encoding/json"
"log"
"net/http"
"os/exec"
"strings"
"time"
)
const (
listenAddress = "0.0.0.0:4778"
version = "0.0.1"
)
// Widget represents a generic object.
type Widget struct {
ID string `json:"id"`
Name string `json:"name"`
Description string `json:"description"`
}
// WidgetHandler handles Widget requests.
type WidgetHandler struct {
widgets map[string]Widget
}
// NewWidgetHandler will construct a new WidgetHandler.
func NewWidgetHandler() WidgetHandler {
return WidgetHandler{
widgets: make(map[string]Widget, 0),
}
}
func (h WidgetHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
path := r.URL.EscapedPath()
id := strings.Replace(path, "/widgets/", "", 1)
log.Printf("path: %s method: %s id: %s", path, r.Method, id)
switch r.Method {
case http.MethodGet:
if len(id) > 0 {
h.get(w, r, id)
} else {
h.list(w, r)
}
return
case http.MethodPost:
if len(id) > 0 {
break
}
h.create(w, r)
return
case http.MethodPut:
if len(id) <= 0 {
break
}
h.update(w, r, id)
return
case http.MethodDelete:
if len(id) <= 0 {
break
}
h.delete(w, r, id)
return
default:
// default method not allowed...
}
writeJSONError(w, http.StatusMethodNotAllowed, "Method not allowed for this resource.")
}
func main() {
http.HandleFunc("/", index)
http.Handle("/widgets/", NewWidgetHandler())
log.Printf("listening for connections at %s", listenAddress)
log.Fatal(http.ListenAndServe(listenAddress, nil))
}
func index(w http.ResponseWriter, r *http.Request) {
log.Printf("URL Path: %s Method: %s", r.URL.Path, r.Method)
if r.URL.Path != "/" {
writeJSONError(w, http.StatusNotFound, "The requested resource could not be located.")
return
}
if r.Method != http.MethodOptions && r.Method != http.MethodGet {
writeJSONError(w, http.StatusMethodNotAllowed, "Method not allowed for this resource.")
return
}
payload := map[string]string{
"timestamp": time.Now().String(),
"version": version,
}
if err := writeJSON(w, http.StatusOK, payload); err != nil {
writeJSONError(w, http.StatusInternalServerError, err.Error())
}
}
func (h WidgetHandler) list(w http.ResponseWriter, r *http.Request) {
widgets := make([]Widget, 0)
for _, widget := range h.widgets {
widgets = append(widgets, widget)
}
payload := map[string]interface{}{
"widgets": widgets,
"count": len(widgets),
}
if err := writeJSON(w, http.StatusOK, payload); err != nil {
writeJSONError(w, http.StatusInternalServerError, err.Error())
}
}
func (h WidgetHandler) get(w http.ResponseWriter, r *http.Request, id string) {
widget, ok := h.widgets[id]
if !ok {
log.Printf("unable to find widget with id %s", id)
writeJSONError(w, http.StatusNotFound, "The requested resource could not be located.")
return
}
if err := writeJSON(w, http.StatusOK, map[string]Widget{"widget": widget}); err != nil {
writeJSONError(w, http.StatusInternalServerError, err.Error())
}
}
func (h WidgetHandler) create(w http.ResponseWriter, r *http.Request) {
var widget Widget
decoder := json.NewDecoder(r.Body)
if err := decoder.Decode(&widget); err != nil {
log.Printf("unable to parse widget %s", err)
writeJSONError(w, http.StatusBadRequest, err.Error())
return
}
uuid, err := exec.Command("uuidgen").Output()
if err != nil {
log.Printf("unable to generate uuid %x", err)
writeJSONError(w, http.StatusInternalServerError, err.Error())
return
}
widget.ID = strings.TrimSpace(string(uuid))
h.widgets[widget.ID] = widget
if err := writeJSON(w, http.StatusCreated, map[string]Widget{"widget": widget}); err != nil {
writeJSONError(w, http.StatusInternalServerError, err.Error())
}
}
func (h WidgetHandler) update(w http.ResponseWriter, r *http.Request, id string) {
widget, ok := h.widgets[id]
if !ok {
log.Printf("unable to find widget with id %s", id)
writeJSONError(w, http.StatusNotFound, "The requested resource could not be located.")
return
}
var updWidget Widget
decoder := json.NewDecoder(r.Body)
if err := decoder.Decode(&updWidget); err != nil {
log.Printf("unable to parse widget %s", err)
writeJSONError(w, http.StatusBadRequest, err.Error())
return
}
widget.Name = updWidget.Name
widget.Description = updWidget.Description
h.widgets[widget.ID] = widget
if err := writeJSON(w, http.StatusOK, map[string]Widget{"widget": widget}); err != nil {
writeJSONError(w, http.StatusInternalServerError, err.Error())
}
}
func (h WidgetHandler) delete(w http.ResponseWriter, r *http.Request, id string) {
widget, ok := h.widgets[id]
if !ok {
log.Printf("unable to find widget with id %s", id)
writeJSONError(w, http.StatusNotFound, "The requested resource could not be located.")
return
}
delete(h.widgets, id)
if err := writeJSON(w, http.StatusOK, map[string]Widget{"widget": widget}); err != nil {
writeJSONError(w, http.StatusInternalServerError, err.Error())
}
}
func writeJSON(w http.ResponseWriter, status int, payload interface{}) error {
log.Printf("writing json response code %d with payload %s", status, payload)
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(status)
return json.NewEncoder(w).Encode(payload)
}
func writeJSONError(w http.ResponseWriter, status int, message string) error {
return writeJSON(w, status, map[string]string{
"error": message,
})
}
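// A minimal smoke test against a running instance (hypothetical payload; the
// server listens on 0.0.0.0:4778 as configured above):
//
//	curl -X POST -d '{"name":"sprocket","description":"demo"}' http://localhost:4778/widgets/
//	curl http://localhost:4778/widgets/
//	curl -X DELETE http://localhost:4778/widgets/<id-from-create-response>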
|
dmabuf_exporter.rs
|
use std::{fs::File, path::Path};
use log::error;
use v4l2r::{
device::Device,
ioctl::{self, ExpbufFlags},
memory::{DMABufHandle, MemoryType},
Format, QueueType,
};
use anyhow::Result;
pub fn export_dmabufs(
device_path: &Path,
queue: QueueType,
format: &Format,
nb_buffers: usize,
) -> Result<Vec<Vec<DMABufHandle<File>>>> {
let mut device = Device::open(device_path, Default::default())?;
let set_format: Format = ioctl::s_fmt(&mut device, queue, format.clone()).unwrap();
if set_format != *format {
error!("Requested format does not apply as-is");
error!("Requested format: {:?}", format);
error!("Applied format: {:?}", format);
return Err(anyhow::anyhow!("Could not apply requested format"));
}
let nb_buffers: usize =
ioctl::reqbufs(&device, queue, MemoryType::MMAP, nb_buffers as u32).unwrap();
let fds: Vec<Vec<DMABufHandle<File>>> = (0..nb_buffers)
.into_iter()
.map(|buffer| {
(0..format.plane_fmt.len())
.into_iter()
.map(|plane| {
DMABufHandle::from(
ioctl::expbuf::<Device, File>(
&device,
queue,
buffer,
plane,
ExpbufFlags::RDWR,
)
.unwrap(),
)
})
.collect()
})
.collect();
// We can close the device now, the exported buffers will remain alive as
// long as they are referenced.
drop(device);
Ok(fds)
}
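// A minimal usage sketch (hypothetical device path; the queue and format must
// be ones the driver actually supports, and each inner Vec holds one dmabuf
// fd per plane of a buffer):
//
// let format: Format = Default::default(); // fill in width/height/pixelformat
// let dmabufs = export_dmabufs(Path::new("/dev/video0"), QueueType::VideoCapture, &format, 4)?;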
|
softplus.rs
|
use mli::*;
#[derive(Copy, Clone, Debug)]
pub struct Softplus;
fn sigmoid(n: f32) -> f32 {
(1.0 + (-n).exp()).recip()
}
fn softplus(n: f32) -> f32 {
(1.0 + n.exp()).ln()
}
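// Note: d/dx softplus(x) = sigmoid(x), which is why `backward` below scales
// the incoming output_delta by sigmoid(input).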
impl Forward for Softplus {
type Input = f32;
type Internal = ();
type Output = f32;
fn forward(&self, &input: &f32) -> ((), f32) {
((), softplus(input))
}
}
impl Backward for Softplus {
type OutputDelta = f32;
type InputDelta = f32;
type TrainDelta = ();
fn backward(
&self,
&input: &f32,
_: &(),
&output_delta: &f32,
) -> (Self::InputDelta, Self::TrainDelta) {
(sigmoid(input) * output_delta, ())
}
}
impl Train for Softplus {
fn train(&mut self, _: &Self::TrainDelta) {}
}
|
use-save-settings.tsx
|
import { RefObject, useCallback } from 'react';
import { useHistory } from 'react-router-dom';
import DocumentModel from '../model/document/document-model';
import LocalSettings from '../model/local/local-settings';
import useLogger from './useLogger';
/**
* A hook that manages a function to save local settings, based on refs to
* uncontrolled inputs.
*
* Upon saving, this also returns to the previous page in the React Router stack.
*
* @param model - the document model to update the author in the document, as well.
* @param nameInputRef - the ref to the name input
* @param emailInputRef - the ref to the email input
* @param gravatarInputRef - the ref to the Gravatar input
*
* @returns a function to save the current values
*
* @example
* ```tsx
* const ref1 = useRef<HTMLInputElement>(null);
* const ref2 = useRef<HTMLInputElement>(null);
* const ref3 = useRef<HTMLInputElement>(null);
*
* const save = useSaveSettings(
* documentModel,
* ref1,
* ref2,
* ref3
* )
*
* return <>
* <input ref={ref1} type="text" />
* <input ref={ref2} type="text" />
* <input ref={ref3} type="checkbox" />
* <button onClick={save}>Save</button>
* </>
* ```
*/
export function useSaveSettings(
model: DocumentModel,
nameInputRef: RefObject<HTMLInputElement>,
emailInputRef: RefObject<HTMLInputElement>,
gravatarInputRef: RefObject<HTMLInputElement>
): () => Promise<void> {
const logger = useLogger('useSaveSettings');
const history = useHistory();
return useCallback(async () => {
model.update(async (model: DocumentModel) => {
// Sanitize inputs
const res = {
name: nameInputRef.current?.value ?? '',
gravatarMail: emailInputRef.current?.value ?? '',
gravatar: gravatarInputRef.current?.checked ?? false
};
logger.info('Saving user settings', res);
// Save to local settings
await LocalSettings.setGravatar(res.gravatar);
await LocalSettings.setAuthor(
Object.assign(await LocalSettings.getAuthor(), res)
);
logger.success('Settings saved successfully');
// Change the author details in the document
const newAuthor = await LocalSettings.getAuthor();
model.authors[newAuthor.uuid] = newAuthor;
// Return to previous page
history.goBack();
return model;
});
}, [nameInputRef.current, emailInputRef.current, gravatarInputRef.current]);
}
|
createWebGlContext.js
|
define(
'spell/shared/util/platform/private/graphics/webgl/createWebGlContext',
[
'spell/shared/util/platform/private/graphics/StateStack',
'spell/shared/util/platform/private/graphics/webgl/createContext',
'spell/shared/util/platform/private/graphics/webgl/shaders',
'spell/shared/util/color',
'spell/shared/util/platform/private/nativeType/createFloatArray',
'spell/math/util',
'spell/math/vec2',
'spell/math/vec3',
'spell/math/mat3',
'spell/functions'
],
function(
StateStack,
createContext,
shaders,
color,
createFloatArray,
mathUtil,
vec2,
vec3,
mat3,
_
) {
'use strict'
/*
* private
*/
var gl, canvas,
stateStack = new StateStack( 32 ),
currentState = stateStack.getTop(),
NUM_CIRCLE_VERTICES = 32,
QUAD_VERTEX_OFFSET = 0,
CIRCLE_VERTEX_OFFSET = QUAD_VERTEX_OFFSET + 4,
LINE_VERTEX_OFFSET = CIRCLE_VERTEX_OFFSET + NUM_CIRCLE_VERTICES,
vertices = createFloatArray( ( LINE_VERTEX_OFFSET + 2 ) * 2 ),
positionVertexBuffer
var screenSpaceShimMatrix = mat3.create()
// view space to screen space transformation matrix
var viewToScreen = mat3.create()
mat3.identity( viewToScreen )
// world space to view space transformation matrix
var worldToView = mat3.create()
mat3.identity( worldToView )
// accumulated transformation world space to screen space transformation matrix
var worldToScreen = mat3.create()
mat3.identity( worldToScreen )
var screenToWorld = mat3.create()
mat3.identity( screenToWorld )
var tmpMatrix = mat3.create(),
defaultTextureMatrix = mat3.create()
mat3.identity( defaultTextureMatrix )
/*
* Creates a projection matrix that normalizes the transformation behaviour to that of the normalized canvas-2d (that is, the origin is in the bottom left,
* positive x-axis to the right, positive y-axis up, screen space coordinates as input). The matrix transforms from screen space to clip space.
*
* @param width
* @param height
* @param matrix
*/
var createScreenSpaceShimMatrix = function( width, height, matrix ) {
mathUtil.mat3Ortho(
matrix,
0,
width,
0,
height
)
return matrix
}
var createViewToScreenMatrix = function( width, height, matrix ) {
mat3.identity( matrix )
matrix[ 0 ] = width * 0.5
matrix[ 4 ] = height * 0.5
matrix[ 6 ] = width * 0.5
matrix[ 7 ] = height * 0.5
return matrix
}
var initWrapperContext = function( shaderProgram ) {
viewport( shaderProgram, 0, 0, gl.canvas.width, gl.canvas.height )
// gl initialization
gl.clearColor( 0.0, 0.0, 0.0, 1.0 )
gl.clear( gl.COLOR_BUFFER_BIT )
// setting up blending
gl.enable( gl.BLEND )
gl.blendFunc( gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA )
gl.disable( gl.DEPTH_TEST )
gl.activeTexture( gl.TEXTURE0 )
}
/*
* Creates a wrapper context for the backend context.
*/
var createWrapperContext = function() {
var shaderProgram = createShaderProgram()
initWrapperContext( shaderProgram )
return {
clear : clear,
createTexture : createWebGlTexture,
drawTexture : _.bind( drawTexture, null, shaderProgram ),
drawSubTexture : _.bind( drawSubTexture, null, shaderProgram ),
drawRect : _.bind( drawRect, null, shaderProgram ),
drawCircle : _.bind( drawCircle, null, shaderProgram ),
drawLine : _.bind( drawLine, null, shaderProgram ),
fillRect : _.bind( fillRect, null, shaderProgram ),
getConfiguration : getConfiguration,
resizeColorBuffer : resizeColorBuffer,
restore : restore,
rotate : rotate,
save : save,
scale : scale,
setClearColor : setClearColor,
setColor : setColor,
setLineColor : setLineColor,
setGlobalAlpha : setGlobalAlpha,
setTransform : setTransform,
setViewMatrix : setViewMatrix,
transform : transform,
translate : translate,
viewport : _.bind( viewport, null, shaderProgram ),
transformScreenToWorld : transformScreenToWorld,
getCanvasElement : function() { return canvas },
flush : function() {}
}
}
/*
* Returns a rendering context. Once a context has been created additional calls to this method return the same context instance.
*
* @param canvas - the canvas dom element
*/
var createWebGlContext = function( canvasObj ) {
if( canvasObj === undefined ) throw 'Missing first argument.'
if( gl !== undefined ) return gl
canvas = canvasObj
gl = createContext( canvas )
if( gl === null ) return null
return createWrapperContext()
}
var createShaderProgram = function() {
var shaderProgram = gl.createProgram()
var vertexShader = gl.createShader( gl.VERTEX_SHADER )
gl.shaderSource( vertexShader, shaders.vertex )
gl.compileShader( vertexShader )
gl.attachShader( shaderProgram, vertexShader )
var fragmentShader = gl.createShader( gl.FRAGMENT_SHADER )
gl.shaderSource( fragmentShader, shaders.fragment )
gl.compileShader( fragmentShader )
gl.attachShader( shaderProgram, fragmentShader )
gl.linkProgram( shaderProgram )
gl.useProgram( shaderProgram )
// storing the attribute and uniform locations
shaderProgram.aVertexPosition = gl.getAttribLocation( shaderProgram, 'aVertexPosition' )
shaderProgram.uScreenSpaceShimMatrix = gl.getUniformLocation( shaderProgram, 'uScreenSpaceShimMatrix' )
shaderProgram.uTextureMatrix = gl.getUniformLocation( shaderProgram, 'uTextureMatrix' )
shaderProgram.uFillRect = gl.getUniformLocation( shaderProgram, 'uFillRect' )
shaderProgram.uGlobalAlpha = gl.getUniformLocation( shaderProgram, 'uGlobalAlpha' )
shaderProgram.uGlobalColor = gl.getUniformLocation( shaderProgram, 'uGlobalColor' )
shaderProgram.uTexture0 = gl.getUniformLocation( shaderProgram, 'uTexture0' )
shaderProgram.uModelViewMatrix = gl.getUniformLocation( shaderProgram, 'uModelViewMatrix' )
// setting up vertices
var angleStep = Math.PI * 2 / NUM_CIRCLE_VERTICES
// quad
vertices[ QUAD_VERTEX_OFFSET * 2 + 0 ] = 0.0
vertices[ QUAD_VERTEX_OFFSET * 2 + 1 ] = 0.0
vertices[ QUAD_VERTEX_OFFSET * 2 + 2 ] = 1.0
vertices[ QUAD_VERTEX_OFFSET * 2 + 3 ] = 0.0
vertices[ QUAD_VERTEX_OFFSET * 2 + 4 ] = 1.0
vertices[ QUAD_VERTEX_OFFSET * 2 + 5 ] = 1.0
vertices[ QUAD_VERTEX_OFFSET * 2 + 6 ] = 0.0
vertices[ QUAD_VERTEX_OFFSET * 2 + 7 ] = 1.0
// circle
for( var i = 0; i < NUM_CIRCLE_VERTICES; i++ ) {
var angle = angleStep * i
vertices[ CIRCLE_VERTEX_OFFSET * 2 + i * 2 ] = Math.sin( angle )
vertices[ CIRCLE_VERTEX_OFFSET * 2 + i * 2 + 1 ] = Math.cos( angle )
}
// line
// These vertices are stubs and get overwritten once the drawLine function is called.
vertices[ LINE_VERTEX_OFFSET * 2 + 0 ] = 0.0
vertices[ LINE_VERTEX_OFFSET * 2 + 1 ] = 0.0
vertices[ LINE_VERTEX_OFFSET * 2 + 2 ] = 0.0
vertices[ LINE_VERTEX_OFFSET * 2 + 3 ] = 0.0
positionVertexBuffer = gl.createBuffer()
gl.bindBuffer( gl.ARRAY_BUFFER, positionVertexBuffer )
gl.bufferData( gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW )
gl.vertexAttribPointer( shaderProgram.aVertexPosition, 2, gl.FLOAT, false, 0, 0 )
gl.enableVertexAttribArray( shaderProgram.aVertexPosition )
// setting up screen space shim matrix
gl.uniformMatrix3fv( shaderProgram.uScreenSpaceShimMatrix, false, screenSpaceShimMatrix )
// setting up texture matrix
setTextureMatrix( shaderProgram, defaultTextureMatrix )
return shaderProgram
}
var updateTextureMatrix = function( shaderProgram, ss, st, tt, ts, matrix ) {
mat3.identity( matrix )
matrix[ 0 ] = ss
matrix[ 4 ] = st
matrix[ 6 ] = tt
matrix[ 7 ] = ts
gl.uniformMatrix3fv( shaderProgram.uTextureMatrix, false, matrix )
}
var setTextureMatrix = function( shaderProgram, textureMatrix ) {
gl.uniformMatrix3fv( shaderProgram.uTextureMatrix, false, textureMatrix )
}
/*
* public
*/
var transformScreenToWorld = function( vec ) {
// transform vec to a gl-like origin (bottom left)
// use worldPosition as temp because we need to allocate it anyway
var worldPosition = vec2.clone( vec )
worldPosition[ 1 ] = gl.canvas.height - worldPosition[ 1 ]
vec2.transformMat3( worldPosition, worldPosition, screenToWorld )
return worldPosition
}
var save = function() {
stateStack.pushState()
currentState = stateStack.getTop()
}
var restore = function() {
stateStack.popState()
currentState = stateStack.getTop()
setViewMatrix( currentState.viewMatrix )
}
var setColor = function( vec ) {
currentState.color = color.createRgba( vec )
}
var setLineColor = function( vec ) {
currentState.lineColor = color.createRgba( vec )
}
var setGlobalAlpha = function( u ) {
currentState.opacity = u
}
var setClearColor = function( vec ) {
gl.clearColor( vec[ 0 ], vec[ 1 ], vec[ 2 ], 1.0 )
}
var scale = function( vec ) {
mat3.scale( currentState.matrix, currentState.matrix, vec )
}
var translate = function( vec ) {
mat3.translate( currentState.matrix, currentState.matrix, vec )
}
var rotate = function( u ) {
mat3.rotate( currentState.matrix, currentState.matrix, u )
}
/*
* Clears the color buffer with the clear color
*/
var clear = function() {
gl.clear( gl.COLOR_BUFFER_BIT )
}
var drawTexture = function( shaderProgram, texture, destinationPosition, destinationDimensions, textureMatrix ) {
if( texture === undefined ) throw 'Texture is undefined'
// setting up fillRect mode
gl.uniform1i( shaderProgram.uFillRect, 0 )
// setting up global alpha
gl.uniform1f( shaderProgram.uGlobalAlpha, currentState.opacity )
// setting up global color
gl.uniform4fv( shaderProgram.uGlobalColor, currentState.color )
// setting up texture
gl.bindTexture( gl.TEXTURE_2D, texture.privateGlTextureResource )
gl.uniform1i( shaderProgram.uTexture0, 0 )
// setting up transformation
mat3.multiply( tmpMatrix, worldToScreen, currentState.matrix )
// rotating the image so that it is not upside down
mat3.translate( tmpMatrix, tmpMatrix, destinationPosition )
mat3.rotate( tmpMatrix, tmpMatrix, Math.PI )
mat3.scale( tmpMatrix, tmpMatrix, [ -1.0, 1.0 ] )
mat3.scale( tmpMatrix, tmpMatrix, destinationDimensions )
mat3.translate( tmpMatrix, tmpMatrix, [ 0.0, -1.0 ] )
gl.uniformMatrix3fv( shaderProgram.uModelViewMatrix, false, tmpMatrix )
setTextureMatrix(
shaderProgram,
textureMatrix ?
textureMatrix :
defaultTextureMatrix
)
gl.drawArrays( gl.TRIANGLE_FAN, QUAD_VERTEX_OFFSET, 4 )
}
var drawSubTexture = function( shaderProgram, texture, sourcePosition, sourceDimensions, destinationPosition, destinationDimensions ) {
if( texture === undefined ) throw 'Texture is undefined'
// setting up fillRect mode
gl.uniform1i( shaderProgram.uFillRect, 0 )
// setting up global alpha
gl.uniform1f( shaderProgram.uGlobalAlpha, currentState.opacity )
// setting up global color
gl.uniform4fv( shaderProgram.uGlobalColor, currentState.color )
// setting up texture
gl.bindTexture( gl.TEXTURE_2D, texture.privateGlTextureResource )
gl.uniform1i( shaderProgram.uTexture0, 0 )
// setting up transformation
mat3.multiply( tmpMatrix, worldToScreen, currentState.matrix )
// rotating the image so that it is not upside down
mat3.translate( tmpMatrix, tmpMatrix, destinationPosition )
mat3.rotate( tmpMatrix, tmpMatrix, Math.PI )
mat3.scale( tmpMatrix, tmpMatrix, [ -1.0, 1.0 ] )
mat3.scale( tmpMatrix, tmpMatrix, destinationDimensions )
mat3.translate( tmpMatrix, tmpMatrix, [ 0.0, -1.0 ] )
gl.uniformMatrix3fv( shaderProgram.uModelViewMatrix, false, tmpMatrix )
// setting up the texture matrix
var tw = texture.dimensions[ 0 ],
th = texture.dimensions[ 1 ]
updateTextureMatrix(
shaderProgram,
( sourceDimensions[ 0 ] - 1 ) / tw,
( sourceDimensions[ 1 ] - 1 ) / th,
( sourcePosition[ 0 ] + 0.5 ) / tw,
( sourcePosition[ 1 ] + 0.5 ) / th,
tmpMatrix
)
gl.drawArrays( gl.TRIANGLE_FAN, QUAD_VERTEX_OFFSET, 4 )
}
var drawRect = function( shaderProgram, dx, dy, dw, dh, lineWidth ) {
if( !lineWidth ) lineWidth = 1
gl.lineWidth( lineWidth )
// setting up fillRect mode
gl.uniform1i( shaderProgram.uFillRect, 1 )
// setting up global alpha
gl.uniform1f( shaderProgram.uGlobalAlpha, currentState.opacity )
// setting up global color
gl.uniform4fv( shaderProgram.uGlobalColor, currentState.lineColor )
// setting up transformation
mat3.multiply( tmpMatrix, worldToScreen, currentState.matrix )
// correcting position
mat3.translate( tmpMatrix, tmpMatrix, [ dx, dy ] )
mat3.scale( tmpMatrix, tmpMatrix, [ dw, dh ] )
gl.uniformMatrix3fv( shaderProgram.uModelViewMatrix, false, tmpMatrix )
gl.drawArrays( gl.LINE_LOOP, QUAD_VERTEX_OFFSET, 4 )
}
var drawCircle = function( shaderProgram, dx, dy, radius, lineWidth ) {
if( !lineWidth ) lineWidth = 1
gl.lineWidth( lineWidth )
// setting up fillRect mode
gl.uniform1i( shaderProgram.uFillRect, 1 )
// setting up global alpha
gl.uniform1f( shaderProgram.uGlobalAlpha, currentState.opacity )
// setting up global color
gl.uniform4fv( shaderProgram.uGlobalColor, currentState.lineColor )
// setting up transformation
mat3.multiply( tmpMatrix, worldToScreen, currentState.matrix )
// correcting position
mat3.translate( tmpMatrix, tmpMatrix, [ dx, dy ] )
mat3.scale( tmpMatrix, tmpMatrix, [ radius, radius ] )
gl.uniformMatrix3fv( shaderProgram.uModelViewMatrix, false, tmpMatrix )
gl.drawArrays( gl.LINE_LOOP, CIRCLE_VERTEX_OFFSET, NUM_CIRCLE_VERTICES )
}
var drawLine = function( shaderProgram, ax, ay, bx, by, lineWidth ) {
if( !lineWidth ) lineWidth = 1
gl.lineWidth( lineWidth )
// setting up fillRect mode
gl.uniform1i( shaderProgram.uFillRect, 1 )
// setting up global alpha
gl.uniform1f( shaderProgram.uGlobalAlpha, currentState.opacity )
// setting up global color
gl.uniform4fv( shaderProgram.uGlobalColor, currentState.lineColor )
// setting up transformation
mat3.multiply( tmpMatrix, worldToScreen, currentState.matrix )
gl.uniformMatrix3fv( shaderProgram.uModelViewMatrix, false, tmpMatrix )
// line
vertices[ LINE_VERTEX_OFFSET * 2 + 0 ] = ax
vertices[ LINE_VERTEX_OFFSET * 2 + 1 ] = ay
vertices[ LINE_VERTEX_OFFSET * 2 + 2 ] = bx
vertices[ LINE_VERTEX_OFFSET * 2 + 3 ] = by
gl.bufferData( gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW )
gl.drawArrays( gl.LINES, LINE_VERTEX_OFFSET, 2 )
}
var fillRect = function( shaderProgram, dx, dy, dw, dh ) {
// setting up fillRect mode
gl.uniform1i( shaderProgram.uFillRect, 1 )
// setting up global alpha
gl.uniform1f( shaderProgram.uGlobalAlpha, currentState.opacity )
// setting up global color
gl.uniform4fv( shaderProgram.uGlobalColor, currentState.color )
// setting up transformation
mat3.multiply( tmpMatrix, worldToScreen, currentState.matrix )
// correcting position
mat3.translate( tmpMatrix, tmpMatrix, [ dx, dy ] )
mat3.scale( tmpMatrix, tmpMatrix, [ dw, dh ] )
gl.uniformMatrix3fv( shaderProgram.uModelViewMatrix, false, tmpMatrix )
gl.drawArrays( gl.TRIANGLE_FAN, QUAD_VERTEX_OFFSET, 4 )
}
var resizeColorBuffer = function( width, height ) {
gl.canvas.width = width
gl.canvas.height = height
createViewToScreenMatrix( width, height, viewToScreen )
mat3.multiply( worldToScreen, viewToScreen, worldToView )
}
var transform = function( matrix ) {
mat3.multiply( currentState.matrix, currentState.matrix, matrix )
}
var setTransform = function( matrix ) {
mat3.copy( currentState.matrix, matrix )
}
var setViewMatrix = function( matrix ) {
mat3.copy( currentState.viewMatrix, matrix )
mat3.copy( worldToView, matrix )
createViewToScreenMatrix( gl.canvas.width, gl.canvas.height, viewToScreen )
mat3.multiply( worldToScreen, viewToScreen, worldToView )
mat3.invert( screenToWorld, worldToScreen )
}
var viewport = function( shaderProgram, x, y, width, height ) {
gl.viewport( x, y , width, height )
// reinitialize screen space shim matrix
createScreenSpaceShimMatrix( width, height, screenSpaceShimMatrix )
gl.uniformMatrix3fv( shaderProgram.uScreenSpaceShimMatrix, false, screenSpaceShimMatrix )
}
/*
* Returns an object describing the current configuration of the rendering backend.
*/
var getConfiguration = function() {
var info = gl.getParameter( gl.VENDOR ) + ';' +
gl.getParameter( gl.RENDERER ) + ';' +
gl.getParameter( gl.VERSION ) + ';' +
gl.getParameter( gl.SHADING_LANGUAGE_VERSION )
return {
type : 'webgl',
width : gl.canvas.width,
height : gl.canvas.height,
info : info
}
}
var isPowerOfTwo = function( number ) {
var magnitude = ( Math.log( number ) / Math.log( 2 ) )
return magnitude === parseInt( magnitude, 10 )
}
/*
* Returns instance of texture class
*
* The public interface of the texture class consists of the two attributes width and height.
*
* @param image
*/
var createWebGlTexture = function( image ) {
var isPowerOfTwoTexture = isPowerOfTwo( image.width ) && isPowerOfTwo( image.height )
var texture = gl.createTexture()
gl.bindTexture( gl.TEXTURE_2D, texture )
gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image )
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR )
if( isPowerOfTwoTexture ) {
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.REPEAT )
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.REPEAT )
gl.generateMipmap( gl.TEXTURE_2D )
} else {
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE )
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE )
}
gl.bindTexture( gl.TEXTURE_2D, null )
return {
/*
* Public
*/
dimensions : [ image.width, image.height ],
/*
* Private
*
* This is an implementation detail of the class. If you write code that depends on this you better know what you are doing.
*/
privateGlTextureResource : texture
}
}
return createWebGlContext
}
)
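// A minimal usage sketch (hypothetical canvas element id; module id as
// declared above):
//
// define( [ 'spell/shared/util/platform/private/graphics/webgl/createWebGlContext' ], function( createWebGlContext ) {
//     var context = createWebGlContext( document.getElementById( 'spell-canvas' ) )
//     context.setClearColor( [ 0.0, 0.0, 0.0 ] )
//     context.clear()
// } )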
|
aws.go
|
package boshcli
import (
"github.com/EngineerBetter/control-tower/resource"
"github.com/EngineerBetter/control-tower/util"
"github.com/EngineerBetter/control-tower/util/yaml"
)
// AWSEnvironment holds all the parameters the AWS IAAS needs
type AWSEnvironment struct {
AccessKeyID string
ATCSecurityGroup string
AZ string
BlobstoreBucket string
CustomOperations string
DBCACert string
DBHost string
DBName string
DBPassword string
DBPort string
DBUsername string
DefaultKeyName string
DefaultSecurityGroups []string
ExternalIP string
InternalCIDR string
InternalGateway string
InternalIP string
PrivateCIDR string
PrivateCIDRGateway string
PrivateCIDRReserved string
PrivateKey string
PrivateSubnetID string
PublicCIDR string
PublicCIDRGateway string
PublicCIDRReserved string
PublicCIDRStatic string
PublicSubnetID string
Region string
S3AWSAccessKeyID string
S3AWSSecretAccessKey string
SecretAccessKey string
Spot bool
VersionFile []byte
VMSecurityGroup string
WorkerType string
}
func (e AWSEnvironment) ExtractBOSHandBPM() (util.Resource, util.Resource, error) {
resources := util.ParseVersionResources(e.VersionFile)
boshRelease := util.GetResource("bosh", resources)
bpmRelease := util.GetResource("bpm", resources)
return boshRelease, bpmRelease, nil
}
// ConfigureDirectorManifestCPI interpolates all the Environment parameters and
// required release versions into a ready-to-use Director manifest
func (e AWSEnvironment) ConfigureDirectorManifestCPI() (string, error) {
resources := util.ParseVersionResources(e.VersionFile)
cpiResource := util.GetResource("cpi", resources)
stemcellResource := util.GetResource("stemcell", resources)
var allOperations = resource.AWSCPIOps + resource.AWSExternalIPOps + resource.AWSBlobstoreOps + resource.AWSDirectorCustomOps
return yaml.Interpolate(resource.DirectorManifest, allOperations+e.CustomOperations, map[string]interface{}{
"cpi_url": cpiResource.URL,
"cpi_version": cpiResource.Version,
"cpi_sha1": cpiResource.SHA1,
"stemcell_url": stemcellResource.URL,
"stemcell_sha1": stemcellResource.SHA1,
"internal_cidr": e.InternalCIDR,
"internal_gw": e.InternalGateway,
"internal_ip": e.InternalIP,
"access_key_id": e.AccessKeyID,
"secret_access_key": e.SecretAccessKey,
"region": e.Region,
"az": e.AZ,
"default_key_name": e.DefaultKeyName,
"default_security_groups": e.DefaultSecurityGroups,
"private_key": e.PrivateKey,
"subnet_id": e.PublicSubnetID,
"external_ip": e.ExternalIP,
"blobstore_bucket": e.BlobstoreBucket,
"db_ca_cert": e.DBCACert,
"db_host": e.DBHost,
"db_name": e.DBName,
"db_password": e.DBPassword,
"db_port": e.DBPort,
"db_username": e.DBUsername,
"s3_aws_access_key_id": e.S3AWSAccessKeyID,
"s3_aws_secret_access_key": e.S3AWSSecretAccessKey,
})
}
type awsCloudConfigParams struct {
ATCSecurityGroupID string
AvailabilityZone string
PrivateSubnetID string
PublicSubnetID string
Spot bool
VMsSecurityGroupID string
WorkerType string
PublicCIDR string
PublicCIDRStatic string
PublicCIDRReserved string
PublicCIDRGateway string
PrivateCIDR string
PrivateCIDRGateway string
PrivateCIDRReserved string
}
// ConfigureDirectorCloudConfig inserts values from the environment into the config template passed as argument
func (e AWSEnvironment) ConfigureDirectorCloudConfig() (string, error) {
templateParams := awsCloudConfigParams{
AvailabilityZone: e.AZ,
VMsSecurityGroupID: e.VMSecurityGroup,
ATCSecurityGroupID: e.ATCSecurityGroup,
PublicSubnetID: e.PublicSubnetID,
PrivateSubnetID: e.PrivateSubnetID,
Spot: e.Spot,
WorkerType: e.WorkerType,
PublicCIDR: e.PublicCIDR,
PublicCIDRGateway: e.PublicCIDRGateway,
PublicCIDRReserved: e.PublicCIDRReserved,
PublicCIDRStatic: e.PublicCIDRStatic,
PrivateCIDR: e.PrivateCIDR,
PrivateCIDRGateway: e.PrivateCIDRGateway,
PrivateCIDRReserved: e.PrivateCIDRReserved,
}
cc, err := util.RenderTemplate("cloud-config", resource.AWSDirectorCloudConfig, templateParams)
if cc == nil {
return "", err
}
return string(cc), err
}
func (e AWSEnvironment) ConcourseStemcellURL() (string, error) {
return concourseStemcellURL(resource.AWSReleaseVersions, "https://storage.googleapis.com/bosh-aws-light-stemcells/%s/light-bosh-stemcell-%s-aws-xen-hvm-ubuntu-bionic-go_agent.tgz")
}
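// A minimal usage sketch (hypothetical values; VersionFile must contain the
// version-resources YAML that util.ParseVersionResources expects):
//
//	env := AWSEnvironment{
//		Region:      "eu-west-1",
//		AZ:          "eu-west-1a",
//		VersionFile: versionsYAML,
//	}
//	manifest, err := env.ConfigureDirectorManifestCPI()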
|
DLCoursera_part1_week4_1.py
|
import numpy as np
from dnn_utils import sigmoid,sigmoid_backward,relu,relu_backward
def initialize_two_layer(n_x, n_h, n_y):
    W1 = np.random.randn(n_h, n_x) * 0.01
    b1 = np.zeros((n_h, 1))
    W2 = np.random.randn(n_y, n_h) * 0.01
    b2 = np.zeros((n_y, 1))
    param = {"W1": W1, "b1": b1, "W2": W2, "b2": b2}
    return param

def initialize_l_layer(layer_dims):
    param = {}
    L = len(layer_dims)
    for l in range(1, L):
        param['W' + str(l)] = np.random.randn(layer_dims[l], layer_dims[l-1]) * 0.01
        param['b' + str(l)] = np.zeros((layer_dims[l], 1))
    return param

def linear_forward(W, A, b):
    """
    Implement the linear part of a neural unit
    """
    Z = np.dot(W, A) + b
    return Z

def linear_activation_forward(A_pre, W, b, activation):
    """
    Implement a neural unit with ReLU or sigmoid activation
    """
    if activation == "Relu":
        Z = linear_forward(W, A_pre, b)
        A, activation_cache = relu(Z)
    elif activation == "sigmoid":
        Z = linear_forward(W, A_pre, b)
        A, activation_cache = sigmoid(Z)
    backward_used_cache = (A_pre, W, b)
    cache = (backward_used_cache, activation_cache)
    return A, cache

def L_model_forward(X, param):
    """
    Implement forward propagation for an L-layer model
    """
    caches = []
    L = len(param) // 2
    A = X
    for l in range(1, L):
        A, cache = linear_activation_forward(A, param['W' + str(l)], param['b' + str(l)], "Relu")
        caches.append(cache)
    # Output layer: sigmoid activation on the L-th set of parameters
    Al, cache = linear_activation_forward(A, param['W' + str(L)], param['b' + str(L)], "sigmoid")
    caches.append(cache)
    return Al, caches

def linear_backward(dz, cache):
    """
    Implement the backward propagation of the linear part
    """
    m = dz.shape[1]
    A_pre, W, b = cache
    dw = np.dot(dz, A_pre.T) / m
    db = np.sum(dz, axis=1, keepdims=True) / m
    dA_pre = np.dot(W.T, dz)
    return dw, db, dA_pre

def linear_activation_backward(dA, cache, activation):
    """
    Implement the backward propagation of a neural unit
    """
    if activation == "Relu":
        dz = relu_backward(dA, cache[1])
    elif activation == "sigmoid":
        dz = sigmoid_backward(dA, cache[1])
    dw, db, dA_pre = linear_backward(dz, cache[0])
    return dw, db, dA_pre

def L_model_backward(AL, Y, caches):
    """
    Implement the backward propagation for an L-layer model
    """
    grads = {}
    L = len(caches)
    dAL = - (np.divide(Y, AL) - np.divide(1 - Y, 1 - AL))
    grads['dw' + str(L)], grads['db' + str(L)], grads['dA' + str(L)] = linear_activation_backward(dAL, caches[-1], "sigmoid")
    for l in reversed(range(L - 1)):
        cache = caches[l]
        grads['dw' + str(l + 1)], grads['db' + str(l + 1)], grads['dA' + str(l + 1)] = linear_activation_backward(
            grads['dA' + str(l + 2)], cache, "Relu")
    return grads

def update_param(param, grads, learning_rate):
    """
    Update the parameters with one gradient-descent step
    """
    L = len(param) // 2
    for l in range(L):
        param['W' + str(l + 1)] = param['W' + str(l + 1)] - learning_rate * grads['dw' + str(l + 1)]
        param['b' + str(l + 1)] = param['b' + str(l + 1)] - learning_rate * grads['db' + str(l + 1)]
    return param
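# A minimal usage sketch (hypothetical shapes; dnn_utils must provide the
# activation helpers imported above):
#
# param = initialize_l_layer([12288, 20, 7, 1])
# AL, caches = L_model_forward(X_train, param)    # X_train: (12288, m)
# grads = L_model_backward(AL, Y_train, caches)   # Y_train: (1, m)
# param = update_param(param, grads, learning_rate=0.0075)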
|
fr.py
|
from itertools import islice
from lxml import etree
from urllib import parse
MAIN_PAGE = "Wikiquote:Accueil"
def extract_quotes(tree, max_quotes):
    # French wiki uses a "citation" HTML class
    node_list = tree.xpath('//div[@class="citation"]')
    quotes = list(islice((span.text_content()
                          for span in node_list),
                         max_quotes))
    return quotes
def extract_quotes_and_authors(tree):
    WAIT_FOR_h3 = 0
    WAIT_FOR_a = 1
    WAIT_FOR_a_DEDICATED_PAGE = 2
    state = WAIT_FOR_h3
    current_character = None
    found_quotes = {}
    for element in tree.iter():
        if state == WAIT_FOR_h3:
            if element.tag == "h3":
                current_character = None
                state = WAIT_FOR_a
                continue
            if element.tag == "div" and "class" in element.attrib and element.attrib["class"] == "citation":
                if current_character is not None:
                    found_quotes[element.text] = current_character
                continue
            if element.tag == "i" and element.text is not None and "Voir le recueil de citations :" in element.text:
                state = WAIT_FOR_a_DEDICATED_PAGE
                continue
        if state == WAIT_FOR_a:
            if element.tag == "a" and "class" in element.attrib and element.attrib["class"] == "extiw":
                current_character = element.text
                state = WAIT_FOR_h3
                continue
        if state == WAIT_FOR_a_DEDICATED_PAGE:
            if element.tag == "a":
                link = element.attrib["href"]
                link = link.split("/")
                link = link[-2] + "/" + link[-1]
                link = parse.unquote(link)
                import wikiquote
                dedicated_quotes = wikiquote.quotes(link, max_quotes=1000, lang='fr')
                for q in dedicated_quotes:
                    if current_character is not None:
                        found_quotes[q] = current_character
                current_character = None
                state = WAIT_FOR_h3
                continue
    return found_quotes
# author_nodes = tree.xpath('//h3/span[@class="mw-headline"]')
# print(author_nodes)
# authors = list(islice((span.text_content()
# for span in author_nodes),
# max_quotes))
# print(authors)
def qotd(html_tree):
    tree = html_tree.get_element_by_id('mf-cdj')
    tree = tree.xpath('div/div')[1].xpath('table/tr/td')[1]
    quote = tree.xpath('div/i')[0].text_content().replace(u'\xa0', u' ')
    author = tree.xpath('div/a')[0].text_content()
    return quote, author
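# A minimal usage sketch (hypothetical; `tree` is an lxml HTML tree of a
# French Wikiquote page, as the wikiquote package passes in):
#
# quotes = extract_quotes(tree, max_quotes=5)
# quote, author = qotd(html_tree)  # html_tree: the MAIN_PAGE tree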
|
pdf_print_service.js
|
/* Copyright 2016 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { CSS_UNITS, NullL10n } from './ui_utils';
import { PDFPrintServiceFactory, PDFViewerApplication } from './app';
import { PDFJS } from 'pdfjs-lib';
let activeService = null;
let overlayManager = null;
// Renders the page to the canvas of the given print service, and returns
// the suggested dimensions of the output page.
function renderPage(activeServiceOnEntry, pdfDocument, pageNumber, size) {
let scratchCanvas = activeService.scratchCanvas;
// The size of the canvas in pixels for printing.
const PRINT_RESOLUTION = 150;
const PRINT_UNITS = PRINT_RESOLUTION / 72.0;
scratchCanvas.width = Math.floor(size.width * PRINT_UNITS);
scratchCanvas.height = Math.floor(size.height * PRINT_UNITS);
// The physical size of the img as specified by the PDF document.
let width = Math.floor(size.width * CSS_UNITS) + 'px';
let height = Math.floor(size.height * CSS_UNITS) + 'px';
let ctx = scratchCanvas.getContext('2d');
ctx.save();
ctx.fillStyle = 'rgb(255, 255, 255)';
ctx.fillRect(0, 0, scratchCanvas.width, scratchCanvas.height);
ctx.restore();
return pdfDocument.getPage(pageNumber).then(function(pdfPage) {
let renderContext = {
canvasContext: ctx,
transform: [PRINT_UNITS, 0, 0, PRINT_UNITS, 0, 0],
viewport: pdfPage.getViewport(1, size.rotation),
intent: 'print',
};
return pdfPage.render(renderContext).promise;
}).then(function() {
return {
width,
height,
};
});
}
function PDFPrintService(pdfDocument, pagesOverview, printContainer, l10n) {
this.pdfDocument = pdfDocument;
this.pagesOverview = pagesOverview;
this.printContainer = printContainer;
this.l10n = l10n || NullL10n;
this.currentPage = -1;
// The temporary canvas where renderPage paints one page at a time.
this.scratchCanvas = document.createElement('canvas');
}
PDFPrintService.prototype = {
layout() {
this.throwIfInactive();
let body = document.querySelector('body');
body.setAttribute('data-pdfjsprinting', true);
let hasEqualPageSizes = this.pagesOverview.every(function(size) {
return size.width === this.pagesOverview[0].width &&
size.height === this.pagesOverview[0].height;
}, this);
if (!hasEqualPageSizes) {
console.warn('Not all pages have the same size. The printed ' +
'result may be incorrect!');
}
// Insert a @page + size rule to make sure that the page size is correctly
// set. Note that we assume that all pages have the same size, because
// variable-size pages are not supported yet (e.g. in Chrome & Firefox).
// TODO(robwu): Use named pages when size calculation bugs get resolved
// (e.g. https://crbug.com/355116) AND when support for named pages is
// added (http://www.w3.org/TR/css3-page/#using-named-pages).
// In browsers where @page + size is not supported (such as Firefox,
// https://bugzil.la/851441), the next stylesheet will be ignored and the
// user has to select the correct paper size in the UI if wanted.
this.pageStyleSheet = document.createElement('style');
let pageSize = this.pagesOverview[0];
this.pageStyleSheet.textContent =
// "size:<width> <height>" is what we need. But also add "A4" because
// Firefox incorrectly reports support for the other value.
'@supports ((size:A4) and (size:1pt 1pt)) {' +
'@page { size: ' + pageSize.width + 'pt ' + pageSize.height + 'pt;}' +
'}';
body.appendChild(this.pageStyleSheet);
},
destroy() {
if (activeService !== this) {
// |activeService| cannot be replaced without calling destroy() first,
// so if it differs then an external consumer has a stale reference to
// us.
return;
}
this.printContainer.textContent = '';
if (this.pageStyleSheet && this.pageStyleSheet.parentNode) {
this.pageStyleSheet.parentNode.removeChild(this.pageStyleSheet);
this.pageStyleSheet = null;
}
this.scratchCanvas.width = this.scratchCanvas.height = 0;
this.scratchCanvas = null;
activeService = null;
ensureOverlay().then(function() {
if (overlayManager.active !== 'printServiceOverlay') {
return; // overlay was already closed
}
overlayManager.close('printServiceOverlay');
});
},
renderPages() {
let pageCount = this.pagesOverview.length;
let renderNextPage = (resolve, reject) => {
this.throwIfInactive();
if (++this.currentPage >= pageCount) {
renderProgress(pageCount, pageCount, this.l10n);
resolve();
return;
}
let index = this.currentPage;
renderProgress(index, pageCount, this.l10n);
renderPage(this, this.pdfDocument, index + 1, this.pagesOverview[index])
.then(this.useRenderedPage.bind(this))
.then(function() {
renderNextPage(resolve, reject);
}, reject);
};
return new Promise(renderNextPage);
},
useRenderedPage(printItem) {
this.throwIfInactive();
let img = document.createElement('img');
img.style.width = printItem.width;
img.style.height = printItem.height;
let scratchCanvas = this.scratchCanvas;
if (('toBlob' in scratchCanvas) && !PDFJS.disableCreateObjectURL) {
scratchCanvas.toBlob(function(blob) {
img.src = URL.createObjectURL(blob);
});
} else {
img.src = scratchCanvas.toDataURL();
}
let wrapper = document.createElement('div');
wrapper.appendChild(img);
this.printContainer.appendChild(wrapper);
return new Promise(function(resolve, reject) {
img.onload = resolve;
img.onerror = reject;
});
},
performPrint() {
this.throwIfInactive();
return new Promise((resolve) => {
// Push window.print in the macrotask queue to avoid being affected by
// the deprecation of running print() code in a microtask, see
// https://github.com/mozilla/pdf.js/issues/7547.
setTimeout(() => {
if (!this.active) {
resolve();
return;
}
print.call(window);
// Delay promise resolution in case print() was not synchronous.
setTimeout(resolve, 20); // Tidy-up.
}, 0);
});
},
get active() {
return this === activeService;
},
throwIfInactive() {
if (!this.active) {
throw new Error('This print request was cancelled or completed.');
}
},
};
let print = window.print;
window.print = function print() {
if (activeService) {
console.warn('Ignored window.print() because of a pending print job.');
return;
}
ensureOverlay().then(function() {
if (activeService) {
overlayManager.open('printServiceOverlay');
}
});
try {
dispatchEvent('beforeprint');
} finally {
if (!activeService) {
console.error('Expected print service to be initialized.');
ensureOverlay().then(function() {
if (overlayManager.active === 'printServiceOverlay') {
overlayManager.close('printServiceOverlay');
}
});
return; // eslint-disable-line no-unsafe-finally
}
let activeServiceOnEntry = activeService;
activeService.renderPages().then(function() {
return activeServiceOnEntry.performPrint();
}).catch(function() {
// Ignore any error messages.
}).then(function() {
// aborts acts on the "active" print request, so we need to check
// whether the print request (activeServiceOnEntry) is still active.
// Without the check, an unrelated print request (created after aborting
// this print request while the pages were being generated) would be
// aborted.
if (activeServiceOnEntry.active) {
abort();
}
});
}
};
function dispatchEvent(eventType) {
let event = document.createEvent('CustomEvent');
event.initCustomEvent(eventType, false, false, 'custom');
window.dispatchEvent(event);
}
function abort() {
if (activeService) {
activeService.destroy();
dispatchEvent('afterprint');
}
}
function renderProgress(index, total, l10n) {
let progressContainer = document.getElementById('printServiceOverlay');
let progress = Math.round(100 * index / total);
let progressBar = progressContainer.querySelector('progress');
let progressPerc = progressContainer.querySelector('.relative-progress');
progressBar.value = progress;
l10n.get('print_progress_percent', { progress, }, progress + '%').
then((msg) => {
progressPerc.textContent = msg;
});
}
let hasAttachEvent = !!document.attachEvent;
window.addEventListener('keydown', function(event) {
// Intercept Cmd/Ctrl + P in all browsers.
// Also intercept Cmd/Ctrl + Shift + P in Chrome and Opera
if (event.keyCode === /* P= */ 80 && (event.ctrlKey || event.metaKey) &&
!event.altKey && (!event.shiftKey || window.chrome || window.opera)) {
window.print();
if (hasAttachEvent) {
// Only attachEvent can cancel Ctrl + P dialog in IE <=10
// attachEvent is gone in IE11, so the dialog will re-appear in IE11.
return;
}
event.preventDefault();
if (event.stopImmediatePropagation) {
event.stopImmediatePropagation();
} else {
event.stopPropagation();
}
return;
}
}, true);
if (hasAttachEvent) {
document.attachEvent('onkeydown', function(event) {
event = event || window.event;
if (event.keyCode === /* P= */ 80 && event.ctrlKey) {
event.keyCode = 0;
return false;
}
});
}
if ('onbeforeprint' in window) {
// Do not propagate before/afterprint events when they are not triggered
// from within this polyfill. (FF /IE / Chrome 63+).
let stopPropagationIfNeeded = function(event) {
if (event.detail !== 'custom' && event.stopImmediatePropagation) {
event.stopImmediatePropagation();
}
};
window.addEventListener('beforeprint', stopPropagationIfNeeded);
window.addEventListener('afterprint', stopPropagationIfNeeded);
}
let overlayPromise;
function ensureOverlay() {
if (!overlayPromise) {
overlayManager = PDFViewerApplication.overlayManager;
if (!overlayManager) {
throw new Error('The overlay manager has not yet been initialized.');
}
overlayPromise = overlayManager.register('printServiceOverlay',
document.getElementById('printServiceOverlay'), abort, true);
document.getElementById('printCancel').onclick = abort;
}
return overlayPromise;
}
PDFPrintServiceFactory.instance = {
supportsPrinting: true,
createPrintService(pdfDocument, pagesOverview, printContainer, l10n) {
if (activeService) {
throw new Error('The print service is created and active.');
}
activeService = new PDFPrintService(pdfDocument, pagesOverview,
printContainer, l10n);
return activeService;
},
};
export {
PDFPrintService,
};
|
CCMaterial.js
|
/****************************************************************************
Copyright (c) 2017-2018 Xiamen Yaji Software Co., Ltd.
http://www.cocos.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated engine source code (the "Software"), a limited,
worldwide, royalty-free, non-assignable, revocable and non-exclusive license
to use Cocos Creator solely to develop games on your target platforms. You shall
not use Cocos Creator software for developing other software or tools that's
used for developing games. You are not granted to publish, distribute,
sublicense, and/or sell copies of Cocos Creator.
The software or tools in this License Agreement are licensed, not sold.
Xiamen Yaji Software Co., Ltd. reserves all rights not expressly granted to you.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
const Asset = require('../CCAsset');
const Texture = require('../CCTexture2D');
const PixelFormat = Texture.PixelFormat;
const EffectAsset = require('./CCEffectAsset');
const textureUtil = require('../../utils/texture-util');
const gfx = cc.gfx;
/**
* !#en Material builtin name
* !#zh 内置材质名字
* @enum Material.BUILTIN_NAME
*/
const BUILTIN_NAME = cc.Enum({
/**
* @property SPRITE
* @readonly
* @type {String}
*/
SPRITE: '2d-sprite',
/**
* @property GRAY_SPRITE
* @readonly
* @type {String}
*/
GRAY_SPRITE: '2d-gray-sprite',
/**
* @property UNLIT
* @readonly
* @type {String}
*/
UNLIT: 'unlit',
});
/**
* !#en Material Asset.
* !#zh 材质资源类。
* @class Material
* @extends Asset
*/
let Material = cc.Class({
name: 'cc.Material',
extends: Asset,
ctor () {
this.loaded = false;
this._manualHash = false;
this._dirty = true;
this._effect = null;
},
properties: {
// deprecated
_defines: {
default: undefined,
type: Object
},
// deprecated
_props: {
default: undefined,
type: Object
},
_effectAsset: {
type: EffectAsset,
default: null,
},
_techniqueIndex: 0,
_techniqueData: Object,
effectName: CC_EDITOR ? {
get () {
return this._effectAsset && this._effectAsset.name;
},
set (val) {
let effectAsset = cc.assetManager.builtins.getBuiltin('effect', val);
if (!effectAsset) {
Editor.warn(`no effect named '${val}' found`);
return;
}
this.effectAsset = effectAsset;
}
} : undefined,
effectAsset: {
get () {
return this._effectAsset;
},
set (asset) {
if (cc.game.renderType === cc.game.RENDER_TYPE_CANVAS) {
return;
}
this._effectAsset = asset;
if (!asset) {
cc.error('Can not set an empty effect asset.');
return;
}
this._effect = this._effectAsset.getInstantiatedEffect();
}
},
effect: {
get () {
return this._effect;
}
},
techniqueIndex: {
get () {
return this._techniqueIndex;
},
set (v) {
this._techniqueIndex = v;
this._effect.switchTechnique(v);
}
}
},
statics: {
/**
* !#en Get built-in materials
* !#zh 获取内置材质
* @static
* @method getBuiltinMaterial
* @param {string} name
* @return {Material}
*/
getBuiltinMaterial (name) {
if (cc.game.renderType === cc.game.RENDER_TYPE_CANVAS) {
return new cc.Material();
}
return cc.assetManager.builtins.getBuiltin('material', 'builtin-' + name);
},
BUILTIN_NAME,
/**
* !#en Creates a Material with builtin Effect.
* !#zh 使用内建 Effect 创建一个材质。
* @static
* @method createWithBuiltin
* @param {string} effectName
* @param {number} [techniqueIndex]
* @return {Material}
*/
createWithBuiltin (effectName, techniqueIndex = 0) {
let effectAsset = cc.assetManager.builtins.getBuiltin('effect', 'builtin-' + effectName);
return Material.create(effectAsset, techniqueIndex);
},
/**
* !#en Creates a Material.
* !#zh 创建一个材质。
* @static
* @method create
* @param {EffectAsset} effectAsset
* @param {number} [techniqueIndex]
* @return {Material}
*/
create (effectAsset, techniqueIndex = 0) {
if (!effectAsset) return null;
let material = new Material();
material.effectAsset = effectAsset;
material.techniqueIndex = techniqueIndex;
return material;
}
},
/**
* !#en Sets the Material property
* !#zh 设置材质的属性
* @method setProperty
* @param {string} name
* @param {Object} val
* @param {number} [passIdx]
* @param {boolean} [directly]
*/
setProperty (name, val, passIdx, directly) {
if (cc.game.renderType === cc.game.RENDER_TYPE_CANVAS) return;
if (typeof passIdx === 'string') {
passIdx = parseInt(passIdx);
}
if (val instanceof Texture) {
let isAlphaAtlas = val.isAlphaAtlas();
let key = 'CC_USE_ALPHA_ATLAS_' + name;
let def = this.getDefine(key, passIdx);
if (isAlphaAtlas || def) {
this.define(key, isAlphaAtlas);
}
if (!val.loaded) {
cc.assetManager.postLoadNative(val);
}
}
this._effect.setProperty(name, val, passIdx, directly);
},
/**
* !#en Sets the Material property as fast as possible, but maybe not safe.
* !#zh 快速设置材质的属性, 需要用户在自己的代码中去确保该操作的正确性.
* @method setPropertyFast
* @param {string} name
* @param {Object} val
* @param {number} [passIdx]
* @param {boolean} [directly]
*/
setPropertyFast (name, val, passIdx, directly) {
const effect = this._effect;
effect._setPassProperty(name, val, effect._passes[passIdx || 0], directly);
},
/**
* !#en Gets the Material property.
* !#zh 获取材质的属性。
* @method getProperty
* @param {string} name
* @param {number} passIdx
* @return {Object}
*/
getProperty (name, passIdx) {
if (typeof passIdx === 'string') {
passIdx = parseInt(passIdx);
}
return this._effect.getProperty(name, passIdx);
},
/**
* !#en Sets the Material define.
* !#zh 设置材质的宏定义。
* @method define
* @param {string} name
* @param {boolean|number} val
* @param {number} [passIdx]
* @param {boolean} [force]
*/
define (name, val, passIdx, force) {
if (cc.game.renderType === cc.game.RENDER_TYPE_CANVAS) return;
if (typeof passIdx === 'string') {
passIdx = parseInt(passIdx);
}
this._effect.define(name, val, passIdx, force);
},
/**
* !#en Gets the Material define.
* !#zh 获取材质的宏定义。
* @method getDefine
* @param {string} name
* @param {number} [passIdx]
* @return {boolean|number}
*/
getDefine (name, passIdx) {
if (typeof passIdx === 'string') {
passIdx = parseInt(passIdx);
}
return this._effect.getDefine(name, passIdx);
},
/**
* !#en Sets the Material cull mode.
* !#zh 设置材质的裁减模式。
* @method setCullMode
* @param {number} cullMode
* @param {number} passIdx
*/
setCullMode (cullMode = gfx.CULL_BACK, passIdx) {
this._effect.setCullMode(cullMode, passIdx);
},
/**
* !#en Sets the Material depth states.
* !#zh 设置材质的深度渲染状态。
* @method setDepth
* @param {boolean} depthTest
* @param {boolean} depthWrite
* @param {number} depthFunc
* @param {number} passIdx
*/
setDepth (
depthTest = false,
depthWrite = false,
depthFunc = gfx.DS_FUNC_LESS,
passIdx
) {
this._effect.setDepth(depthTest, depthWrite, depthFunc, passIdx);
},
/**
* !#en Sets the Material blend states.
* !#zh 设置材质的混合渲染状态。
* @method setBlend
* @param {boolean} enabled
* @param {number} blendEq
* @param {number} blendSrc
* @param {number} blendDst
* @param {number} blendAlphaEq
* @param {number} blendSrcAlpha
* @param {number} blendDstAlpha
* @param {number} blendColor
* @param {number} passIdx
*/
setBlend (
enabled = false,
blendEq = gfx.BLEND_FUNC_ADD,
blendSrc = gfx.BLEND_SRC_ALPHA,
blendDst = gfx.BLEND_ONE_MINUS_SRC_ALPHA,
blendAlphaEq = gfx.BLEND_FUNC_ADD,
blendSrcAlpha = gfx.BLEND_SRC_ALPHA,
blendDstAlpha = gfx.BLEND_ONE_MINUS_SRC_ALPHA,
blendColor = 0xffffffff,
passIdx
) {
this._effect.setBlend(enabled, blendEq, blendSrc, blendDst, blendAlphaEq, blendSrcAlpha, blendDstAlpha, blendColor, passIdx);
},
/**
* !#en Sets whether enable the stencil test.
* !#zh 设置是否开启模板测试。
* @method setStencilEnabled
* @param {number} stencilTest
* @param {number} passIdx
*/
setStencilEnabled (stencilTest = gfx.STENCIL_INHERIT, passIdx) {
this._effect.setStencilEnabled(stencilTest, passIdx);
},
/**
* !#en Sets the Material stencil render states.
* !#zh 设置材质的模板测试渲染参数。
* @method setStencil
* @param {number} stencilTest
* @param {number} stencilFunc
* @param {number} stencilRef
* @param {number} stencilMask
* @param {number} stencilFailOp
* @param {number} stencilZFailOp
* @param {number} stencilZPassOp
* @param {number} stencilWriteMask
* @param {number} passIdx
*/
setStencil (
stencilTest = gfx.STENCIL_INHERIT,
stencilFunc = gfx.DS_FUNC_ALWAYS,
stencilRef = 0,
stencilMask = 0xff,
stencilFailOp = gfx.STENCIL_OP_KEEP,
stencilZFailOp = gfx.STENCIL_OP_KEEP,
stencilZPassOp = gfx.STENCIL_OP_KEEP,
stencilWriteMask = 0xff,
passIdx
) {
this._effect.setStencil(stencilTest, stencilFunc, stencilRef, stencilMask, stencilFailOp, stencilZFailOp, stencilZPassOp, stencilWriteMask, passIdx);
},
updateHash (hash) {
this._manualHash = hash;
this._effect && this._effect.updateHash(hash);
},
getHash () {
return this._manualHash || (this._effect && this._effect.getHash());
},
onLoad () {
this.effectAsset = this._effectAsset;
if (!this._effect) return;
if (this._techniqueIndex) {
this._effect.switchTechnique(this._techniqueIndex);
}
this._techniqueData = this._techniqueData || {};
let passDatas = this._techniqueData;
for (let index in passDatas) {
index = parseInt(index);
let passData = passDatas[index];
if (!passData) continue;
for (let def in passData.defines) {
this.define(def, passData.defines[def], index);
}
for (let prop in passData.props) {
this.setProperty(prop, passData.props[prop], index);
}
}
},
});
export default Material;
cc.Material = Material;
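// Usage sketch (illustrative, not part of the engine source): `myEffectAsset`
// and the 'diffuseColor'/'USE_TEXTURE' names are assumptions for illustration.
// let mat = cc.Material.create(myEffectAsset, 0);
// mat.setProperty('diffuseColor', cc.Color.WHITE, 0);
// mat.define('USE_TEXTURE', true, 0);
// mat.setBlend(true); // defaults give standard src-alpha / one-minus-src-alpha blending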
|
|
Sidebar.test.tsx
|
import React from "react";
import { useStaticQuery } from "gatsby";
import { shallow } from "enzyme";
import toJSON from "enzyme-to-json";
import Sidebar from "./Sidebar";
import siteMetadata from "../../../jest/__fixtures__/site-metadata";
describe("<Sidebar />", () => {
beforeEach(() => {
(useStaticQuery as jest.Mock).mockReturnValue(siteMetadata);
});
it("renders", () => {
    shallow(<Sidebar />);
  });

  it("renders and matches snapshot", () => {
    const wrapper = shallow(<Sidebar />);
expect(toJSON(wrapper)).toMatchSnapshot();
});
});
|
connectApp.js
|
'use strict';
/* jshint ignore:start */
/**
* This code was generated by
* \ / _ _ _| _ _
* | (_)\/(_)(_|\/| |(/_ v1.0.0
* / /
*/
/* jshint ignore:end */
var Q = require('q'); /* jshint ignore:line */
var _ = require('lodash'); /* jshint ignore:line */
var Page = require('../../../../base/Page'); /* jshint ignore:line */
var serialize = require('../../../../base/serialize'); /* jshint ignore:line */
var values = require('../../../../base/values'); /* jshint ignore:line */
var ConnectAppList;
var ConnectAppPage;
var ConnectAppInstance;
var ConnectAppContext;
/* jshint ignore:start */
/**
* @description Initialize the ConnectAppList
*
* @param {Twilio.Api.V2010} version - Version of the resource
* @param {string} accountSid - The SID of the Account that created the resource
*/
/* jshint ignore:end */
ConnectAppList = function ConnectAppList(version, accountSid) {
/* jshint ignore:start */
/**
* @param {string} sid - sid of instance
*
* @returns {Twilio.Api.V2010.AccountContext.ConnectAppContext}
*/
/* jshint ignore:end */
function ConnectAppListInstance(sid) {
return ConnectAppListInstance.get(sid);
}
ConnectAppListInstance._version = version;
// Path Solution
ConnectAppListInstance._solution = {accountSid: accountSid};
ConnectAppListInstance._uri = _.template(
'/Accounts/<%= accountSid %>/ConnectApps.json' // jshint ignore:line
)(ConnectAppListInstance._solution);
/* jshint ignore:start */
/**
* Streams ConnectAppInstance records from the API.
*
* This operation lazily loads records as efficiently as possible until the limit
* is reached.
*
* The results are passed into the callback function, so this operation is memory efficient.
*
* If a function is passed as the first argument, it will be used as the callback function.
*
* @param {object} [opts] - Options for request
* @param {number} [opts.limit] -
* Upper limit for the number of records to return.
* each() guarantees never to return more than limit.
* Default is no limit
* @param {number} [opts.pageSize] -
* Number of records to fetch per request,
* when not set will use the default value of 50 records.
* If no pageSize is defined but a limit is defined,
* each() will attempt to read the limit with the most efficient
* page size, i.e. min(limit, 1000)
* @param {Function} [opts.callback] -
* Function to process each record. If this and a positional
* callback are passed, this one will be used
* @param {Function} [opts.done] -
* Function to be called upon completion of streaming
* @param {Function} [callback] - Function to process each record
*/
/* jshint ignore:end */
ConnectAppListInstance.each = function each(opts, callback) {
if (_.isFunction(opts)) {
callback = opts;
opts = {};
}
opts = opts || {};
if (opts.callback) {
callback = opts.callback;
}
if (_.isUndefined(callback)) {
throw new Error('Callback function must be provided');
}
var done = false;
var currentPage = 1;
var currentResource = 0;
var limits = this._version.readLimits({
limit: opts.limit,
pageSize: opts.pageSize
});
function onComplete(error) {
done = true;
if (_.isFunction(opts.done)) {
opts.done(error);
}
}
function fetchNextPage(fn) {
var promise = fn();
if (_.isUndefined(promise)) {
onComplete();
return;
}
promise.then(function(page) {
_.each(page.instances, function(instance) {
if (done || (!_.isUndefined(opts.limit) && currentResource >= opts.limit)) {
done = true;
return false;
}
currentResource++;
callback(instance, onComplete);
});
if ((limits.pageLimit && limits.pageLimit <= currentPage)) {
onComplete();
} else if (!done) {
currentPage++;
fetchNextPage(_.bind(page.nextPage, page));
}
});
promise.catch(onComplete);
}
fetchNextPage(_.bind(this.page, this, _.merge(opts, limits)));
};
/* jshint ignore:start */
/**
* Lists ConnectAppInstance records from the API as a list.
*
* If a function is passed as the first argument, it will be used as the callback function.
*
* @param {object} [opts] - Options for request
* @param {number} [opts.limit] -
* Upper limit for the number of records to return.
* list() guarantees never to return more than limit.
* Default is no limit
* @param {number} [opts.pageSize] -
* Number of records to fetch per request,
* when not set will use the default value of 50 records.
* If no page_size is defined but a limit is defined,
* list() will attempt to read the limit with the most
* efficient page size, i.e. min(limit, 1000)
* @param {function} [callback] - Callback to handle list of records
*
* @returns {Promise} Resolves to a list of records
*/
/* jshint ignore:end */
ConnectAppListInstance.list = function list(opts, callback) {
if (_.isFunction(opts)) {
callback = opts;
opts = {};
}
opts = opts || {};
var deferred = Q.defer();
var allResources = [];
opts.callback = function(resource, done) {
allResources.push(resource);
if (!_.isUndefined(opts.limit) && allResources.length === opts.limit) {
done();
}
};
opts.done = function(error) {
if (_.isUndefined(error)) {
deferred.resolve(allResources);
} else {
deferred.reject(error);
}
};
if (_.isFunction(callback)) {
deferred.promise.nodeify(callback);
}
this.each(opts);
return deferred.promise;
};
/* jshint ignore:start */
/**
* Retrieve a single page of ConnectAppInstance records from the API.
* Request is executed immediately
*
* If a function is passed as the first argument, it will be used as the callback function.
*
* @param {object} [opts] - Options for request
* @param {string} [opts.pageToken] - PageToken provided by the API
* @param {number} [opts.pageNumber] -
 *            Page number; this value is simply for client state
* @param {number} [opts.pageSize] - Number of records to return, defaults to 50
* @param {function} [callback] - Callback to handle list of records
*
* @returns {Promise} Resolves to a list of records
*/
/* jshint ignore:end */
ConnectAppListInstance.page = function page(opts, callback) {
if (_.isFunction(opts)) {
callback = opts;
opts = {};
}
opts = opts || {};
var deferred = Q.defer();
var data = values.of({
'PageToken': opts.pageToken,
'Page': opts.pageNumber,
'PageSize': opts.pageSize
});
var promise = this._version.page({uri: this._uri, method: 'GET', params: data});
promise = promise.then(function(payload) {
deferred.resolve(new ConnectAppPage(this._version, payload, this._solution));
}.bind(this));
promise.catch(function(error) {
deferred.reject(error);
});
if (_.isFunction(callback)) {
deferred.promise.nodeify(callback);
}
return deferred.promise;
};
/* jshint ignore:start */
/**
* Retrieve a single target page of ConnectAppInstance records from the API.
* Request is executed immediately
*
* If a function is passed as the first argument, it will be used as the callback function.
*
* @param {string} [targetUrl] - API-generated URL for the requested results page
* @param {function} [callback] - Callback to handle list of records
*
* @returns {Promise} Resolves to a list of records
*/
/* jshint ignore:end */
ConnectAppListInstance.getPage = function getPage(targetUrl, callback) {
var deferred = Q.defer();
var promise = this._version._domain.twilio.request({method: 'GET', uri: targetUrl});
promise = promise.then(function(payload) {
deferred.resolve(new ConnectAppPage(this._version, payload, this._solution));
}.bind(this));
promise.catch(function(error) {
deferred.reject(error);
});
if (_.isFunction(callback)) {
deferred.promise.nodeify(callback);
}
return deferred.promise;
};
/* jshint ignore:start */
/**
* Constructs a connect_app
*
* @param {string} sid - The unique string that identifies the resource
*
* @returns {Twilio.Api.V2010.AccountContext.ConnectAppContext}
*/
/* jshint ignore:end */
ConnectAppListInstance.get = function get(sid) {
return new ConnectAppContext(this._version, this._solution.accountSid, sid);
};
return ConnectAppListInstance;
};
/* jshint ignore:start */
/**
* Initialize the ConnectAppPage
*
* @param {V2010} version - Version of the resource
* @param {Response<string>} response - Response from the API
* @param {ConnectAppSolution} solution - Path solution
*
* @returns ConnectAppPage
*/
/* jshint ignore:end */
ConnectAppPage = function ConnectAppPage(version, response, solution) {
// Path Solution
this._solution = solution;
Page.prototype.constructor.call(this, version, response, this._solution);
};
_.extend(ConnectAppPage.prototype, Page.prototype);
ConnectAppPage.prototype.constructor = ConnectAppPage;
/* jshint ignore:start */
/**
* Build an instance of ConnectAppInstance
*
* @param {ConnectAppPayload} payload - Payload response from the API
*
* @returns ConnectAppInstance
*/
/* jshint ignore:end */
ConnectAppPage.prototype.getInstance = function getInstance(payload) {
return new ConnectAppInstance(this._version, payload, this._solution.accountSid);
};
/* jshint ignore:start */
/**
* Initialize the ConnectAppContext
*
* @property {string} accountSid - The SID of the Account that created the resource
* @property {string} authorizeRedirectUrl -
* The URL to redirect the user to after authorization
* @property {string} companyName - The company name set for the Connect App
* @property {string} deauthorizeCallbackMethod -
* The HTTP method we use to call deauthorize_callback_url
* @property {string} deauthorizeCallbackUrl -
* The URL we call to de-authorize the Connect App
* @property {string} description - The description of the Connect App
* @property {string} friendlyName -
* The string that you assigned to describe the resource
 * @property {string} homepageUrl - The URL where users can obtain more information
* @property {connect_app.permission} permissions -
* The set of permissions that your ConnectApp requests
* @property {string} sid - The unique string that identifies the resource
* @property {string} uri -
* The URI of the resource, relative to `https://api.twilio.com`
*
* @param {V2010} version - Version of the resource
* @param {ConnectAppPayload} payload - The instance payload
* @param {sid} accountSid - The SID of the Account that created the resource
* @param {sid} sid - The unique string that identifies the resource
*/
/* jshint ignore:end */
ConnectAppInstance = function ConnectAppInstance(version, payload, accountSid,
sid) {
this._version = version;
// Marshaled Properties
this.accountSid = payload.account_sid; // jshint ignore:line
this.authorizeRedirectUrl = payload.authorize_redirect_url; // jshint ignore:line
this.companyName = payload.company_name; // jshint ignore:line
this.deauthorizeCallbackMethod = payload.deauthorize_callback_method; // jshint ignore:line
this.deauthorizeCallbackUrl = payload.deauthorize_callback_url; // jshint ignore:line
this.description = payload.description; // jshint ignore:line
this.friendlyName = payload.friendly_name; // jshint ignore:line
this.homepageUrl = payload.homepage_url; // jshint ignore:line
this.permissions = payload.permissions; // jshint ignore:line
this.sid = payload.sid; // jshint ignore:line
this.uri = payload.uri; // jshint ignore:line
// Context
this._context = undefined;
this._solution = {accountSid: accountSid, sid: sid || this.sid, };
};
Object.defineProperty(ConnectAppInstance.prototype,
'_proxy', {
get: function() {
if (!this._context) {
this._context = new ConnectAppContext(this._version, this._solution.accountSid, this._solution.sid);
}
return this._context;
}
});
/* jshint ignore:start */
/**
* fetch a ConnectAppInstance
*
* @param {function} [callback] - Callback to handle processed record
 *
 * @returns {Promise} Resolves to processed ConnectAppInstance
 */
/* jshint ignore:end */
ConnectAppInstance.prototype.fetch = function fetch(callback) {
return this._proxy.fetch(callback);
};
/* jshint ignore:start */
/**
* update a ConnectAppInstance
*
* @param {object} [opts] - Options for request
* @param {string} [opts.authorizeRedirectUrl] -
* The URL to redirect the user to after authorization
* @param {string} [opts.companyName] - The company name to set for the Connect App
* @param {string} [opts.deauthorizeCallbackMethod] -
* The HTTP method to use when calling deauthorize_callback_url
* @param {string} [opts.deauthorizeCallbackUrl] -
* The URL to call to de-authorize the Connect App
* @param {string} [opts.description] - A description of the Connect App
* @param {string} [opts.friendlyName] - A string to describe the resource
* @param {string} [opts.homepageUrl] -
* A public URL where users can obtain more information
* @param {connect_app.permission|list} [opts.permissions] -
* The set of permissions that your ConnectApp will request
* @param {function} [callback] - Callback to handle processed record
*
* @returns {Promise} Resolves to processed ConnectAppInstance
*/
/* jshint ignore:end */
ConnectAppInstance.prototype.update = function update(opts, callback) {
return this._proxy.update(opts, callback);
};
/* jshint ignore:start */
/**
* Produce a plain JSON object version of the ConnectAppInstance for serialization.
* Removes any circular references in the object.
*
* @returns Object
*/
/* jshint ignore:end */
ConnectAppInstance.prototype.toJSON = function toJSON() {
let clone = {};
_.forOwn(this, function(value, key) {
if (!_.startsWith(key, '_') && ! _.isFunction(value)) {
clone[key] = value;
}
});
return clone;
};
/* jshint ignore:start */
/**
* Initialize the ConnectAppContext
*
* @param {V2010} version - Version of the resource
* @param {sid} accountSid -
* The SID of the Account that created the resource to fetch
* @param {sid} sid - The unique string that identifies the resource
*/
/* jshint ignore:end */
ConnectAppContext = function ConnectAppContext(version, accountSid, sid) {
this._version = version;
// Path Solution
this._solution = {accountSid: accountSid, sid: sid, };
this._uri = _.template(
'/Accounts/<%= accountSid %>/ConnectApps/<%= sid %>.json' // jshint ignore:line
)(this._solution);
};
/* jshint ignore:start */
/**
* fetch a ConnectAppInstance
*
* @param {function} [callback] - Callback to handle processed record
*
* @returns {Promise} Resolves to processed ConnectAppInstance
*/
/* jshint ignore:end */
ConnectAppContext.prototype.fetch = function fetch(callback) {
var deferred = Q.defer();
var promise = this._version.fetch({uri: this._uri, method: 'GET'});
promise = promise.then(function(payload) {
deferred.resolve(new ConnectAppInstance(
this._version,
payload,
this._solution.accountSid,
this._solution.sid
));
}.bind(this));
promise.catch(function(error) {
deferred.reject(error);
});
if (_.isFunction(callback)) {
deferred.promise.nodeify(callback);
}
return deferred.promise;
};
/* jshint ignore:start */
/**
* update a ConnectAppInstance
*
* @param {object} [opts] - Options for request
* @param {string} [opts.authorizeRedirectUrl] -
* The URL to redirect the user to after authorization
* @param {string} [opts.companyName] - The company name to set for the Connect App
* @param {string} [opts.deauthorizeCallbackMethod] -
* The HTTP method to use when calling deauthorize_callback_url
* @param {string} [opts.deauthorizeCallbackUrl] -
* The URL to call to de-authorize the Connect App
* @param {string} [opts.description] - A description of the Connect App
* @param {string} [opts.friendlyName] - A string to describe the resource
* @param {string} [opts.homepageUrl] -
* A public URL where users can obtain more information
* @param {connect_app.permission|list} [opts.permissions] -
* The set of permissions that your ConnectApp will request
* @param {function} [callback] - Callback to handle processed record
*
* @returns {Promise} Resolves to processed ConnectAppInstance
*/
/* jshint ignore:end */
ConnectAppContext.prototype.update = function update(opts, callback) {
if (_.isFunction(opts)) {
callback = opts;
opts = {};
}
opts = opts || {};
var deferred = Q.defer();
var data = values.of({
'AuthorizeRedirectUrl': _.get(opts, 'authorizeRedirectUrl'),
'CompanyName': _.get(opts, 'companyName'),
'DeauthorizeCallbackMethod': _.get(opts, 'deauthorizeCallbackMethod'),
'DeauthorizeCallbackUrl': _.get(opts, 'deauthorizeCallbackUrl'),
'Description': _.get(opts, 'description'),
'FriendlyName': _.get(opts, 'friendlyName'),
'HomepageUrl': _.get(opts, 'homepageUrl'),
'Permissions': serialize.map(_.get(opts, 'permissions'), function(e) { return e; })
});
var promise = this._version.update({uri: this._uri, method: 'POST', data: data});
promise = promise.then(function(payload) {
deferred.resolve(new ConnectAppInstance(
this._version,
payload,
this._solution.accountSid,
this._solution.sid
));
}.bind(this));
promise.catch(function(error) {
deferred.reject(error);
});
if (_.isFunction(callback)) {
deferred.promise.nodeify(callback);
}
return deferred.promise;
};
module.exports = {
ConnectAppList: ConnectAppList,
ConnectAppPage: ConnectAppPage,
ConnectAppInstance: ConnectAppInstance,
ConnectAppContext: ConnectAppContext
};
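// Usage sketch (illustrative, not part of the generated source): assuming an
// initialized REST client that exposes this list as `client.connectApps`,
// list() resolves to at most `limit` ConnectAppInstance records.
// client.connectApps.list({limit: 20}, function(err, connectApps) {
//   if (err) { return console.error(err); }
//   connectApps.forEach(function(app) { console.log(app.sid); });
// });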
|
|
util_io.py
|
# Copyright 2021 MIT Probabilistic Computing Project
# Apache License, Version 2.0, refer to LICENSE.txt
import ast
from . import hirm
def intify(x):
    """Convert a numeric string to int; leave other strings unchanged."""
if x.isnumeric():
assert int(x) == float(x)
return int(x)
return x
def load_schema(path):
"""Load a schema from path."""
signatures = {}
with open(path, 'r') as f:
for line in f:
parts = line.strip().split(' ')
assert 3 <= len(parts)
dist = parts[0]
assert dist == 'bernoulli'
feature = parts[1]
domains = tuple(parts[2:])
signatures[feature] = domains
return signatures
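# Illustrative schema line (relation and domain names are made up), matching
# the parser above:
#   bernoulli likes person person
# i.e. distribution name, relation/feature name, then one domain per argument.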
def load_observations(path):
"""Load a dataset from path."""
data = []
with open(path, 'r') as f:
for line in f:
parts = line.strip().split(' ')
assert 3 <= len(parts)
x = float(parts[0])
relation = parts[1]
items = tuple(intify(x) for x in parts[2:])
data.append((relation, items, x))
return data
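# Illustrative observation line (names are made up), matching the parser above:
#   1 likes alice bob
# i.e. the observed value, the relation name, then one item per domain.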
def load_clusters_irm(path):
"""Load clusters from path."""
clusters = {}
with open(path, 'r') as f:
for line in f:
parts = line.strip().split(' ')
assert 3 <= len(parts)
domain = parts[0]
table = int(parts[1])
items = tuple(intify(x) for x in parts[2:])
if domain not in clusters:
clusters[domain] = {}
clusters[domain][table] = items
return clusters
def load_clusters_hirm(path):
"""Load clusters from path."""
irms = {}
relations = {}
current_irm = 0
with open(path, 'r') as f:
for line in f:
parts = line.strip().split(' ')
if parts[0].isnumeric():
assert 2 <= len(parts)
table = int(parts[0])
items = tuple(parts[1:])
assert table not in relations
relations[table] = items
continue
if len(parts) == 1 and not parts[0]:
current_irm = None
continue
if len(parts) == 1 and parts[0].startswith('irm='):
assert current_irm is None
current_irm = int(parts[0].split('=')[1])
assert current_irm not in irms
irms[current_irm] = {}
continue
if 2 <= len(parts):
assert current_irm is not None
assert current_irm in irms
domain = parts[0]
table = int(parts[1])
items = tuple(intify(x) for x in parts[2:])
if domain not in irms[current_irm]:
irms[current_irm][domain] = {}
assert table not in irms[current_irm][domain]
irms[current_irm][domain][table] = items
continue
assert False, 'Failed to process line'
assert set(relations) == set(irms)
return relations, irms
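# Illustrative layout of the cluster file parsed by load_clusters_hirm above
# (all relation/domain/item names are made up):
#   0 likes knows          <- IRM table followed by its relation names
#   1 works_at
#                          <- blank line ends the relation section
#   irm=0
#   person 0 alice bob     <- domain, table, then items
#   person 1 carol
#
#   irm=1
#   person 0 alice carol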
# Serialization to/from JSON-compatible dictionaries.
# NB: Caveats of json.dumps
# - dict keys must be strings (no tuples).
# - integer dict keys are automatically stringified.
# - tuples are automatically converted to lists.
# - sets are not JSON serializable.
def to_dict_BetaBernoulli(x):
return {'alpha': x.alpha, 'beta': x.beta, 'N': x.N, 's': x.s}
def from_dict_BetaBernoulli(d, prng=None):
x = hirm.BetaBernoulli(alpha=d['alpha'], beta=d['beta'], prng=prng)
x.N = d['N']
x.s = d['s']
return x
def to_dict_CRP(x):
return {
'alpha': x.alpha,
'N': x.N,
'tables': {repr(t): list(v) for t,v in x.tables.items()},
'assignments': {repr(t): v for t,v in x.assignments.items()}
}
def from_dict_CRP(d, prng=None):
x = hirm.CRP(d['alpha'], prng=prng)
x.N = d['N']
x.tables = {ast.literal_eval(t): set(v) for t,v in d['tables'].items()}
x.assignments = {ast.literal_eval(t): v for t,v in d['assignments'].items()}
return x
def to_dict_Domain(x):
return {
'name': x.name,
'items': list(x.items),
'crp': to_dict_CRP(x.crp)
}
def from_dict_Domain(d, prng=None):
x = hirm.Domain(d['name'], prng=prng)
x.items = set(d['items'])
x.crp = from_dict_CRP(d['crp'])
return x
def to_dict_Relation(x):
return {
'name' : x.name,
'domains' : [d.name for d in x.domains], # Serialize names only.
'clusters' : {repr(c): to_dict_BetaBernoulli(v) for c,v in x.clusters.items()},
'data' : {repr(c): v for c,v in x.data.items()},
'data_r' : {
repr(k) : {repr(k1): list(v1) for k1, v1 in v.items()}
for k, v in x.data_r.items()
}
}
def from_dict_Relation(d, prng=None):
x = hirm.Relation(d['name'], [], prng=prng)
x.domains = d['domains']
x.clusters = {
ast.literal_eval(c): from_dict_BetaBernoulli(v, prng=prng)
for c,v in d['clusters'].items()
}
x.data = {ast.literal_eval(c): v for c,v in d['data'].items()}
x.data_r = {
ast.literal_eval(k): {
ast.literal_eval(k1): set(tuple(y) for y in v1)
for k1,v1 in v.items()
}
for k, v in d['data_r'].items()
}
return x
def to_dict_IRM(x):
return {
'schema': x.schema,
'domains': {k: to_dict_Domain(v) for k,v in x.domains.items()},
'relations': {k: to_dict_Relation(v) for k,v in x.relations.items()},
'domain_to_relations': {k: list(v) for k,v in x.domain_to_relations.items()}
}
def from_dict_IRM(d, prng=None):
x = hirm.IRM({}, prng=prng)
x.schema = d['schema']
x.domains = {k: from_dict_Domain(v, prng=prng) for k,v in d['domains'].items()}
x.relations = {k: from_dict_Relation(v, prng=prng) for k,v in d['relations'].items()}
x.domain_to_relations = {k: set(v) for k,v in d['domain_to_relations'].items()}
# Resolve Domain names into Domain objects.
for relation in x.relations.values():
relation.domains = tuple([x.domains[d] for d in relation.domains])
return x
def to_dict_HIRM(x):
return {
'schema': x.schema,
'crp': to_dict_CRP(x.crp),
'irms' : {k: to_dict_IRM(v) for k, v in x.irms.items()}
}
def from_dict_HIRM(d, prng=None):
x = hirm.HIRM({}, prng=prng)
x.schema = d['schema']
x.crp = from_dict_CRP(d['crp'], prng=prng)
x.irms = {int(k): from_dict_IRM(v, prng=prng) for k,v in d['irms'].items()}
return x
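# Round-trip sketch (illustrative; `model` is an assumed existing HIRM
# instance):
#   import json
#   s = json.dumps(to_dict_HIRM(model))
#   model2 = from_dict_HIRM(json.loads(s))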
def to_txt_irm(path, irm):
with open(path, 'w') as f:
for domain in irm.domains.values():
tables = sorted(domain.crp.tables)
for table in tables:
customers = domain.crp.tables[table]
customers_str = ' '.join(str(c) for c in customers)
f.write('%s %d %s' % (domain.name, table, customers_str))
f.write('\n')
def to_txt_hirm(path, hirm):
with open(path, 'w') as f:
tables = sorted(hirm.crp.tables)
for table in tables:
customers = hirm.crp.tables[table]
customers_str = ' '.join(str(c) for c in customers)
f.write('%d %s' % (table, customers_str))
f.write('\n')
f.write('\n')
        for j, table in enumerate(tables):
            f.write('irm=%d\n' % (table,))
            irm = hirm.irms[table]
            for domain in irm.domains.values():
                # Distinct loop variable avoids shadowing the outer `table`.
                for t, customers in domain.crp.tables.items():
                    customers_str = ' '.join(str(c) for c in customers)
                    f.write('%s %d %s' % (domain.name, t, customers_str))
                    f.write('\n')
            if j != len(hirm.irms) - 1:
                f.write('\n')
def from_txt_irm(path_schema, path_obs, path_clusters):
schema = load_schema(path_schema)
observations = load_observations(path_obs)
clusters = load_clusters_irm(path_clusters)
irm = hirm.IRM(schema)
for domain, tables in clusters.items():
for table, items in tables.items():
for item in items:
irm.domains[domain].incorporate(item, table=table)
for (relation, items, x) in observations:
irm.incorporate(relation, items, x)
return irm
def from_txt_hirm(path_schema, path_obs, path_clusters):
schema = load_schema(path_schema)
observations = load_observations(path_obs)
relations, irms = load_clusters_hirm(path_clusters)
hirmm = hirm.HIRM(schema)
for table in relations:
for relation in relations[table]:
if hirmm.crp.assignments[relation] != table:
hirmm.set_cluster_assignment_gibbs(relation, table)
irm = hirmm.irms[table]
for domain, tables in irms[table].items():
for t, items in tables.items():
for item in items:
irm.domains[domain].incorporate(item, table=t)
for (relation, items, x) in observations:
hirmm.incorporate(relation, items, x)
return hirmm
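# Illustrative reload from disk (the three file paths are assumptions):
#   irm = from_txt_irm('schema.txt', 'observations.txt', 'clusters.txt')
#   hirm_model = from_txt_hirm('schema.txt', 'observations.txt', 'clusters.txt')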
|
|
container.js
|
import React from "react"
import * as containerStyles from "./container.module.css"
|
export default function Container({ children }) {
return <div className={containerStyles.container}>{children}</div>
}
| |
game.7e80c355267946d50b29.js
|
!function(t){var e={};function n(r){if(e[r])return e[r].exports;var o=e[r]={i:r,l:!1,exports:{}};return t[r].call(o.exports,o,o.exports,n),o.l=!0,o.exports}n.m=t,n.c=e,n.d=function(t,e,r){n.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:r})},n.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},n.t=function(t,e){if(1&e&&(t=n(t)),8&e)return t;if(4&e&&"object"==typeof t&&t&&t.__esModule)return t;var r=Object.create(null);if(n.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:t}),2&e&&"string"!=typeof t)for(var o in t)n.d(r,o,function(e){return t[e]}.bind(null,o));return r},n.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return n.d(e,"a",e),e},n.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},n.p="",n(n.s=68)}([function(t,e,n){(function(r){e.log=function(...t){return"object"==typeof console&&console.log&&console.log(...t)},e.formatArgs=function(e){if(e[0]=(this.useColors?"%c":"")+this.namespace+(this.useColors?" %c":" ")+e[0]+(this.useColors?"%c ":" ")+"+"+t.exports.humanize(this.diff),!this.useColors)return;const n="color: "+this.color;e.splice(1,0,n,"color: inherit");let r=0,o=0;e[0].replace(/%[a-zA-Z%]/g,t=>{"%%"!==t&&(r++,"%c"===t&&(o=r))}),e.splice(o,0,n)},e.save=function(t){try{t?e.storage.setItem("debug",t):e.storage.removeItem("debug")}catch(t){}},e.load=function(){let t;try{t=e.storage.getItem("debug")}catch(t){}!t&&void 0!==r&&"env"in r&&(t=r.env.DEBUG);return t},e.useColors=function(){if("undefined"!=typeof window&&window.process&&("renderer"===window.process.type||window.process.__nwjs))return!0;if("undefined"!=typeof navigator&&navigator.userAgent&&navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/))return!1;return"undefined"!=typeof document&&document.documentElement&&document.documentElement.style&&document.documentElement.style.WebkitAppearance||"undefined"!=typeof window&&window.console&&(window.console.firebug||window.console.exception&&window.console.table)||"undefined"!=typeof navigator&&navigator.userAgent&&navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/)&&parseInt(RegExp.$1,10)>=31||"undefined"!=typeof navigator&&navigator.userAgent&&navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)},e.storage=function(){try{return localStorage}catch(t){}}(),e.colors=["#0000CC","#0000FF","#0033CC","#0033FF","#0066CC","#0066FF","#0099CC","#0099FF","#00CC00","#00CC33","#00CC66","#00CC99","#00CCCC","#00CCFF","#3300CC","#3300FF","#3333CC","#3333FF","#3366CC","#3366FF","#3399CC","#3399FF","#33CC00","#33CC33","#33CC66","#33CC99","#33CCCC","#33CCFF","#6600CC","#6600FF","#6633CC","#6633FF","#66CC00","#66CC33","#9900CC","#9900FF","#9933CC","#9933FF","#99CC00","#99CC33","#CC0000","#CC0033","#CC0066","#CC0099","#CC00CC","#CC00FF","#CC3300","#CC3333","#CC3366","#CC3399","#CC33CC","#CC33FF","#CC6600","#CC6633","#CC9900","#CC9933","#CCCC00","#CCCC33","#FF0000","#FF0033","#FF0066","#FF0099","#FF00CC","#FF00FF","#FF3300","#FF3333","#FF3366","#FF3399","#FF33CC","#FF33FF","#FF6600","#FF6633","#FF9900","#FF9933","#FFCC00","#FFCC33"],t.exports=n(28)(e);const{formatters:o}=t.exports;o.j=function(t){try{return JSON.stringify(t)}catch(t){return"[UnexpectedJSONParseError]: "+t.message}}}).call(this,n(12))},function(t,e,n){function r(t){if(t)return function(t){for(var e in r.prototype)t[e]=r.prototype[e];return 
t}(t)}t.exports=r,r.prototype.on=r.prototype.addEventListener=function(t,e){return this._callbacks=this._callbacks||{},(this._callbacks["$"+t]=this._callbacks["$"+t]||[]).push(e),this},r.prototype.once=function(t,e){function n(){this.off(t,n),e.apply(this,arguments)}return n.fn=e,this.on(t,n),this},r.prototype.off=r.prototype.removeListener=r.prototype.removeAllListeners=r.prototype.removeEventListener=function(t,e){if(this._callbacks=this._callbacks||{},0==arguments.length)return this._callbacks={},this;var n,r=this._callbacks["$"+t];if(!r)return this;if(1==arguments.length)return delete this._callbacks["$"+t],this;for(var o=0;o<r.length;o++)if((n=r[o])===e||n.fn===e){r.splice(o,1);break}return this},r.prototype.emit=function(t){this._callbacks=this._callbacks||{};var e=[].slice.call(arguments,1),n=this._callbacks["$"+t];if(n)for(var r=0,o=(n=n.slice(0)).length;r<o;++r)n[r].apply(this,e);return this},r.prototype.listeners=function(t){return this._callbacks=this._callbacks||{},this._callbacks["$"+t]||[]},r.prototype.hasListeners=function(t){return!!this.listeners(t).length}},function(t,e,n){var r,o=n(41),i=n(18),s=n(43),a=n(44),c=n(45);"undefined"!=typeof ArrayBuffer&&(r=n(46));var u="undefined"!=typeof navigator&&/Android/i.test(navigator.userAgent),h="undefined"!=typeof navigator&&/PhantomJS/i.test(navigator.userAgent),f=u||h;e.protocol=3;var p=e.packets={open:0,close:1,ping:2,pong:3,message:4,upgrade:5,noop:6},l=o(p),d={type:"error",data:"parser error"},y=n(47);function g(t,e,n){for(var r=new Array(t.length),o=a(t.length,n),i=function(t,n,o){e(n,(function(e,n){r[t]=n,o(e,r)}))},s=0;s<t.length;s++)i(s,t[s],o)}e.encodePacket=function(t,n,r,o){"function"==typeof n&&(o=n,n=!1),"function"==typeof r&&(o=r,r=null);var i=void 0===t.data?void 0:t.data.buffer||t.data;if("undefined"!=typeof ArrayBuffer&&i instanceof ArrayBuffer)return function(t,n,r){if(!n)return e.encodeBase64Packet(t,r);var o=t.data,i=new Uint8Array(o),s=new Uint8Array(1+o.byteLength);s[0]=p[t.type];for(var a=0;a<i.length;a++)s[a+1]=i[a];return r(s.buffer)}(t,n,o);if(void 0!==y&&i instanceof y)return function(t,n,r){if(!n)return e.encodeBase64Packet(t,r);if(f)return function(t,n,r){if(!n)return e.encodeBase64Packet(t,r);var o=new FileReader;return o.onload=function(){e.encodePacket({type:t.type,data:o.result},n,!0,r)},o.readAsArrayBuffer(t.data)}(t,n,r);var o=new Uint8Array(1);o[0]=p[t.type];var i=new y([o.buffer,t.data]);return r(i)}(t,n,o);if(i&&i.base64)return function(t,n){var r="b"+e.packets[t.type]+t.data.data;return n(r)}(t,o);var s=p[t.type];return void 0!==t.data&&(s+=r?c.encode(String(t.data),{strict:!1}):String(t.data)),o(""+s)},e.encodeBase64Packet=function(t,n){var r,o="b"+e.packets[t.type];if(void 0!==y&&t.data instanceof y){var i=new FileReader;return i.onload=function(){var t=i.result.split(",")[1];n(o+t)},i.readAsDataURL(t.data)}try{r=String.fromCharCode.apply(null,new Uint8Array(t.data))}catch(e){for(var s=new Uint8Array(t.data),a=new Array(s.length),c=0;c<s.length;c++)a[c]=s[c];r=String.fromCharCode.apply(null,a)}return o+=btoa(r),n(o)},e.decodePacket=function(t,n,r){if(void 0===t)return d;if("string"==typeof t){if("b"===t.charAt(0))return e.decodeBase64Packet(t.substr(1),n);if(r&&!1===(t=function(t){try{t=c.decode(t,{strict:!1})}catch(t){return!1}return t}(t)))return d;var o=t.charAt(0);return Number(o)==o&&l[o]?t.length>1?{type:l[o],data:t.substring(1)}:{type:l[o]}:d}o=new Uint8Array(t)[0];var i=s(t,1);return y&&"blob"===n&&(i=new y([i])),{type:l[o],data:i}},e.decodeBase64Packet=function(t,e){var 
n=l[t.charAt(0)];if(!r)return{type:n,data:{base64:!0,data:t.substr(1)}};var o=r.decode(t.substr(1));return"blob"===e&&y&&(o=new y([o])),{type:n,data:o}},e.encodePayload=function(t,n,r){"function"==typeof n&&(r=n,n=null);var o=i(t);if(n&&o)return y&&!f?e.encodePayloadAsBlob(t,r):e.encodePayloadAsArrayBuffer(t,r);if(!t.length)return r("0:");g(t,(function(t,r){e.encodePacket(t,!!o&&n,!1,(function(t){r(null,function(t){return t.length+":"+t}(t))}))}),(function(t,e){return r(e.join(""))}))},e.decodePayload=function(t,n,r){if("string"!=typeof t)return e.decodePayloadAsBinary(t,n,r);var o;if("function"==typeof n&&(r=n,n=null),""===t)return r(d,0,1);for(var i,s,a="",c=0,u=t.length;c<u;c++){var h=t.charAt(c);if(":"===h){if(""===a||a!=(i=Number(a)))return r(d,0,1);if(a!=(s=t.substr(c+1,i)).length)return r(d,0,1);if(s.length){if(o=e.decodePacket(s,n,!1),d.type===o.type&&d.data===o.data)return r(d,0,1);if(!1===r(o,c+i,u))return}c+=i,a=""}else a+=h}return""!==a?r(d,0,1):void 0},e.encodePayloadAsArrayBuffer=function(t,n){if(!t.length)return n(new ArrayBuffer(0));g(t,(function(t,n){e.encodePacket(t,!0,!0,(function(t){return n(null,t)}))}),(function(t,e){var r=e.reduce((function(t,e){var n;return t+(n="string"==typeof e?e.length:e.byteLength).toString().length+n+2}),0),o=new Uint8Array(r),i=0;return e.forEach((function(t){var e="string"==typeof t,n=t;if(e){for(var r=new Uint8Array(t.length),s=0;s<t.length;s++)r[s]=t.charCodeAt(s);n=r.buffer}o[i++]=e?0:1;var a=n.byteLength.toString();for(s=0;s<a.length;s++)o[i++]=parseInt(a[s]);o[i++]=255;for(r=new Uint8Array(n),s=0;s<r.length;s++)o[i++]=r[s]})),n(o.buffer)}))},e.encodePayloadAsBlob=function(t,n){g(t,(function(t,n){e.encodePacket(t,!0,!0,(function(t){var e=new Uint8Array(1);if(e[0]=1,"string"==typeof t){for(var r=new Uint8Array(t.length),o=0;o<t.length;o++)r[o]=t.charCodeAt(o);t=r.buffer,e[0]=0}var i=(t instanceof ArrayBuffer?t.byteLength:t.size).toString(),s=new Uint8Array(i.length+1);for(o=0;o<i.length;o++)s[o]=parseInt(i[o]);if(s[i.length]=255,y){var a=new y([e.buffer,s.buffer,t]);n(null,a)}}))}),(function(t,e){return n(new y(e))}))},e.decodePayloadAsBinary=function(t,n,r){"function"==typeof n&&(r=n,n=null);for(var o=t,i=[];o.byteLength>0;){for(var a=new Uint8Array(o),c=0===a[0],u="",h=1;255!==a[h];h++){if(u.length>310)return r(d,0,1);u+=a[h]}o=s(o,2+u.length),u=parseInt(u);var f=s(o,0,u);if(c)try{f=String.fromCharCode.apply(null,new Uint8Array(f))}catch(t){var p=new Uint8Array(f);f="";for(h=0;h<p.length;h++)f+=String.fromCharCode(p[h])}i.push(f),o=s(o,u)}var l=i.length;i.forEach((function(t,o){r(e.decodePacket(t,n,!0),o,l)}))}},function(t,e){e.encode=function(t){var e="";for(var n in t)t.hasOwnProperty(n)&&(e.length&&(e+="&"),e+=encodeURIComponent(n)+"="+encodeURIComponent(t[n]));return e},e.decode=function(t){for(var e={},n=t.split("&"),r=0,o=n.length;r<o;r++){var i=n[r].split("=");e[decodeURIComponent(i[0])]=decodeURIComponent(i[1])}return e}},function(t,e){t.exports=function(t,e){var n=function(){};n.prototype=e.prototype,t.prototype=new n,t.prototype.constructor=t}},function(t,e,n){var r=n(30)("socket.io-parser"),o=n(1),i=n(33),s=n(13),a=n(14);function c(){}e.protocol=4,e.types=["CONNECT","DISCONNECT","EVENT","ACK","ERROR","BINARY_EVENT","BINARY_ACK"],e.CONNECT=0,e.DISCONNECT=1,e.EVENT=2,e.ACK=3,e.ERROR=4,e.BINARY_EVENT=5,e.BINARY_ACK=6,e.Encoder=c,e.Decoder=f;var u=e.ERROR+'"encode error"';function h(t){var 
n=""+t.type;if(e.BINARY_EVENT!==t.type&&e.BINARY_ACK!==t.type||(n+=t.attachments+"-"),t.nsp&&"/"!==t.nsp&&(n+=t.nsp+","),null!=t.id&&(n+=t.id),null!=t.data){var o=function(t){try{return JSON.stringify(t)}catch(t){return!1}}(t.data);if(!1===o)return u;n+=o}return r("encoded %j as %s",t,n),n}function f(){this.reconstructor=null}function p(t){this.reconPack=t,this.buffers=[]}function l(t){return{type:e.ERROR,data:"parser error: "+t}}c.prototype.encode=function(t,n){(r("encoding packet %j",t),e.BINARY_EVENT===t.type||e.BINARY_ACK===t.type)?function(t,e){i.removeBlobs(t,(function(t){var n=i.deconstructPacket(t),r=h(n.packet),o=n.buffers;o.unshift(r),e(o)}))}(t,n):n([h(t)])},o(f.prototype),f.prototype.add=function(t){var n;if("string"==typeof t)n=function(t){var n=0,o={type:Number(t.charAt(0))};if(null==e.types[o.type])return l("unknown packet type "+o.type);if(e.BINARY_EVENT===o.type||e.BINARY_ACK===o.type){for(var i="";"-"!==t.charAt(++n)&&(i+=t.charAt(n),n!=t.length););if(i!=Number(i)||"-"!==t.charAt(n))throw new Error("Illegal attachments");o.attachments=Number(i)}if("/"===t.charAt(n+1))for(o.nsp="";++n;){if(","===(c=t.charAt(n)))break;if(o.nsp+=c,n===t.length)break}else o.nsp="/";var a=t.charAt(n+1);if(""!==a&&Number(a)==a){for(o.id="";++n;){var c;if(null==(c=t.charAt(n))||Number(c)!=c){--n;break}if(o.id+=t.charAt(n),n===t.length)break}o.id=Number(o.id)}if(t.charAt(++n)){var u=function(t){try{return JSON.parse(t)}catch(t){return!1}}(t.substr(n));if(!(!1!==u&&(o.type===e.ERROR||s(u))))return l("invalid payload");o.data=u}return r("decoded %s as %j",t,o),o}(t),e.BINARY_EVENT===n.type||e.BINARY_ACK===n.type?(this.reconstructor=new p(n),0===this.reconstructor.reconPack.attachments&&this.emit("decoded",n)):this.emit("decoded",n);else{if(!a(t)&&!t.base64)throw new Error("Unknown type: "+t);if(!this.reconstructor)throw new Error("got binary data when not reconstructing a packet");(n=this.reconstructor.takeBinaryData(t))&&(this.reconstructor=null,this.emit("decoded",n))}},f.prototype.destroy=function(){this.reconstructor&&this.reconstructor.finishedReconstruction()},p.prototype.takeBinaryData=function(t){if(this.buffers.push(t),this.buffers.length===this.reconPack.attachments){var e=i.reconstructPacket(this.reconPack,this.buffers);return this.finishedReconstruction(),e}return null},p.prototype.finishedReconstruction=function(){this.reconPack=null,this.buffers=[]}},function(t,e,n){"use strict";(function(t){
/*!
* The buffer module from node.js, for the browser.
|
*/
var r=n(34),o=n(35),i=n(36);function s(){return c.TYPED_ARRAY_SUPPORT?2147483647:1073741823}function a(t,e){if(s()<e)throw new RangeError("Invalid typed array length");return c.TYPED_ARRAY_SUPPORT?(t=new Uint8Array(e)).__proto__=c.prototype:(null===t&&(t=new c(e)),t.length=e),t}function c(t,e,n){if(!(c.TYPED_ARRAY_SUPPORT||this instanceof c))return new c(t,e,n);if("number"==typeof t){if("string"==typeof e)throw new Error("If encoding is specified then the first argument must be a string");return f(this,t)}return u(this,t,e,n)}function u(t,e,n,r){if("number"==typeof e)throw new TypeError('"value" argument must not be a number');return"undefined"!=typeof ArrayBuffer&&e instanceof ArrayBuffer?function(t,e,n,r){if(e.byteLength,n<0||e.byteLength<n)throw new RangeError("'offset' is out of bounds");if(e.byteLength<n+(r||0))throw new RangeError("'length' is out of bounds");e=void 0===n&&void 0===r?new Uint8Array(e):void 0===r?new Uint8Array(e,n):new Uint8Array(e,n,r);c.TYPED_ARRAY_SUPPORT?(t=e).__proto__=c.prototype:t=p(t,e);return t}(t,e,n,r):"string"==typeof e?function(t,e,n){"string"==typeof n&&""!==n||(n="utf8");if(!c.isEncoding(n))throw new TypeError('"encoding" must be a valid string encoding');var r=0|d(e,n),o=(t=a(t,r)).write(e,n);o!==r&&(t=t.slice(0,o));return t}(t,e,n):function(t,e){if(c.isBuffer(e)){var n=0|l(e.length);return 0===(t=a(t,n)).length||e.copy(t,0,0,n),t}if(e){if("undefined"!=typeof ArrayBuffer&&e.buffer instanceof ArrayBuffer||"length"in e)return"number"!=typeof e.length||(r=e.length)!=r?a(t,0):p(t,e);if("Buffer"===e.type&&i(e.data))return p(t,e.data)}var r;throw new TypeError("First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.")}(t,e)}function h(t){if("number"!=typeof t)throw new TypeError('"size" argument must be a number');if(t<0)throw new RangeError('"size" argument must not be negative')}function f(t,e){if(h(e),t=a(t,e<0?0:0|l(e)),!c.TYPED_ARRAY_SUPPORT)for(var n=0;n<e;++n)t[n]=0;return t}function p(t,e){var n=e.length<0?0:0|l(e.length);t=a(t,n);for(var r=0;r<n;r+=1)t[r]=255&e[r];return t}function l(t){if(t>=s())throw new RangeError("Attempt to allocate Buffer larger than maximum size: 0x"+s().toString(16)+" bytes");return 0|t}function d(t,e){if(c.isBuffer(t))return t.length;if("undefined"!=typeof ArrayBuffer&&"function"==typeof ArrayBuffer.isView&&(ArrayBuffer.isView(t)||t instanceof ArrayBuffer))return t.byteLength;"string"!=typeof t&&(t=""+t);var n=t.length;if(0===n)return 0;for(var r=!1;;)switch(e){case"ascii":case"latin1":case"binary":return n;case"utf8":case"utf-8":case void 0:return j(t).length;case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return 2*n;case"hex":return n>>>1;case"base64":return q(t).length;default:if(r)return j(t).length;e=(""+e).toLowerCase(),r=!0}}function y(t,e,n){var r=!1;if((void 0===e||e<0)&&(e=0),e>this.length)return"";if((void 0===n||n>this.length)&&(n=this.length),n<=0)return"";if((n>>>=0)<=(e>>>=0))return"";for(t||(t="utf8");;)switch(t){case"hex":return T(this,e,n);case"utf8":case"utf-8":return x(this,e,n);case"ascii":return R(this,e,n);case"latin1":case"binary":return S(this,e,n);case"base64":return B(this,e,n);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return P(this,e,n);default:if(r)throw new TypeError("Unknown encoding: "+t);t=(t+"").toLowerCase(),r=!0}}function g(t,e,n){var r=t[e];t[e]=t[n],t[n]=r}function m(t,e,n,r,o){if(0===t.length)return-1;if("string"==typeof 
n?(r=n,n=0):n>2147483647?n=2147483647:n<-2147483648&&(n=-2147483648),n=+n,isNaN(n)&&(n=o?0:t.length-1),n<0&&(n=t.length+n),n>=t.length){if(o)return-1;n=t.length-1}else if(n<0){if(!o)return-1;n=0}if("string"==typeof e&&(e=c.from(e,r)),c.isBuffer(e))return 0===e.length?-1:v(t,e,n,r,o);if("number"==typeof e)return e&=255,c.TYPED_ARRAY_SUPPORT&&"function"==typeof Uint8Array.prototype.indexOf?o?Uint8Array.prototype.indexOf.call(t,e,n):Uint8Array.prototype.lastIndexOf.call(t,e,n):v(t,[e],n,r,o);throw new TypeError("val must be string, number or Buffer")}function v(t,e,n,r,o){var i,s=1,a=t.length,c=e.length;if(void 0!==r&&("ucs2"===(r=String(r).toLowerCase())||"ucs-2"===r||"utf16le"===r||"utf-16le"===r)){if(t.length<2||e.length<2)return-1;s=2,a/=2,c/=2,n/=2}function u(t,e){return 1===s?t[e]:t.readUInt16BE(e*s)}if(o){var h=-1;for(i=n;i<a;i++)if(u(t,i)===u(e,-1===h?0:i-h)){if(-1===h&&(h=i),i-h+1===c)return h*s}else-1!==h&&(i-=i-h),h=-1}else for(n+c>a&&(n=a-c),i=n;i>=0;i--){for(var f=!0,p=0;p<c;p++)if(u(t,i+p)!==u(e,p)){f=!1;break}if(f)return i}return-1}function w(t,e,n,r){n=Number(n)||0;var o=t.length-n;r?(r=Number(r))>o&&(r=o):r=o;var i=e.length;if(i%2!=0)throw new TypeError("Invalid hex string");r>i/2&&(r=i/2);for(var s=0;s<r;++s){var a=parseInt(e.substr(2*s,2),16);if(isNaN(a))return s;t[n+s]=a}return s}function b(t,e,n,r){return Y(j(e,t.length-n),t,n,r)}function C(t,e,n,r){return Y(function(t){for(var e=[],n=0;n<t.length;++n)e.push(255&t.charCodeAt(n));return e}(e),t,n,r)}function A(t,e,n,r){return C(t,e,n,r)}function E(t,e,n,r){return Y(q(e),t,n,r)}function k(t,e,n,r){return Y(function(t,e){for(var n,r,o,i=[],s=0;s<t.length&&!((e-=2)<0);++s)n=t.charCodeAt(s),r=n>>8,o=n%256,i.push(o),i.push(r);return i}(e,t.length-n),t,n,r)}function B(t,e,n){return 0===e&&n===t.length?r.fromByteArray(t):r.fromByteArray(t.slice(e,n))}function x(t,e,n){n=Math.min(t.length,n);for(var r=[],o=e;o<n;){var i,s,a,c,u=t[o],h=null,f=u>239?4:u>223?3:u>191?2:1;if(o+f<=n)switch(f){case 1:u<128&&(h=u);break;case 2:128==(192&(i=t[o+1]))&&(c=(31&u)<<6|63&i)>127&&(h=c);break;case 3:i=t[o+1],s=t[o+2],128==(192&i)&&128==(192&s)&&(c=(15&u)<<12|(63&i)<<6|63&s)>2047&&(c<55296||c>57343)&&(h=c);break;case 4:i=t[o+1],s=t[o+2],a=t[o+3],128==(192&i)&&128==(192&s)&&128==(192&a)&&(c=(15&u)<<18|(63&i)<<12|(63&s)<<6|63&a)>65535&&c<1114112&&(h=c)}null===h?(h=65533,f=1):h>65535&&(h-=65536,r.push(h>>>10&1023|55296),h=56320|1023&h),r.push(h),o+=f}return function(t){var e=t.length;if(e<=4096)return String.fromCharCode.apply(String,t);var n="",r=0;for(;r<e;)n+=String.fromCharCode.apply(String,t.slice(r,r+=4096));return n}(r)}e.Buffer=c,e.SlowBuffer=function(t){+t!=t&&(t=0);return c.alloc(+t)},e.INSPECT_MAX_BYTES=50,c.TYPED_ARRAY_SUPPORT=void 0!==t.TYPED_ARRAY_SUPPORT?t.TYPED_ARRAY_SUPPORT:function(){try{var t=new Uint8Array(1);return t.__proto__={__proto__:Uint8Array.prototype,foo:function(){return 42}},42===t.foo()&&"function"==typeof t.subarray&&0===t.subarray(1,1).byteLength}catch(t){return!1}}(),e.kMaxLength=s(),c.poolSize=8192,c._augment=function(t){return t.__proto__=c.prototype,t},c.from=function(t,e,n){return u(null,t,e,n)},c.TYPED_ARRAY_SUPPORT&&(c.prototype.__proto__=Uint8Array.prototype,c.__proto__=Uint8Array,"undefined"!=typeof Symbol&&Symbol.species&&c[Symbol.species]===c&&Object.defineProperty(c,Symbol.species,{value:null,configurable:!0})),c.alloc=function(t,e,n){return function(t,e,n,r){return h(e),e<=0?a(t,e):void 0!==n?"string"==typeof r?a(t,e).fill(n,r):a(t,e).fill(n):a(t,e)}(null,t,e,n)},c.allocUnsafe=function(t){return 
f(null,t)},c.allocUnsafeSlow=function(t){return f(null,t)},c.isBuffer=function(t){return!(null==t||!t._isBuffer)},c.compare=function(t,e){if(!c.isBuffer(t)||!c.isBuffer(e))throw new TypeError("Arguments must be Buffers");if(t===e)return 0;for(var n=t.length,r=e.length,o=0,i=Math.min(n,r);o<i;++o)if(t[o]!==e[o]){n=t[o],r=e[o];break}return n<r?-1:r<n?1:0},c.isEncoding=function(t){switch(String(t).toLowerCase()){case"hex":case"utf8":case"utf-8":case"ascii":case"latin1":case"binary":case"base64":case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return!0;default:return!1}},c.concat=function(t,e){if(!i(t))throw new TypeError('"list" argument must be an Array of Buffers');if(0===t.length)return c.alloc(0);var n;if(void 0===e)for(e=0,n=0;n<t.length;++n)e+=t[n].length;var r=c.allocUnsafe(e),o=0;for(n=0;n<t.length;++n){var s=t[n];if(!c.isBuffer(s))throw new TypeError('"list" argument must be an Array of Buffers');s.copy(r,o),o+=s.length}return r},c.byteLength=d,c.prototype._isBuffer=!0,c.prototype.swap16=function(){var t=this.length;if(t%2!=0)throw new RangeError("Buffer size must be a multiple of 16-bits");for(var e=0;e<t;e+=2)g(this,e,e+1);return this},c.prototype.swap32=function(){var t=this.length;if(t%4!=0)throw new RangeError("Buffer size must be a multiple of 32-bits");for(var e=0;e<t;e+=4)g(this,e,e+3),g(this,e+1,e+2);return this},c.prototype.swap64=function(){var t=this.length;if(t%8!=0)throw new RangeError("Buffer size must be a multiple of 64-bits");for(var e=0;e<t;e+=8)g(this,e,e+7),g(this,e+1,e+6),g(this,e+2,e+5),g(this,e+3,e+4);return this},c.prototype.toString=function(){var t=0|this.length;return 0===t?"":0===arguments.length?x(this,0,t):y.apply(this,arguments)},c.prototype.equals=function(t){if(!c.isBuffer(t))throw new TypeError("Argument must be a Buffer");return this===t||0===c.compare(this,t)},c.prototype.inspect=function(){var t="",n=e.INSPECT_MAX_BYTES;return this.length>0&&(t=this.toString("hex",0,n).match(/.{2}/g).join(" "),this.length>n&&(t+=" ... 
")),"<Buffer "+t+">"},c.prototype.compare=function(t,e,n,r,o){if(!c.isBuffer(t))throw new TypeError("Argument must be a Buffer");if(void 0===e&&(e=0),void 0===n&&(n=t?t.length:0),void 0===r&&(r=0),void 0===o&&(o=this.length),e<0||n>t.length||r<0||o>this.length)throw new RangeError("out of range index");if(r>=o&&e>=n)return 0;if(r>=o)return-1;if(e>=n)return 1;if(this===t)return 0;for(var i=(o>>>=0)-(r>>>=0),s=(n>>>=0)-(e>>>=0),a=Math.min(i,s),u=this.slice(r,o),h=t.slice(e,n),f=0;f<a;++f)if(u[f]!==h[f]){i=u[f],s=h[f];break}return i<s?-1:s<i?1:0},c.prototype.includes=function(t,e,n){return-1!==this.indexOf(t,e,n)},c.prototype.indexOf=function(t,e,n){return m(this,t,e,n,!0)},c.prototype.lastIndexOf=function(t,e,n){return m(this,t,e,n,!1)},c.prototype.write=function(t,e,n,r){if(void 0===e)r="utf8",n=this.length,e=0;else if(void 0===n&&"string"==typeof e)r=e,n=this.length,e=0;else{if(!isFinite(e))throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");e|=0,isFinite(n)?(n|=0,void 0===r&&(r="utf8")):(r=n,n=void 0)}var o=this.length-e;if((void 0===n||n>o)&&(n=o),t.length>0&&(n<0||e<0)||e>this.length)throw new RangeError("Attempt to write outside buffer bounds");r||(r="utf8");for(var i=!1;;)switch(r){case"hex":return w(this,t,e,n);case"utf8":case"utf-8":return b(this,t,e,n);case"ascii":return C(this,t,e,n);case"latin1":case"binary":return A(this,t,e,n);case"base64":return E(this,t,e,n);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return k(this,t,e,n);default:if(i)throw new TypeError("Unknown encoding: "+r);r=(""+r).toLowerCase(),i=!0}},c.prototype.toJSON=function(){return{type:"Buffer",data:Array.prototype.slice.call(this._arr||this,0)}};function R(t,e,n){var r="";n=Math.min(t.length,n);for(var o=e;o<n;++o)r+=String.fromCharCode(127&t[o]);return r}function S(t,e,n){var r="";n=Math.min(t.length,n);for(var o=e;o<n;++o)r+=String.fromCharCode(t[o]);return r}function T(t,e,n){var r=t.length;(!e||e<0)&&(e=0),(!n||n<0||n>r)&&(n=r);for(var o="",i=e;i<n;++i)o+=M(t[i]);return o}function P(t,e,n){for(var r=t.slice(e,n),o="",i=0;i<r.length;i+=2)o+=String.fromCharCode(r[i]+256*r[i+1]);return o}function _(t,e,n){if(t%1!=0||t<0)throw new RangeError("offset is not uint");if(t+e>n)throw new RangeError("Trying to access beyond buffer length")}function F(t,e,n,r,o,i){if(!c.isBuffer(t))throw new TypeError('"buffer" argument must be a Buffer instance');if(e>o||e<i)throw new RangeError('"value" argument is out of bounds');if(n+r>t.length)throw new RangeError("Index out of range")}function O(t,e,n,r){e<0&&(e=65535+e+1);for(var o=0,i=Math.min(t.length-n,2);o<i;++o)t[n+o]=(e&255<<8*(r?o:1-o))>>>8*(r?o:1-o)}function U(t,e,n,r){e<0&&(e=4294967295+e+1);for(var o=0,i=Math.min(t.length-n,4);o<i;++o)t[n+o]=e>>>8*(r?o:3-o)&255}function I(t,e,n,r,o,i){if(n+r>t.length)throw new RangeError("Index out of range");if(n<0)throw new RangeError("Index out of range")}function L(t,e,n,r,i){return i||I(t,0,n,4),o.write(t,e,n,r,23,4),n+4}function N(t,e,n,r,i){return i||I(t,0,n,8),o.write(t,e,n,r,52,8),n+8}c.prototype.slice=function(t,e){var n,r=this.length;if((t=~~t)<0?(t+=r)<0&&(t=0):t>r&&(t=r),(e=void 0===e?r:~~e)<0?(e+=r)<0&&(e=0):e>r&&(e=r),e<t&&(e=t),c.TYPED_ARRAY_SUPPORT)(n=this.subarray(t,e)).__proto__=c.prototype;else{var o=e-t;n=new c(o,void 0);for(var i=0;i<o;++i)n[i]=this[i+t]}return n},c.prototype.readUIntLE=function(t,e,n){t|=0,e|=0,n||_(t,e,this.length);for(var r=this[t],o=1,i=0;++i<e&&(o*=256);)r+=this[t+i]*o;return 
r},c.prototype.readUIntBE=function(t,e,n){t|=0,e|=0,n||_(t,e,this.length);for(var r=this[t+--e],o=1;e>0&&(o*=256);)r+=this[t+--e]*o;return r},c.prototype.readUInt8=function(t,e){return e||_(t,1,this.length),this[t]},c.prototype.readUInt16LE=function(t,e){return e||_(t,2,this.length),this[t]|this[t+1]<<8},c.prototype.readUInt16BE=function(t,e){return e||_(t,2,this.length),this[t]<<8|this[t+1]},c.prototype.readUInt32LE=function(t,e){return e||_(t,4,this.length),(this[t]|this[t+1]<<8|this[t+2]<<16)+16777216*this[t+3]},c.prototype.readUInt32BE=function(t,e){return e||_(t,4,this.length),16777216*this[t]+(this[t+1]<<16|this[t+2]<<8|this[t+3])},c.prototype.readIntLE=function(t,e,n){t|=0,e|=0,n||_(t,e,this.length);for(var r=this[t],o=1,i=0;++i<e&&(o*=256);)r+=this[t+i]*o;return r>=(o*=128)&&(r-=Math.pow(2,8*e)),r},c.prototype.readIntBE=function(t,e,n){t|=0,e|=0,n||_(t,e,this.length);for(var r=e,o=1,i=this[t+--r];r>0&&(o*=256);)i+=this[t+--r]*o;return i>=(o*=128)&&(i-=Math.pow(2,8*e)),i},c.prototype.readInt8=function(t,e){return e||_(t,1,this.length),128&this[t]?-1*(255-this[t]+1):this[t]},c.prototype.readInt16LE=function(t,e){e||_(t,2,this.length);var n=this[t]|this[t+1]<<8;return 32768&n?4294901760|n:n},c.prototype.readInt16BE=function(t,e){e||_(t,2,this.length);var n=this[t+1]|this[t]<<8;return 32768&n?4294901760|n:n},c.prototype.readInt32LE=function(t,e){return e||_(t,4,this.length),this[t]|this[t+1]<<8|this[t+2]<<16|this[t+3]<<24},c.prototype.readInt32BE=function(t,e){return e||_(t,4,this.length),this[t]<<24|this[t+1]<<16|this[t+2]<<8|this[t+3]},c.prototype.readFloatLE=function(t,e){return e||_(t,4,this.length),o.read(this,t,!0,23,4)},c.prototype.readFloatBE=function(t,e){return e||_(t,4,this.length),o.read(this,t,!1,23,4)},c.prototype.readDoubleLE=function(t,e){return e||_(t,8,this.length),o.read(this,t,!0,52,8)},c.prototype.readDoubleBE=function(t,e){return e||_(t,8,this.length),o.read(this,t,!1,52,8)},c.prototype.writeUIntLE=function(t,e,n,r){(t=+t,e|=0,n|=0,r)||F(this,t,e,n,Math.pow(2,8*n)-1,0);var o=1,i=0;for(this[e]=255&t;++i<n&&(o*=256);)this[e+i]=t/o&255;return e+n},c.prototype.writeUIntBE=function(t,e,n,r){(t=+t,e|=0,n|=0,r)||F(this,t,e,n,Math.pow(2,8*n)-1,0);var o=n-1,i=1;for(this[e+o]=255&t;--o>=0&&(i*=256);)this[e+o]=t/i&255;return e+n},c.prototype.writeUInt8=function(t,e,n){return t=+t,e|=0,n||F(this,t,e,1,255,0),c.TYPED_ARRAY_SUPPORT||(t=Math.floor(t)),this[e]=255&t,e+1},c.prototype.writeUInt16LE=function(t,e,n){return t=+t,e|=0,n||F(this,t,e,2,65535,0),c.TYPED_ARRAY_SUPPORT?(this[e]=255&t,this[e+1]=t>>>8):O(this,t,e,!0),e+2},c.prototype.writeUInt16BE=function(t,e,n){return t=+t,e|=0,n||F(this,t,e,2,65535,0),c.TYPED_ARRAY_SUPPORT?(this[e]=t>>>8,this[e+1]=255&t):O(this,t,e,!1),e+2},c.prototype.writeUInt32LE=function(t,e,n){return t=+t,e|=0,n||F(this,t,e,4,4294967295,0),c.TYPED_ARRAY_SUPPORT?(this[e+3]=t>>>24,this[e+2]=t>>>16,this[e+1]=t>>>8,this[e]=255&t):U(this,t,e,!0),e+4},c.prototype.writeUInt32BE=function(t,e,n){return t=+t,e|=0,n||F(this,t,e,4,4294967295,0),c.TYPED_ARRAY_SUPPORT?(this[e]=t>>>24,this[e+1]=t>>>16,this[e+2]=t>>>8,this[e+3]=255&t):U(this,t,e,!1),e+4},c.prototype.writeIntLE=function(t,e,n,r){if(t=+t,e|=0,!r){var o=Math.pow(2,8*n-1);F(this,t,e,n,o-1,-o)}var i=0,s=1,a=0;for(this[e]=255&t;++i<n&&(s*=256);)t<0&&0===a&&0!==this[e+i-1]&&(a=1),this[e+i]=(t/s>>0)-a&255;return e+n},c.prototype.writeIntBE=function(t,e,n,r){if(t=+t,e|=0,!r){var o=Math.pow(2,8*n-1);F(this,t,e,n,o-1,-o)}var 
i=n-1,s=1,a=0;for(this[e+i]=255&t;--i>=0&&(s*=256);)t<0&&0===a&&0!==this[e+i+1]&&(a=1),this[e+i]=(t/s>>0)-a&255;return e+n},c.prototype.writeInt8=function(t,e,n){return t=+t,e|=0,n||F(this,t,e,1,127,-128),c.TYPED_ARRAY_SUPPORT||(t=Math.floor(t)),t<0&&(t=255+t+1),this[e]=255&t,e+1},c.prototype.writeInt16LE=function(t,e,n){return t=+t,e|=0,n||F(this,t,e,2,32767,-32768),c.TYPED_ARRAY_SUPPORT?(this[e]=255&t,this[e+1]=t>>>8):O(this,t,e,!0),e+2},c.prototype.writeInt16BE=function(t,e,n){return t=+t,e|=0,n||F(this,t,e,2,32767,-32768),c.TYPED_ARRAY_SUPPORT?(this[e]=t>>>8,this[e+1]=255&t):O(this,t,e,!1),e+2},c.prototype.writeInt32LE=function(t,e,n){return t=+t,e|=0,n||F(this,t,e,4,2147483647,-2147483648),c.TYPED_ARRAY_SUPPORT?(this[e]=255&t,this[e+1]=t>>>8,this[e+2]=t>>>16,this[e+3]=t>>>24):U(this,t,e,!0),e+4},c.prototype.writeInt32BE=function(t,e,n){return t=+t,e|=0,n||F(this,t,e,4,2147483647,-2147483648),t<0&&(t=4294967295+t+1),c.TYPED_ARRAY_SUPPORT?(this[e]=t>>>24,this[e+1]=t>>>16,this[e+2]=t>>>8,this[e+3]=255&t):U(this,t,e,!1),e+4},c.prototype.writeFloatLE=function(t,e,n){return L(this,t,e,!0,n)},c.prototype.writeFloatBE=function(t,e,n){return L(this,t,e,!1,n)},c.prototype.writeDoubleLE=function(t,e,n){return N(this,t,e,!0,n)},c.prototype.writeDoubleBE=function(t,e,n){return N(this,t,e,!1,n)},c.prototype.copy=function(t,e,n,r){if(n||(n=0),r||0===r||(r=this.length),e>=t.length&&(e=t.length),e||(e=0),r>0&&r<n&&(r=n),r===n)return 0;if(0===t.length||0===this.length)return 0;if(e<0)throw new RangeError("targetStart out of bounds");if(n<0||n>=this.length)throw new RangeError("sourceStart out of bounds");if(r<0)throw new RangeError("sourceEnd out of bounds");r>this.length&&(r=this.length),t.length-e<r-n&&(r=t.length-e+n);var o,i=r-n;if(this===t&&n<e&&e<r)for(o=i-1;o>=0;--o)t[o+e]=this[o+n];else if(i<1e3||!c.TYPED_ARRAY_SUPPORT)for(o=0;o<i;++o)t[o+e]=this[o+n];else Uint8Array.prototype.set.call(t,this.subarray(n,n+i),e);return i},c.prototype.fill=function(t,e,n,r){if("string"==typeof t){if("string"==typeof e?(r=e,e=0,n=this.length):"string"==typeof n&&(r=n,n=this.length),1===t.length){var o=t.charCodeAt(0);o<256&&(t=o)}if(void 0!==r&&"string"!=typeof r)throw new TypeError("encoding must be a string");if("string"==typeof r&&!c.isEncoding(r))throw new TypeError("Unknown encoding: "+r)}else"number"==typeof t&&(t&=255);if(e<0||this.length<e||this.length<n)throw new RangeError("Out of range index");if(n<=e)return this;var i;if(e>>>=0,n=void 0===n?this.length:n>>>0,t||(t=0),"number"==typeof t)for(i=e;i<n;++i)this[i]=t;else{var s=c.isBuffer(t)?t:j(new c(t,r).toString()),a=s.length;for(i=0;i<n-e;++i)this[i+e]=s[i%a]}return this};var D=/[^+\/0-9A-Za-z-_]/g;function M(t){return t<16?"0"+t.toString(16):t.toString(16)}function j(t,e){var n;e=e||1/0;for(var r=t.length,o=null,i=[],s=0;s<r;++s){if((n=t.charCodeAt(s))>55295&&n<57344){if(!o){if(n>56319){(e-=3)>-1&&i.push(239,191,189);continue}if(s+1===r){(e-=3)>-1&&i.push(239,191,189);continue}o=n;continue}if(n<56320){(e-=3)>-1&&i.push(239,191,189),o=n;continue}n=65536+(o-55296<<10|n-56320)}else o&&(e-=3)>-1&&i.push(239,191,189);if(o=null,n<128){if((e-=1)<0)break;i.push(n)}else if(n<2048){if((e-=2)<0)break;i.push(n>>6|192,63&n|128)}else if(n<65536){if((e-=3)<0)break;i.push(n>>12|224,n>>6&63|128,63&n|128)}else{if(!(n<1114112))throw new Error("Invalid code point");if((e-=4)<0)break;i.push(n>>18|240,n>>12&63|128,n>>6&63|128,63&n|128)}}return i}function q(t){return r.toByteArray(function(t){if((t=function(t){return 
t.trim?t.trim():t.replace(/^\s+|\s+$/g,"")}(t).replace(D,"")).length<2)return"";for(;t.length%4!=0;)t+="=";return t}(t))}function Y(t,e,n,r){for(var o=0;o<r&&!(o+n>=e.length||o>=t.length);++o)e[o+n]=t[o];return o}}).call(this,n(7))},function(t,e){var n;n=function(){return this}();try{n=n||new Function("return this")()}catch(t){"object"==typeof window&&(n=window)}t.exports=n},function(t,e,n){var r=n(39);t.exports=function(t){var e=t.xdomain,n=t.xscheme,o=t.enablesXDR;try{if("undefined"!=typeof XMLHttpRequest&&(!e||r))return new XMLHttpRequest}catch(t){}try{if("undefined"!=typeof XDomainRequest&&!n&&o)return new XDomainRequest}catch(t){}if(!e)try{return new(self[["Active"].concat("Object").join("X")])("Microsoft.XMLHTTP")}catch(t){}}},function(t,e,n){var r=n(2),o=n(1);function i(t){this.path=t.path,this.hostname=t.hostname,this.port=t.port,this.secure=t.secure,this.query=t.query,this.timestampParam=t.timestampParam,this.timestampRequests=t.timestampRequests,this.readyState="",this.agent=t.agent||!1,this.socket=t.socket,this.enablesXDR=t.enablesXDR,this.withCredentials=t.withCredentials,this.pfx=t.pfx,this.key=t.key,this.passphrase=t.passphrase,this.cert=t.cert,this.ca=t.ca,this.ciphers=t.ciphers,this.rejectUnauthorized=t.rejectUnauthorized,this.forceNode=t.forceNode,this.isReactNative=t.isReactNative,this.extraHeaders=t.extraHeaders,this.localAddress=t.localAddress}t.exports=i,o(i.prototype),i.prototype.onError=function(t,e){var n=new Error(t);return n.type="TransportError",n.description=e,this.emit("error",n),this},i.prototype.open=function(){return"closed"!==this.readyState&&""!==this.readyState||(this.readyState="opening",this.doOpen()),this},i.prototype.close=function(){return"opening"!==this.readyState&&"open"!==this.readyState||(this.doClose(),this.onClose()),this},i.prototype.send=function(t){if("open"!==this.readyState)throw new Error("Transport not open");this.write(t)},i.prototype.onOpen=function(){this.readyState="open",this.writable=!0,this.emit("open")},i.prototype.onData=function(t){var e=r.decodePacket(t,this.socket.binaryType);this.onPacket(e)},i.prototype.onPacket=function(t){this.emit("packet",t)},i.prototype.onClose=function(){this.readyState="closed",this.emit("close")}},function(t,e,n){var r=n(57).Symbol;t.exports=r},function(t,e){var n=/^(?:(?![^:@]+:[^:@\/]*@)(http|https|ws|wss):\/\/)?((?:(([^:@]*)(?::([^:@]*))?)?@)?((?:[a-f0-9]{0,4}:){2,7}[a-f0-9]{0,4}|[^:\/?#]*)(?::(\d*))?)(((\/(?:[^?#](?![^?#\/]*\.[^?#\/.]+(?:[?#]|$)))*\/?)?([^?#\/]*))(?:\?([^#]*))?(?:#(.*))?)/,r=["source","protocol","authority","userInfo","user","password","host","port","relative","path","directory","file","query","anchor"];t.exports=function(t){var e=t,o=t.indexOf("["),i=t.indexOf("]");-1!=o&&-1!=i&&(t=t.substring(0,o)+t.substring(o,i).replace(/:/g,";")+t.substring(i,t.length));for(var s=n.exec(t||""),a={},c=14;c--;)a[r[c]]=s[c]||"";return-1!=o&&-1!=i&&(a.source=e,a.host=a.host.substring(1,a.host.length-1).replace(/;/g,":"),a.authority=a.authority.replace("[","").replace("]","").replace(/;/g,":"),a.ipv6uri=!0),a}},function(t,e){var n,r,o=t.exports={};function i(){throw new Error("setTimeout has not been defined")}function s(){throw new Error("clearTimeout has not been defined")}function a(t){if(n===setTimeout)return setTimeout(t,0);if((n===i||!n)&&setTimeout)return n=setTimeout,setTimeout(t,0);try{return n(t,0)}catch(e){try{return n.call(null,t,0)}catch(e){return n.call(this,t,0)}}}!function(){try{n="function"==typeof setTimeout?setTimeout:i}catch(t){n=i}try{r="function"==typeof 
clearTimeout?clearTimeout:s}catch(t){r=s}}();var c,u=[],h=!1,f=-1;function p(){h&&c&&(h=!1,c.length?u=c.concat(u):f=-1,u.length&&l())}function l(){if(!h){var t=a(p);h=!0;for(var e=u.length;e;){for(c=u,u=[];++f<e;)c&&c[f].run();f=-1,e=u.length}c=null,h=!1,function(t){if(r===clearTimeout)return clearTimeout(t);if((r===s||!r)&&clearTimeout)return r=clearTimeout,clearTimeout(t);try{r(t)}catch(e){try{return r.call(null,t)}catch(e){return r.call(this,t)}}}(t)}}function d(t,e){this.fun=t,this.array=e}function y(){}o.nextTick=function(t){var e=new Array(arguments.length-1);if(arguments.length>1)for(var n=1;n<arguments.length;n++)e[n-1]=arguments[n];u.push(new d(t,e)),1!==u.length||h||a(l)},d.prototype.run=function(){this.fun.apply(null,this.array)},o.title="browser",o.browser=!0,o.env={},o.argv=[],o.version="",o.versions={},o.on=y,o.addListener=y,o.once=y,o.off=y,o.removeListener=y,o.removeAllListeners=y,o.emit=y,o.prependListener=y,o.prependOnceListener=y,o.listeners=function(t){return[]},o.binding=function(t){throw new Error("process.binding is not supported")},o.cwd=function(){return"/"},o.chdir=function(t){throw new Error("process.chdir is not supported")},o.umask=function(){return 0}},function(t,e){var n={}.toString;t.exports=Array.isArray||function(t){return"[object Array]"==n.call(t)}},function(t,e,n){(function(e){t.exports=function(t){return n&&e.isBuffer(t)||r&&(t instanceof ArrayBuffer||function(t){return"function"==typeof ArrayBuffer.isView?ArrayBuffer.isView(t):t.buffer instanceof ArrayBuffer}(t))};var n="function"==typeof e&&"function"==typeof e.isBuffer,r="function"==typeof ArrayBuffer}).call(this,n(6).Buffer)},function(t,e,n){var r=n(37),o=n(21),i=n(1),s=n(5),a=n(22),c=n(23),u=n(0)("socket.io-client:manager"),h=n(20),f=n(52),p=Object.prototype.hasOwnProperty;function l(t,e){if(!(this instanceof l))return new l(t,e);t&&"object"==typeof t&&(e=t,t=void 0),(e=e||{}).path=e.path||"/socket.io",this.nsps={},this.subs=[],this.opts=e,this.reconnection(!1!==e.reconnection),this.reconnectionAttempts(e.reconnectionAttempts||1/0),this.reconnectionDelay(e.reconnectionDelay||1e3),this.reconnectionDelayMax(e.reconnectionDelayMax||5e3),this.randomizationFactor(e.randomizationFactor||.5),this.backoff=new f({min:this.reconnectionDelay(),max:this.reconnectionDelayMax(),jitter:this.randomizationFactor()}),this.timeout(null==e.timeout?2e4:e.timeout),this.readyState="closed",this.uri=t,this.connecting=[],this.lastPing=null,this.encoding=!1,this.packetBuffer=[];var n=e.parser||s;this.encoder=new n.Encoder,this.decoder=new n.Decoder,this.autoConnect=!1!==e.autoConnect,this.autoConnect&&this.open()}t.exports=l,l.prototype.emitAll=function(){for(var t in this.emit.apply(this,arguments),this.nsps)p.call(this.nsps,t)&&this.nsps[t].emit.apply(this.nsps[t],arguments)},l.prototype.updateSocketIds=function(){for(var t in this.nsps)p.call(this.nsps,t)&&(this.nsps[t].id=this.generateId(t))},l.prototype.generateId=function(t){return("/"===t?"":t+"#")+this.engine.id},i(l.prototype),l.prototype.reconnection=function(t){return arguments.length?(this._reconnection=!!t,this):this._reconnection},l.prototype.reconnectionAttempts=function(t){return arguments.length?(this._reconnectionAttempts=t,this):this._reconnectionAttempts},l.prototype.reconnectionDelay=function(t){return arguments.length?(this._reconnectionDelay=t,this.backoff&&this.backoff.setMin(t),this):this._reconnectionDelay},l.prototype.randomizationFactor=function(t){return 
arguments.length?(this._randomizationFactor=t,this.backoff&&this.backoff.setJitter(t),this):this._randomizationFactor},l.prototype.reconnectionDelayMax=function(t){return arguments.length?(this._reconnectionDelayMax=t,this.backoff&&this.backoff.setMax(t),this):this._reconnectionDelayMax},l.prototype.timeout=function(t){return arguments.length?(this._timeout=t,this):this._timeout},l.prototype.maybeReconnectOnOpen=function(){!this.reconnecting&&this._reconnection&&0===this.backoff.attempts&&this.reconnect()},l.prototype.open=l.prototype.connect=function(t,e){if(u("readyState %s",this.readyState),~this.readyState.indexOf("open"))return this;u("opening %s",this.uri),this.engine=r(this.uri,this.opts);var n=this.engine,o=this;this.readyState="opening",this.skipReconnect=!1;var i=a(n,"open",(function(){o.onopen(),t&&t()})),s=a(n,"error",(function(e){if(u("connect_error"),o.cleanup(),o.readyState="closed",o.emitAll("connect_error",e),t){var n=new Error("Connection error");n.data=e,t(n)}else o.maybeReconnectOnOpen()}));if(!1!==this._timeout){var c=this._timeout;u("connect attempt will timeout after %d",c);var h=setTimeout((function(){u("connect attempt timed out after %d",c),i.destroy(),n.close(),n.emit("error","timeout"),o.emitAll("connect_timeout",c)}),c);this.subs.push({destroy:function(){clearTimeout(h)}})}return this.subs.push(i),this.subs.push(s),this},l.prototype.onopen=function(){u("open"),this.cleanup(),this.readyState="open",this.emit("open");var t=this.engine;this.subs.push(a(t,"data",c(this,"ondata"))),this.subs.push(a(t,"ping",c(this,"onping"))),this.subs.push(a(t,"pong",c(this,"onpong"))),this.subs.push(a(t,"error",c(this,"onerror"))),this.subs.push(a(t,"close",c(this,"onclose"))),this.subs.push(a(this.decoder,"decoded",c(this,"ondecoded")))},l.prototype.onping=function(){this.lastPing=new Date,this.emitAll("ping")},l.prototype.onpong=function(){this.emitAll("pong",new Date-this.lastPing)},l.prototype.ondata=function(t){this.decoder.add(t)},l.prototype.ondecoded=function(t){this.emit("packet",t)},l.prototype.onerror=function(t){u("error",t),this.emitAll("error",t)},l.prototype.socket=function(t,e){var n=this.nsps[t];if(!n){n=new o(this,t,e),this.nsps[t]=n;var r=this;n.on("connecting",i),n.on("connect",(function(){n.id=r.generateId(t)})),this.autoConnect&&i()}function i(){~h(r.connecting,n)||r.connecting.push(n)}return n},l.prototype.destroy=function(t){var e=h(this.connecting,t);~e&&this.connecting.splice(e,1),this.connecting.length||this.close()},l.prototype.packet=function(t){u("writing packet %j",t);var e=this;t.query&&0===t.type&&(t.nsp+="?"+t.query),e.encoding?e.packetBuffer.push(t):(e.encoding=!0,this.encoder.encode(t,(function(n){for(var r=0;r<n.length;r++)e.engine.write(n[r],t.options);e.encoding=!1,e.processPacketQueue()})))},l.prototype.processPacketQueue=function(){if(this.packetBuffer.length>0&&!this.encoding){var t=this.packetBuffer.shift();this.packet(t)}},l.prototype.cleanup=function(){u("cleanup");for(var 
t=this.subs.length,e=0;e<t;e++){this.subs.shift().destroy()}this.packetBuffer=[],this.encoding=!1,this.lastPing=null,this.decoder.destroy()},l.prototype.close=l.prototype.disconnect=function(){u("disconnect"),this.skipReconnect=!0,this.reconnecting=!1,"opening"===this.readyState&&this.cleanup(),this.backoff.reset(),this.readyState="closed",this.engine&&this.engine.close()},l.prototype.onclose=function(t){u("onclose"),this.cleanup(),this.backoff.reset(),this.readyState="closed",this.emit("close",t),this._reconnection&&!this.skipReconnect&&this.reconnect()},l.prototype.reconnect=function(){if(this.reconnecting||this.skipReconnect)return this;var t=this;if(this.backoff.attempts>=this._reconnectionAttempts)u("reconnect failed"),this.backoff.reset(),this.emitAll("reconnect_failed"),this.reconnecting=!1;else{var e=this.backoff.duration();u("will wait %dms before reconnect attempt",e),this.reconnecting=!0;var n=setTimeout((function(){t.skipReconnect||(u("attempting reconnect"),t.emitAll("reconnect_attempt",t.backoff.attempts),t.emitAll("reconnecting",t.backoff.attempts),t.skipReconnect||t.open((function(e){e?(u("reconnect attempt error"),t.reconnecting=!1,t.reconnect(),t.emitAll("reconnect_error",e.data)):(u("reconnect success"),t.onreconnect())})))}),e);this.subs.push({destroy:function(){clearTimeout(n)}})}},l.prototype.onreconnect=function(){var t=this.backoff.attempts;this.reconnecting=!1,this.backoff.reset(),this.updateSocketIds(),this.emitAll("reconnect",t)}},function(t,e,n){var r=n(8),o=n(40),i=n(48),s=n(49);e.polling=function(t){var e=!1,n=!1,s=!1!==t.jsonp;if("undefined"!=typeof location){var a="https:"===location.protocol,c=location.port;c||(c=a?443:80),e=t.hostname!==location.hostname||c!==t.port,n=t.secure!==a}if(t.xdomain=e,t.xscheme=n,"open"in new r(t)&&!t.forceJSONP)return new o(t);if(!s)throw new Error("JSONP disabled");return new i(t)},e.websocket=s},function(t,e,n){var r=n(9),o=n(3),i=n(2),s=n(4),a=n(19),c=n(0)("engine.io-client:polling");t.exports=h;var u=null!=new(n(8))({xdomain:!1}).responseType;function h(t){var e=t&&t.forceBase64;u&&!e||(this.supportsBinary=!1),r.call(this,t)}s(h,r),h.prototype.name="polling",h.prototype.doOpen=function(){this.poll()},h.prototype.pause=function(t){var e=this;function n(){c("paused"),e.readyState="paused",t()}if(this.readyState="pausing",this.polling||!this.writable){var r=0;this.polling&&(c("we are currently polling - waiting to pause"),r++,this.once("pollComplete",(function(){c("pre-pause polling complete"),--r||n()}))),this.writable||(c("we are currently writing - waiting to pause"),r++,this.once("drain",(function(){c("pre-pause writing complete"),--r||n()})))}else n()},h.prototype.poll=function(){c("polling"),this.polling=!0,this.doPoll(),this.emit("poll")},h.prototype.onData=function(t){var e=this;c("polling got data %s",t);i.decodePayload(t,this.socket.binaryType,(function(t,n,r){if("opening"===e.readyState&&e.onOpen(),"close"===t.type)return e.onClose(),!1;e.onPacket(t)})),"closed"!==this.readyState&&(this.polling=!1,this.emit("pollComplete"),"open"===this.readyState?this.poll():c('ignoring poll - transport state "%s"',this.readyState))},h.prototype.doClose=function(){var t=this;function e(){c("writing close packet"),t.write([{type:"close"}])}"open"===this.readyState?(c("transport open - closing"),e()):(c("transport not open - deferring close"),this.once("open",e))},h.prototype.write=function(t){var e=this;this.writable=!1;var 
n=function(){e.writable=!0,e.emit("drain")};i.encodePayload(t,this.supportsBinary,(function(t){e.doWrite(t,n)}))},h.prototype.uri=function(){var t=this.query||{},e=this.secure?"https":"http",n="";return!1!==this.timestampRequests&&(t[this.timestampParam]=a()),this.supportsBinary||t.sid||(t.b64=1),t=o.encode(t),this.port&&("https"===e&&443!==Number(this.port)||"http"===e&&80!==Number(this.port))&&(n=":"+this.port),t.length&&(t="?"+t),e+"://"+(-1!==this.hostname.indexOf(":")?"["+this.hostname+"]":this.hostname)+n+this.path+t}},function(t,e,n){(function(e){var r=n(42),o=Object.prototype.toString,i="function"==typeof Blob||"undefined"!=typeof Blob&&"[object BlobConstructor]"===o.call(Blob),s="function"==typeof File||"undefined"!=typeof File&&"[object FileConstructor]"===o.call(File);t.exports=function t(n){if(!n||"object"!=typeof n)return!1;if(r(n)){for(var o=0,a=n.length;o<a;o++)if(t(n[o]))return!0;return!1}if("function"==typeof e&&e.isBuffer&&e.isBuffer(n)||"function"==typeof ArrayBuffer&&n instanceof ArrayBuffer||i&&n instanceof Blob||s&&n instanceof File)return!0;if(n.toJSON&&"function"==typeof n.toJSON&&1===arguments.length)return t(n.toJSON(),!0);for(var c in n)if(Object.prototype.hasOwnProperty.call(n,c)&&t(n[c]))return!0;return!1}}).call(this,n(6).Buffer)},function(t,e,n){"use strict";var r,o="0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-_".split(""),i={},s=0,a=0;function c(t){var e="";do{e=o[t%64]+e,t=Math.floor(t/64)}while(t>0);return e}function u(){var t=c(+new Date);return t!==r?(s=0,r=t):t+"."+c(s++)}for(;a<64;a++)i[o[a]]=a;u.encode=c,u.decode=function(t){var e=0;for(a=0;a<t.length;a++)e=64*e+i[t.charAt(a)];return e},t.exports=u},function(t,e){var n=[].indexOf;t.exports=function(t,e){if(n)return t.indexOf(e);for(var r=0;r<t.length;++r)if(t[r]===e)return r;return-1}},function(t,e,n){var r=n(5),o=n(1),i=n(51),s=n(22),a=n(23),c=n(0)("socket.io-client:socket"),u=n(3),h=n(18);t.exports=l;var f={connect:1,connect_error:1,connect_timeout:1,connecting:1,disconnect:1,error:1,reconnect:1,reconnect_attempt:1,reconnect_failed:1,reconnect_error:1,reconnecting:1,ping:1,pong:1},p=o.prototype.emit;function l(t,e,n){this.io=t,this.nsp=e,this.json=this,this.ids=0,this.acks={},this.receiveBuffer=[],this.sendBuffer=[],this.connected=!1,this.disconnected=!0,this.flags={},n&&n.query&&(this.query=n.query),this.io.autoConnect&&this.open()}o(l.prototype),l.prototype.subEvents=function(){if(!this.subs){var t=this.io;this.subs=[s(t,"open",a(this,"onopen")),s(t,"packet",a(this,"onpacket")),s(t,"close",a(this,"onclose"))]}},l.prototype.open=l.prototype.connect=function(){return this.connected||(this.subEvents(),this.io.open(),"open"===this.io.readyState&&this.onopen(),this.emit("connecting")),this},l.prototype.send=function(){var t=i(arguments);return t.unshift("message"),this.emit.apply(this,t),this},l.prototype.emit=function(t){if(f.hasOwnProperty(t))return p.apply(this,arguments),this;var e=i(arguments),n={type:(void 0!==this.flags.binary?this.flags.binary:h(e))?r.BINARY_EVENT:r.EVENT,data:e,options:{}};return n.options.compress=!this.flags||!1!==this.flags.compress,"function"==typeof e[e.length-1]&&(c("emitting packet with ack id %d",this.ids),this.acks[this.ids]=e.pop(),n.id=this.ids++),this.connected?this.packet(n):this.sendBuffer.push(n),this.flags={},this},l.prototype.packet=function(t){t.nsp=this.nsp,this.io.packet(t)},l.prototype.onopen=function(){if(c("transport is open - connecting"),"/"!==this.nsp)if(this.query){var t="object"==typeof 
this.query?u.encode(this.query):this.query;c("sending connect packet with query %s",t),this.packet({type:r.CONNECT,query:t})}else this.packet({type:r.CONNECT})},l.prototype.onclose=function(t){c("close (%s)",t),this.connected=!1,this.disconnected=!0,delete this.id,this.emit("disconnect",t)},l.prototype.onpacket=function(t){var e=t.nsp===this.nsp,n=t.type===r.ERROR&&"/"===t.nsp;if(e||n)switch(t.type){case r.CONNECT:this.onconnect();break;case r.EVENT:case r.BINARY_EVENT:this.onevent(t);break;case r.ACK:case r.BINARY_ACK:this.onack(t);break;case r.DISCONNECT:this.ondisconnect();break;case r.ERROR:this.emit("error",t.data)}},l.prototype.onevent=function(t){var e=t.data||[];c("emitting event %j",e),null!=t.id&&(c("attaching ack callback to event"),e.push(this.ack(t.id))),this.connected?p.apply(this,e):this.receiveBuffer.push(e)},l.prototype.ack=function(t){var e=this,n=!1;return function(){if(!n){n=!0;var o=i(arguments);c("sending ack %j",o),e.packet({type:h(o)?r.BINARY_ACK:r.ACK,id:t,data:o})}}},l.prototype.onack=function(t){var e=this.acks[t.id];"function"==typeof e?(c("calling ack %s with %j",t.id,t.data),e.apply(this,t.data),delete this.acks[t.id]):c("bad ack %s",t.id)},l.prototype.onconnect=function(){this.connected=!0,this.disconnected=!1,this.emit("connect"),this.emitBuffered()},l.prototype.emitBuffered=function(){var t;for(t=0;t<this.receiveBuffer.length;t++)p.apply(this,this.receiveBuffer[t]);for(this.receiveBuffer=[],t=0;t<this.sendBuffer.length;t++)this.packet(this.sendBuffer[t]);this.sendBuffer=[]},l.prototype.ondisconnect=function(){c("server disconnect (%s)",this.nsp),this.destroy(),this.onclose("io server disconnect")},l.prototype.destroy=function(){if(this.subs){for(var t=0;t<this.subs.length;t++)this.subs[t].destroy();this.subs=null}this.io.destroy(this)},l.prototype.close=l.prototype.disconnect=function(){return this.connected&&(c("performing disconnect (%s)",this.nsp),this.packet({type:r.DISCONNECT})),this.destroy(),this.connected&&this.onclose("io client disconnect"),this},l.prototype.compress=function(t){return this.flags.compress=t,this},l.prototype.binary=function(t){return this.flags.binary=t,this}},function(t,e){t.exports=function(t,e,n){return t.on(e,n),{destroy:function(){t.removeListener(e,n)}}}},function(t,e){var n=[].slice;t.exports=function(t,e){if("string"==typeof e&&(e=t[e]),"function"!=typeof e)throw new Error("bind() requires a function");var r=n.call(arguments,2);return function(){return e.apply(t,r.concat(n.call(arguments)))}}},function(t,e){t.exports=Object.freeze({PLAYER_RADIUS:20,PLAYER_MAX_HP:100,PLAYER_SPEED:400,PLAYER_FIRE_COOLDOWN:.25,BULLET_RADIUS:3,BULLET_SPEED:800,BULLET_DAMAGE:10,SCORE_BULLET_HIT:20,SCORE_PER_SECOND:1,MAP_SIZE:3e3,MSG_TYPES:{JOIN_GAME:"join_game",GAME_UPDATE:"update",INPUT:"input",GAME_OVER:"dead"}})},function(t,e,n){var r=n(27),o=n(5),i=n(15),s=n(0)("socket.io-client");t.exports=e=c;var a=e.managers={};function c(t,e){"object"==typeof t&&(e=t,t=void 0),e=e||{};var n,o=r(t),c=o.source,u=o.id,h=o.path,f=a[u]&&h in a[u].nsps;return e.forceNew||e["force new connection"]||!1===e.multiplex||f?(s("ignoring socket cache for %s",c),n=i(c,e)):(a[u]||(s("new io instance for %s",c),a[u]=i(c,e)),n=a[u]),o.query&&!e.query&&(e.query=o.query),n.socket(o.path,e)}e.protocol=o.protocol,e.connect=c,e.Manager=n(15),e.Socket=n(21)},function(t,e,n){var r=n(53),o=n(55),i=/[&<>"']/g,s=RegExp(i.source);t.exports=function(t){return(t=o(t))&&s.test(t)?t.replace(i,r):t}},function(t,e,n){var r=n(11),o=n(0)("socket.io-client:url");t.exports=function(t,e){var 
n=t;e=e||"undefined"!=typeof location&&location,null==t&&(t=e.protocol+"//"+e.host);"string"==typeof t&&("/"===t.charAt(0)&&(t="/"===t.charAt(1)?e.protocol+t:e.host+t),/^(https?|wss?):\/\//.test(t)||(o("protocol-less url %s",t),t=void 0!==e?e.protocol+"//"+t:"https://"+t),o("parse %s",t),n=r(t));n.port||(/^(http|ws)$/.test(n.protocol)?n.port="80":/^(http|ws)s$/.test(n.protocol)&&(n.port="443"));n.path=n.path||"/";var i=-1!==n.host.indexOf(":")?"["+n.host+"]":n.host;return n.id=n.protocol+"://"+i+":"+n.port,n.href=n.protocol+"://"+i+(e&&e.port===n.port?"":":"+n.port),n}},function(t,e,n){t.exports=function(t){function e(t){let e=0;for(let n=0;n<t.length;n++)e=(e<<5)-e+t.charCodeAt(n),e|=0;return r.colors[Math.abs(e)%r.colors.length]}function r(t){let n;function s(...t){if(!s.enabled)return;const e=s,o=Number(new Date),i=o-(n||o);e.diff=i,e.prev=n,e.curr=o,n=o,t[0]=r.coerce(t[0]),"string"!=typeof t[0]&&t.unshift("%O");let a=0;t[0]=t[0].replace(/%([a-zA-Z%])/g,(n,o)=>{if("%%"===n)return n;a++;const i=r.formatters[o];if("function"==typeof i){const r=t[a];n=i.call(e,r),t.splice(a,1),a--}return n}),r.formatArgs.call(e,t);(e.log||r.log).apply(e,t)}return s.namespace=t,s.enabled=r.enabled(t),s.useColors=r.useColors(),s.color=e(t),s.destroy=o,s.extend=i,"function"==typeof r.init&&r.init(s),r.instances.push(s),s}function o(){const t=r.instances.indexOf(this);return-1!==t&&(r.instances.splice(t,1),!0)}function i(t,e){const n=r(this.namespace+(void 0===e?":":e)+t);return n.log=this.log,n}function s(t){return t.toString().substring(2,t.toString().length-2).replace(/\.\*\?$/,"*")}return r.debug=r,r.default=r,r.coerce=function(t){if(t instanceof Error)return t.stack||t.message;return t},r.disable=function(){const t=[...r.names.map(s),...r.skips.map(s).map(t=>"-"+t)].join(",");return r.enable(""),t},r.enable=function(t){let e;r.save(t),r.names=[],r.skips=[];const n=("string"==typeof t?t:"").split(/[\s,]+/),o=n.length;for(e=0;e<o;e++)n[e]&&("-"===(t=n[e].replace(/\*/g,".*?"))[0]?r.skips.push(new RegExp("^"+t.substr(1)+"$")):r.names.push(new RegExp("^"+t+"$")));for(e=0;e<r.instances.length;e++){const t=r.instances[e];t.enabled=r.enabled(t.namespace)}},r.enabled=function(t){if("*"===t[t.length-1])return!0;let e,n;for(e=0,n=r.skips.length;e<n;e++)if(r.skips[e].test(t))return!1;for(e=0,n=r.names.length;e<n;e++)if(r.names[e].test(t))return!0;return!1},r.humanize=n(29),Object.keys(t).forEach(e=>{r[e]=t[e]}),r.instances=[],r.names=[],r.skips=[],r.formatters={},r.selectColor=e,r.enable(r.load()),r}},function(t,e){var n=1e3,r=6e4,o=60*r,i=24*o;function s(t,e,n,r){var o=e>=1.5*n;return Math.round(t/n)+" "+r+(o?"s":"")}t.exports=function(t,e){e=e||{};var a=typeof t;if("string"===a&&t.length>0)return function(t){if((t=String(t)).length>100)return;var e=/^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(t);if(!e)return;var s=parseFloat(e[1]);switch((e[2]||"ms").toLowerCase()){case"years":case"year":case"yrs":case"yr":case"y":return 315576e5*s;case"weeks":case"week":case"w":return 6048e5*s;case"days":case"day":case"d":return s*i;case"hours":case"hour":case"hrs":case"hr":case"h":return s*o;case"minutes":case"minute":case"mins":case"min":case"m":return s*r;case"seconds":case"second":case"secs":case"sec":case"s":return s*n;case"milliseconds":case"millisecond":case"msecs":case"msec":case"ms":return s;default:return}}(t);if("number"===a&&isFinite(t))return e.long?function(t){var e=Math.abs(t);if(e>=i)return s(t,e,i,"day");if(e>=o)return 
s(t,e,o,"hour");if(e>=r)return s(t,e,r,"minute");if(e>=n)return s(t,e,n,"second");return t+" ms"}(t):function(t){var e=Math.abs(t);if(e>=i)return Math.round(t/i)+"d";if(e>=o)return Math.round(t/o)+"h";if(e>=r)return Math.round(t/r)+"m";if(e>=n)return Math.round(t/n)+"s";return t+"ms"}(t);throw new Error("val is not a non-empty string or a valid number. val="+JSON.stringify(t))}},function(t,e,n){(function(r){function o(){var t;try{t=e.storage.debug}catch(t){}return!t&&void 0!==r&&"env"in r&&(t=r.env.DEBUG),t}(e=t.exports=n(31)).log=function(){return"object"==typeof console&&console.log&&Function.prototype.apply.call(console.log,console,arguments)},e.formatArgs=function(t){var n=this.useColors;if(t[0]=(n?"%c":"")+this.namespace+(n?" %c":" ")+t[0]+(n?"%c ":" ")+"+"+e.humanize(this.diff),!n)return;var r="color: "+this.color;t.splice(1,0,r,"color: inherit");var o=0,i=0;t[0].replace(/%[a-zA-Z%]/g,(function(t){"%%"!==t&&(o++,"%c"===t&&(i=o))})),t.splice(i,0,r)},e.save=function(t){try{null==t?e.storage.removeItem("debug"):e.storage.debug=t}catch(t){}},e.load=o,e.useColors=function(){if("undefined"!=typeof window&&window.process&&"renderer"===window.process.type)return!0;if("undefined"!=typeof navigator&&navigator.userAgent&&navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/))return!1;return"undefined"!=typeof document&&document.documentElement&&document.documentElement.style&&document.documentElement.style.WebkitAppearance||"undefined"!=typeof window&&window.console&&(window.console.firebug||window.console.exception&&window.console.table)||"undefined"!=typeof navigator&&navigator.userAgent&&navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/)&&parseInt(RegExp.$1,10)>=31||"undefined"!=typeof navigator&&navigator.userAgent&&navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)},e.storage="undefined"!=typeof chrome&&void 0!==chrome.storage?chrome.storage.local:function(){try{return window.localStorage}catch(t){}}(),e.colors=["#0000CC","#0000FF","#0033CC","#0033FF","#0066CC","#0066FF","#0099CC","#0099FF","#00CC00","#00CC33","#00CC66","#00CC99","#00CCCC","#00CCFF","#3300CC","#3300FF","#3333CC","#3333FF","#3366CC","#3366FF","#3399CC","#3399FF","#33CC00","#33CC33","#33CC66","#33CC99","#33CCCC","#33CCFF","#6600CC","#6600FF","#6633CC","#6633FF","#66CC00","#66CC33","#9900CC","#9900FF","#9933CC","#9933FF","#99CC00","#99CC33","#CC0000","#CC0033","#CC0066","#CC0099","#CC00CC","#CC00FF","#CC3300","#CC3333","#CC3366","#CC3399","#CC33CC","#CC33FF","#CC6600","#CC6633","#CC9900","#CC9933","#CCCC00","#CCCC33","#FF0000","#FF0033","#FF0066","#FF0099","#FF00CC","#FF00FF","#FF3300","#FF3333","#FF3366","#FF3399","#FF33CC","#FF33FF","#FF6600","#FF6633","#FF9900","#FF9933","#FFCC00","#FFCC33"],e.formatters.j=function(t){try{return JSON.stringify(t)}catch(t){return"[UnexpectedJSONParseError]: "+t.message}},e.enable(o())}).call(this,n(12))},function(t,e,n){function r(t){var n;function r(){if(r.enabled){var t=r,o=+new Date,i=o-(n||o);t.diff=i,t.prev=n,t.curr=o,n=o;for(var s=new Array(arguments.length),a=0;a<s.length;a++)s[a]=arguments[a];s[0]=e.coerce(s[0]),"string"!=typeof s[0]&&s.unshift("%O");var c=0;s[0]=s[0].replace(/%([a-zA-Z%])/g,(function(n,r){if("%%"===n)return n;c++;var o=e.formatters[r];if("function"==typeof o){var i=s[c];n=o.call(t,i),s.splice(c,1),c--}return n})),e.formatArgs.call(t,s);var u=r.log||e.log||console.log.bind(console);u.apply(t,s)}}return r.namespace=t,r.enabled=e.enabled(t),r.useColors=e.useColors(),r.color=function(t){var n,r=0;for(n in 
t)r=(r<<5)-r+t.charCodeAt(n),r|=0;return e.colors[Math.abs(r)%e.colors.length]}(t),r.destroy=o,"function"==typeof e.init&&e.init(r),e.instances.push(r),r}function o(){var t=e.instances.indexOf(this);return-1!==t&&(e.instances.splice(t,1),!0)}(e=t.exports=r.debug=r.default=r).coerce=function(t){return t instanceof Error?t.stack||t.message:t},e.disable=function(){e.enable("")},e.enable=function(t){var n;e.save(t),e.names=[],e.skips=[];var r=("string"==typeof t?t:"").split(/[\s,]+/),o=r.length;for(n=0;n<o;n++)r[n]&&("-"===(t=r[n].replace(/\*/g,".*?"))[0]?e.skips.push(new RegExp("^"+t.substr(1)+"$")):e.names.push(new RegExp("^"+t+"$")));for(n=0;n<e.instances.length;n++){var i=e.instances[n];i.enabled=e.enabled(i.namespace)}},e.enabled=function(t){if("*"===t[t.length-1])return!0;var n,r;for(n=0,r=e.skips.length;n<r;n++)if(e.skips[n].test(t))return!1;for(n=0,r=e.names.length;n<r;n++)if(e.names[n].test(t))return!0;return!1},e.humanize=n(32),e.instances=[],e.names=[],e.skips=[],e.formatters={}},function(t,e){var n=1e3,r=6e4,o=60*r,i=24*o;function s(t,e,n){if(!(t<e))return t<1.5*e?Math.floor(t/e)+" "+n:Math.ceil(t/e)+" "+n+"s"}t.exports=function(t,e){e=e||{};var a,c=typeof t;if("string"===c&&t.length>0)return function(t){if((t=String(t)).length>100)return;var e=/^((?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|years?|yrs?|y)?$/i.exec(t);if(!e)return;var s=parseFloat(e[1]);switch((e[2]||"ms").toLowerCase()){case"years":case"year":case"yrs":case"yr":case"y":return 315576e5*s;case"days":case"day":case"d":return s*i;case"hours":case"hour":case"hrs":case"hr":case"h":return s*o;case"minutes":case"minute":case"mins":case"min":case"m":return s*r;case"seconds":case"second":case"secs":case"sec":case"s":return s*n;case"milliseconds":case"millisecond":case"msecs":case"msec":case"ms":return s;default:return}}(t);if("number"===c&&!1===isNaN(t))return e.long?s(a=t,i,"day")||s(a,o,"hour")||s(a,r,"minute")||s(a,n,"second")||a+" ms":function(t){if(t>=i)return Math.round(t/i)+"d";if(t>=o)return Math.round(t/o)+"h";if(t>=r)return Math.round(t/r)+"m";if(t>=n)return Math.round(t/n)+"s";return t+"ms"}(t);throw new Error("val is not a non-empty string or a valid number. 
val="+JSON.stringify(t))}},function(t,e,n){var r=n(13),o=n(14),i=Object.prototype.toString,s="function"==typeof Blob||"undefined"!=typeof Blob&&"[object BlobConstructor]"===i.call(Blob),a="function"==typeof File||"undefined"!=typeof File&&"[object FileConstructor]"===i.call(File);e.deconstructPacket=function(t){var e=[],n=t.data,i=t;return i.data=function t(e,n){if(!e)return e;if(o(e)){var i={_placeholder:!0,num:n.length};return n.push(e),i}if(r(e)){for(var s=new Array(e.length),a=0;a<e.length;a++)s[a]=t(e[a],n);return s}if("object"==typeof e&&!(e instanceof Date)){s={};for(var c in e)s[c]=t(e[c],n);return s}return e}(n,e),i.attachments=e.length,{packet:i,buffers:e}},e.reconstructPacket=function(t,e){return t.data=function t(e,n){if(!e)return e;if(e&&e._placeholder)return n[e.num];if(r(e))for(var o=0;o<e.length;o++)e[o]=t(e[o],n);else if("object"==typeof e)for(var i in e)e[i]=t(e[i],n);return e}(t.data,e),t.attachments=void 0,t},e.removeBlobs=function(t,e){var n=0,i=t;!function t(c,u,h){if(!c)return c;if(s&&c instanceof Blob||a&&c instanceof File){n++;var f=new FileReader;f.onload=function(){h?h[u]=this.result:i=this.result,--n||e(i)},f.readAsArrayBuffer(c)}else if(r(c))for(var p=0;p<c.length;p++)t(c[p],p,c);else if("object"==typeof c&&!o(c))for(var l in c)t(c[l],l,c)}(i),n||e(i)}},function(t,e,n){"use strict";e.byteLength=function(t){var e=u(t),n=e[0],r=e[1];return 3*(n+r)/4-r},e.toByteArray=function(t){var e,n,r=u(t),s=r[0],a=r[1],c=new i(function(t,e,n){return 3*(e+n)/4-n}(0,s,a)),h=0,f=a>0?s-4:s;for(n=0;n<f;n+=4)e=o[t.charCodeAt(n)]<<18|o[t.charCodeAt(n+1)]<<12|o[t.charCodeAt(n+2)]<<6|o[t.charCodeAt(n+3)],c[h++]=e>>16&255,c[h++]=e>>8&255,c[h++]=255&e;2===a&&(e=o[t.charCodeAt(n)]<<2|o[t.charCodeAt(n+1)]>>4,c[h++]=255&e);1===a&&(e=o[t.charCodeAt(n)]<<10|o[t.charCodeAt(n+1)]<<4|o[t.charCodeAt(n+2)]>>2,c[h++]=e>>8&255,c[h++]=255&e);return c},e.fromByteArray=function(t){for(var e,n=t.length,o=n%3,i=[],s=0,a=n-o;s<a;s+=16383)i.push(h(t,s,s+16383>a?a:s+16383));1===o?(e=t[n-1],i.push(r[e>>2]+r[e<<4&63]+"==")):2===o&&(e=(t[n-2]<<8)+t[n-1],i.push(r[e>>10]+r[e>>4&63]+r[e<<2&63]+"="));return i.join("")};for(var r=[],o=[],i="undefined"!=typeof Uint8Array?Uint8Array:Array,s="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",a=0,c=s.length;a<c;++a)r[a]=s[a],o[s.charCodeAt(a)]=a;function u(t){var e=t.length;if(e%4>0)throw new Error("Invalid string. 
Length must be a multiple of 4");var n=t.indexOf("=");return-1===n&&(n=e),[n,n===e?0:4-n%4]}function h(t,e,n){for(var o,i,s=[],a=e;a<n;a+=3)o=(t[a]<<16&16711680)+(t[a+1]<<8&65280)+(255&t[a+2]),s.push(r[(i=o)>>18&63]+r[i>>12&63]+r[i>>6&63]+r[63&i]);return s.join("")}o["-".charCodeAt(0)]=62,o["_".charCodeAt(0)]=63},function(t,e){e.read=function(t,e,n,r,o){var i,s,a=8*o-r-1,c=(1<<a)-1,u=c>>1,h=-7,f=n?o-1:0,p=n?-1:1,l=t[e+f];for(f+=p,i=l&(1<<-h)-1,l>>=-h,h+=a;h>0;i=256*i+t[e+f],f+=p,h-=8);for(s=i&(1<<-h)-1,i>>=-h,h+=r;h>0;s=256*s+t[e+f],f+=p,h-=8);if(0===i)i=1-u;else{if(i===c)return s?NaN:1/0*(l?-1:1);s+=Math.pow(2,r),i-=u}return(l?-1:1)*s*Math.pow(2,i-r)},e.write=function(t,e,n,r,o,i){var s,a,c,u=8*i-o-1,h=(1<<u)-1,f=h>>1,p=23===o?Math.pow(2,-24)-Math.pow(2,-77):0,l=r?0:i-1,d=r?1:-1,y=e<0||0===e&&1/e<0?1:0;for(e=Math.abs(e),isNaN(e)||e===1/0?(a=isNaN(e)?1:0,s=h):(s=Math.floor(Math.log(e)/Math.LN2),e*(c=Math.pow(2,-s))<1&&(s--,c*=2),(e+=s+f>=1?p/c:p*Math.pow(2,1-f))*c>=2&&(s++,c/=2),s+f>=h?(a=0,s=h):s+f>=1?(a=(e*c-1)*Math.pow(2,o),s+=f):(a=e*Math.pow(2,f-1)*Math.pow(2,o),s=0));o>=8;t[n+l]=255&a,l+=d,a/=256,o-=8);for(s=s<<o|a,u+=o;u>0;t[n+l]=255&s,l+=d,s/=256,u-=8);t[n+l-d]|=128*y}},function(t,e){var n={}.toString;t.exports=Array.isArray||function(t){return"[object Array]"==n.call(t)}},function(t,e,n){t.exports=n(38),t.exports.parser=n(2)},function(t,e,n){var r=n(16),o=n(1),i=n(0)("engine.io-client:socket"),s=n(20),a=n(2),c=n(11),u=n(3);function h(t,e){if(!(this instanceof h))return new h(t,e);e=e||{},t&&"object"==typeof t&&(e=t,t=null),t?(t=c(t),e.hostname=t.host,e.secure="https"===t.protocol||"wss"===t.protocol,e.port=t.port,t.query&&(e.query=t.query)):e.host&&(e.hostname=c(e.host).host),this.secure=null!=e.secure?e.secure:"undefined"!=typeof location&&"https:"===location.protocol,e.hostname&&!e.port&&(e.port=this.secure?"443":"80"),this.agent=e.agent||!1,this.hostname=e.hostname||("undefined"!=typeof location?location.hostname:"localhost"),this.port=e.port||("undefined"!=typeof location&&location.port?location.port:this.secure?443:80),this.query=e.query||{},"string"==typeof this.query&&(this.query=u.decode(this.query)),this.upgrade=!1!==e.upgrade,this.path=(e.path||"/engine.io").replace(/\/$/,"")+"/",this.forceJSONP=!!e.forceJSONP,this.jsonp=!1!==e.jsonp,this.forceBase64=!!e.forceBase64,this.enablesXDR=!!e.enablesXDR,this.withCredentials=!1!==e.withCredentials,this.timestampParam=e.timestampParam||"t",this.timestampRequests=e.timestampRequests,this.transports=e.transports||["polling","websocket"],this.transportOptions=e.transportOptions||{},this.readyState="",this.writeBuffer=[],this.prevBufferLen=0,this.policyPort=e.policyPort||843,this.rememberUpgrade=e.rememberUpgrade||!1,this.binaryType=null,this.onlyBinaryUpgrades=e.onlyBinaryUpgrades,this.perMessageDeflate=!1!==e.perMessageDeflate&&(e.perMessageDeflate||{}),!0===this.perMessageDeflate&&(this.perMessageDeflate={}),this.perMessageDeflate&&null==this.perMessageDeflate.threshold&&(this.perMessageDeflate.threshold=1024),this.pfx=e.pfx||null,this.key=e.key||null,this.passphrase=e.passphrase||null,this.cert=e.cert||null,this.ca=e.ca||null,this.ciphers=e.ciphers||null,this.rejectUnauthorized=void 0===e.rejectUnauthorized||e.rejectUnauthorized,this.forceNode=!!e.forceNode,this.isReactNative="undefined"!=typeof navigator&&"string"==typeof navigator.product&&"reactnative"===navigator.product.toLowerCase(),("undefined"==typeof 
self||this.isReactNative)&&(e.extraHeaders&&Object.keys(e.extraHeaders).length>0&&(this.extraHeaders=e.extraHeaders),e.localAddress&&(this.localAddress=e.localAddress)),this.id=null,this.upgrades=null,this.pingInterval=null,this.pingTimeout=null,this.pingIntervalTimer=null,this.pingTimeoutTimer=null,this.open()}t.exports=h,h.priorWebsocketSuccess=!1,o(h.prototype),h.protocol=a.protocol,h.Socket=h,h.Transport=n(9),h.transports=n(16),h.parser=n(2),h.prototype.createTransport=function(t){i('creating transport "%s"',t);var e=function(t){var e={};for(var n in t)t.hasOwnProperty(n)&&(e[n]=t[n]);return e}(this.query);e.EIO=a.protocol,e.transport=t;var n=this.transportOptions[t]||{};return this.id&&(e.sid=this.id),new r[t]({query:e,socket:this,agent:n.agent||this.agent,hostname:n.hostname||this.hostname,port:n.port||this.port,secure:n.secure||this.secure,path:n.path||this.path,forceJSONP:n.forceJSONP||this.forceJSONP,jsonp:n.jsonp||this.jsonp,forceBase64:n.forceBase64||this.forceBase64,enablesXDR:n.enablesXDR||this.enablesXDR,withCredentials:n.withCredentials||this.withCredentials,timestampRequests:n.timestampRequests||this.timestampRequests,timestampParam:n.timestampParam||this.timestampParam,policyPort:n.policyPort||this.policyPort,pfx:n.pfx||this.pfx,key:n.key||this.key,passphrase:n.passphrase||this.passphrase,cert:n.cert||this.cert,ca:n.ca||this.ca,ciphers:n.ciphers||this.ciphers,rejectUnauthorized:n.rejectUnauthorized||this.rejectUnauthorized,perMessageDeflate:n.perMessageDeflate||this.perMessageDeflate,extraHeaders:n.extraHeaders||this.extraHeaders,forceNode:n.forceNode||this.forceNode,localAddress:n.localAddress||this.localAddress,requestTimeout:n.requestTimeout||this.requestTimeout,protocols:n.protocols||void 0,isReactNative:this.isReactNative})},h.prototype.open=function(){var t;if(this.rememberUpgrade&&h.priorWebsocketSuccess&&-1!==this.transports.indexOf("websocket"))t="websocket";else{if(0===this.transports.length){var e=this;return void setTimeout((function(){e.emit("error","No transports available")}),0)}t=this.transports[0]}this.readyState="opening";try{t=this.createTransport(t)}catch(t){return this.transports.shift(),void this.open()}t.open(),this.setTransport(t)},h.prototype.setTransport=function(t){i("setting transport %s",t.name);var e=this;this.transport&&(i("clearing existing transport %s",this.transport.name),this.transport.removeAllListeners()),this.transport=t,t.on("drain",(function(){e.onDrain()})).on("packet",(function(t){e.onPacket(t)})).on("error",(function(t){e.onError(t)})).on("close",(function(){e.onClose("transport close")}))},h.prototype.probe=function(t){i('probing transport "%s"',t);var e=this.createTransport(t,{probe:1}),n=!1,r=this;function o(){if(r.onlyBinaryUpgrades){var o=!this.supportsBinary&&r.transport.supportsBinary;n=n||o}n||(i('probe transport "%s" opened',t),e.send([{type:"ping",data:"probe"}]),e.once("packet",(function(o){if(!n)if("pong"===o.type&&"probe"===o.data){if(i('probe transport "%s" pong',t),r.upgrading=!0,r.emit("upgrading",e),!e)return;h.priorWebsocketSuccess="websocket"===e.name,i('pausing current transport "%s"',r.transport.name),r.transport.pause((function(){n||"closed"!==r.readyState&&(i("changing transport and sending upgrade packet"),p(),r.setTransport(e),e.send([{type:"upgrade"}]),r.emit("upgrade",e),e=null,r.upgrading=!1,r.flush())}))}else{i('probe transport "%s" failed',t);var s=new Error("probe error");s.transport=e.name,r.emit("upgradeError",s)}})))}function s(){n||(n=!0,p(),e.close(),e=null)}function a(n){var o=new Error("probe 
error: "+n);o.transport=e.name,s(),i('probe transport "%s" failed because of error: %s',t,n),r.emit("upgradeError",o)}function c(){a("transport closed")}function u(){a("socket closed")}function f(t){e&&t.name!==e.name&&(i('"%s" works - aborting "%s"',t.name,e.name),s())}function p(){e.removeListener("open",o),e.removeListener("error",a),e.removeListener("close",c),r.removeListener("close",u),r.removeListener("upgrading",f)}h.priorWebsocketSuccess=!1,e.once("open",o),e.once("error",a),e.once("close",c),this.once("close",u),this.once("upgrading",f),e.open()},h.prototype.onOpen=function(){if(i("socket open"),this.readyState="open",h.priorWebsocketSuccess="websocket"===this.transport.name,this.emit("open"),this.flush(),"open"===this.readyState&&this.upgrade&&this.transport.pause){i("starting upgrade probes");for(var t=0,e=this.upgrades.length;t<e;t++)this.probe(this.upgrades[t])}},h.prototype.onPacket=function(t){if("opening"===this.readyState||"open"===this.readyState||"closing"===this.readyState)switch(i('socket receive: type "%s", data "%s"',t.type,t.data),this.emit("packet",t),this.emit("heartbeat"),t.type){case"open":this.onHandshake(JSON.parse(t.data));break;case"pong":this.setPing(),this.emit("pong");break;case"error":var e=new Error("server error");e.code=t.data,this.onError(e);break;case"message":this.emit("data",t.data),this.emit("message",t.data)}else i('packet received with socket readyState "%s"',this.readyState)},h.prototype.onHandshake=function(t){this.emit("handshake",t),this.id=t.sid,this.transport.query.sid=t.sid,this.upgrades=this.filterUpgrades(t.upgrades),this.pingInterval=t.pingInterval,this.pingTimeout=t.pingTimeout,this.onOpen(),"closed"!==this.readyState&&(this.setPing(),this.removeListener("heartbeat",this.onHeartbeat),this.on("heartbeat",this.onHeartbeat))},h.prototype.onHeartbeat=function(t){clearTimeout(this.pingTimeoutTimer);var e=this;e.pingTimeoutTimer=setTimeout((function(){"closed"!==e.readyState&&e.onClose("ping timeout")}),t||e.pingInterval+e.pingTimeout)},h.prototype.setPing=function(){var t=this;clearTimeout(t.pingIntervalTimer),t.pingIntervalTimer=setTimeout((function(){i("writing ping packet - expecting pong within %sms",t.pingTimeout),t.ping(),t.onHeartbeat(t.pingTimeout)}),t.pingInterval)},h.prototype.ping=function(){var t=this;this.sendPacket("ping",(function(){t.emit("ping")}))},h.prototype.onDrain=function(){this.writeBuffer.splice(0,this.prevBufferLen),this.prevBufferLen=0,0===this.writeBuffer.length?this.emit("drain"):this.flush()},h.prototype.flush=function(){"closed"!==this.readyState&&this.transport.writable&&!this.upgrading&&this.writeBuffer.length&&(i("flushing %d packets in socket",this.writeBuffer.length),this.transport.send(this.writeBuffer),this.prevBufferLen=this.writeBuffer.length,this.emit("flush"))},h.prototype.write=h.prototype.send=function(t,e,n){return this.sendPacket("message",t,e,n),this},h.prototype.sendPacket=function(t,e,n,r){if("function"==typeof e&&(r=e,e=void 0),"function"==typeof n&&(r=n,n=null),"closing"!==this.readyState&&"closed"!==this.readyState){(n=n||{}).compress=!1!==n.compress;var o={type:t,data:e,options:n};this.emit("packetCreate",o),this.writeBuffer.push(o),r&&this.once("flush",r),this.flush()}},h.prototype.close=function(){if("opening"===this.readyState||"open"===this.readyState){this.readyState="closing";var t=this;this.writeBuffer.length?this.once("drain",(function(){this.upgrading?r():e()})):this.upgrading?r():e()}function e(){t.onClose("forced close"),i("socket closing - telling transport to 
close"),t.transport.close()}function n(){t.removeListener("upgrade",n),t.removeListener("upgradeError",n),e()}function r(){t.once("upgrade",n),t.once("upgradeError",n)}return this},h.prototype.onError=function(t){i("socket error %j",t),h.priorWebsocketSuccess=!1,this.emit("error",t),this.onClose("transport error",t)},h.prototype.onClose=function(t,e){if("opening"===this.readyState||"open"===this.readyState||"closing"===this.readyState){i('socket close with reason: "%s"',t);clearTimeout(this.pingIntervalTimer),clearTimeout(this.pingTimeoutTimer),this.transport.removeAllListeners("close"),this.transport.close(),this.transport.removeAllListeners(),this.readyState="closed",this.id=null,this.emit("close",t,e),this.writeBuffer=[],this.prevBufferLen=0}},h.prototype.filterUpgrades=function(t){for(var e=[],n=0,r=t.length;n<r;n++)~s(this.transports,t[n])&&e.push(t[n]);return e}},function(t,e){try{t.exports="undefined"!=typeof XMLHttpRequest&&"withCredentials"in new XMLHttpRequest}catch(e){t.exports=!1}},function(t,e,n){var r=n(8),o=n(17),i=n(1),s=n(4),a=n(0)("engine.io-client:polling-xhr");function c(){}function u(t){if(o.call(this,t),this.requestTimeout=t.requestTimeout,this.extraHeaders=t.extraHeaders,"undefined"!=typeof location){var e="https:"===location.protocol,n=location.port;n||(n=e?443:80),this.xd="undefined"!=typeof location&&t.hostname!==location.hostname||n!==t.port,this.xs=t.secure!==e}}function h(t){this.method=t.method||"GET",this.uri=t.uri,this.xd=!!t.xd,this.xs=!!t.xs,this.async=!1!==t.async,this.data=void 0!==t.data?t.data:null,this.agent=t.agent,this.isBinary=t.isBinary,this.supportsBinary=t.supportsBinary,this.enablesXDR=t.enablesXDR,this.withCredentials=t.withCredentials,this.requestTimeout=t.requestTimeout,this.pfx=t.pfx,this.key=t.key,this.passphrase=t.passphrase,this.cert=t.cert,this.ca=t.ca,this.ciphers=t.ciphers,this.rejectUnauthorized=t.rejectUnauthorized,this.extraHeaders=t.extraHeaders,this.create()}if(t.exports=u,t.exports.Request=h,s(u,o),u.prototype.supportsBinary=!0,u.prototype.request=function(t){return(t=t||{}).uri=this.uri(),t.xd=this.xd,t.xs=this.xs,t.agent=this.agent||!1,t.supportsBinary=this.supportsBinary,t.enablesXDR=this.enablesXDR,t.withCredentials=this.withCredentials,t.pfx=this.pfx,t.key=this.key,t.passphrase=this.passphrase,t.cert=this.cert,t.ca=this.ca,t.ciphers=this.ciphers,t.rejectUnauthorized=this.rejectUnauthorized,t.requestTimeout=this.requestTimeout,t.extraHeaders=this.extraHeaders,new h(t)},u.prototype.doWrite=function(t,e){var n="string"!=typeof t&&void 0!==t,r=this.request({method:"POST",data:t,isBinary:n}),o=this;r.on("success",e),r.on("error",(function(t){o.onError("xhr post error",t)})),this.sendXhr=r},u.prototype.doPoll=function(){a("xhr poll");var t=this.request(),e=this;t.on("data",(function(t){e.onData(t)})),t.on("error",(function(t){e.onError("xhr poll error",t)})),this.pollXhr=t},i(h.prototype),h.prototype.create=function(){var t={agent:this.agent,xdomain:this.xd,xscheme:this.xs,enablesXDR:this.enablesXDR};t.pfx=this.pfx,t.key=this.key,t.passphrase=this.passphrase,t.cert=this.cert,t.ca=this.ca,t.ciphers=this.ciphers,t.rejectUnauthorized=this.rejectUnauthorized;var e=this.xhr=new r(t),n=this;try{a("xhr open %s: %s",this.method,this.uri),e.open(this.method,this.uri,this.async);try{if(this.extraHeaders)for(var o in 
e.setDisableHeaderCheck&&e.setDisableHeaderCheck(!0),this.extraHeaders)this.extraHeaders.hasOwnProperty(o)&&e.setRequestHeader(o,this.extraHeaders[o])}catch(t){}if("POST"===this.method)try{this.isBinary?e.setRequestHeader("Content-type","application/octet-stream"):e.setRequestHeader("Content-type","text/plain;charset=UTF-8")}catch(t){}try{e.setRequestHeader("Accept","*/*")}catch(t){}"withCredentials"in e&&(e.withCredentials=this.withCredentials),this.requestTimeout&&(e.timeout=this.requestTimeout),this.hasXDR()?(e.onload=function(){n.onLoad()},e.onerror=function(){n.onError(e.responseText)}):e.onreadystatechange=function(){if(2===e.readyState)try{var t=e.getResponseHeader("Content-Type");(n.supportsBinary&&"application/octet-stream"===t||"application/octet-stream; charset=UTF-8"===t)&&(e.responseType="arraybuffer")}catch(t){}4===e.readyState&&(200===e.status||1223===e.status?n.onLoad():setTimeout((function(){n.onError("number"==typeof e.status?e.status:0)}),0))},a("xhr data %s",this.data),e.send(this.data)}catch(t){return void setTimeout((function(){n.onError(t)}),0)}"undefined"!=typeof document&&(this.index=h.requestsCount++,h.requests[this.index]=this)},h.prototype.onSuccess=function(){this.emit("success"),this.cleanup()},h.prototype.onData=function(t){this.emit("data",t),this.onSuccess()},h.prototype.onError=function(t){this.emit("error",t),this.cleanup(!0)},h.prototype.cleanup=function(t){if(void 0!==this.xhr&&null!==this.xhr){if(this.hasXDR()?this.xhr.onload=this.xhr.onerror=c:this.xhr.onreadystatechange=c,t)try{this.xhr.abort()}catch(t){}"undefined"!=typeof document&&delete h.requests[this.index],this.xhr=null}},h.prototype.onLoad=function(){var t;try{var e;try{e=this.xhr.getResponseHeader("Content-Type")}catch(t){}t=("application/octet-stream"===e||"application/octet-stream; charset=UTF-8"===e)&&this.xhr.response||this.xhr.responseText}catch(t){this.onError(t)}null!=t&&this.onData(t)},h.prototype.hasXDR=function(){return"undefined"!=typeof XDomainRequest&&!this.xs&&this.enablesXDR},h.prototype.abort=function(){this.cleanup()},h.requestsCount=0,h.requests={},"undefined"!=typeof document)if("function"==typeof attachEvent)attachEvent("onunload",p);else if("function"==typeof addEventListener){var f="onpagehide"in self?"pagehide":"unload";addEventListener(f,p,!1)}function p(){for(var t in h.requests)h.requests.hasOwnProperty(t)&&h.requests[t].abort()}},function(t,e){t.exports=Object.keys||function(t){var e=[],n=Object.prototype.hasOwnProperty;for(var r in t)n.call(t,r)&&e.push(r);return e}},function(t,e){var n={}.toString;t.exports=Array.isArray||function(t){return"[object Array]"==n.call(t)}},function(t,e){t.exports=function(t,e,n){var r=t.byteLength;if(e=e||0,n=n||r,t.slice)return t.slice(e,n);if(e<0&&(e+=r),n<0&&(n+=r),n>r&&(n=r),e>=r||e>=n||0===r)return new ArrayBuffer(0);for(var o=new Uint8Array(t),i=new Uint8Array(n-e),s=e,a=0;s<n;s++,a++)i[a]=o[s];return i.buffer}},function(t,e){function n(){}t.exports=function(t,e,r){var o=!1;return r=r||n,i.count=t,0===t?e():i;function i(t,n){if(i.count<=0)throw new Error("after called too many times");--i.count,t?(o=!0,e(t),e=r):0!==i.count||o||e(null,n)}}},function(t,e){
/*! https://mths.be/utf8js v2.1.2 by @mathias */
var n,r,o,i=String.fromCharCode;function s(t){for(var e,n,r=[],o=0,i=t.length;o<i;)(e=t.charCodeAt(o++))>=55296&&e<=56319&&o<i?56320==(64512&(n=t.charCodeAt(o++)))?r.push(((1023&e)<<10)+(1023&n)+65536):(r.push(e),o--):r.push(e);return r}function a(t,e){if(t>=55296&&t<=57343){if(e)throw Error("Lone surrogate U+"+t.toString(16).toUpperCase()+" is not a scalar value");return!1}return!0}function c(t,e){return i(t>>e&63|128)}function u(t,e){if(0==(4294967168&t))return i(t);var n="";return 0==(4294965248&t)?n=i(t>>6&31|192):0==(4294901760&t)?(a(t,e)||(t=65533),n=i(t>>12&15|224),n+=c(t,6)):0==(4292870144&t)&&(n=i(t>>18&7|240),n+=c(t,12),n+=c(t,6)),n+=i(63&t|128)}function h(){if(o>=r)throw Error("Invalid byte index");var t=255&n[o];if(o++,128==(192&t))return 63&t;throw Error("Invalid continuation byte")}function f(t){var e,i;if(o>r)throw Error("Invalid byte index");if(o==r)return!1;if(e=255&n[o],o++,0==(128&e))return e;if(192==(224&e)){if((i=(31&e)<<6|h())>=128)return i;throw Error("Invalid continuation byte")}if(224==(240&e)){if((i=(15&e)<<12|h()<<6|h())>=2048)return a(i,t)?i:65533;throw Error("Invalid continuation byte")}if(240==(248&e)&&(i=(7&e)<<18|h()<<12|h()<<6|h())>=65536&&i<=1114111)return i;throw Error("Invalid UTF-8 detected")}t.exports={version:"2.1.2",encode:function(t,e){for(var n=!1!==(e=e||{}).strict,r=s(t),o=r.length,i=-1,a="";++i<o;)a+=u(r[i],n);return a},decode:function(t,e){var a=!1!==(e=e||{}).strict;n=s(t),r=n.length,o=0;for(var c,u=[];!1!==(c=f(a));)u.push(c);return function(t){for(var e,n=t.length,r=-1,o="";++r<n;)(e=t[r])>65535&&(o+=i((e-=65536)>>>10&1023|55296),e=56320|1023&e),o+=i(e);return o}(u)}}},function(t,e){!function(){"use strict";for(var t="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",n=new Uint8Array(256),r=0;r<t.length;r++)n[t.charCodeAt(r)]=r;e.encode=function(e){var n,r=new Uint8Array(e),o=r.length,i="";for(n=0;n<o;n+=3)i+=t[r[n]>>2],i+=t[(3&r[n])<<4|r[n+1]>>4],i+=t[(15&r[n+1])<<2|r[n+2]>>6],i+=t[63&r[n+2]];return o%3==2?i=i.substring(0,i.length-1)+"=":o%3==1&&(i=i.substring(0,i.length-2)+"=="),i},e.decode=function(t){var e,r,o,i,s,a=.75*t.length,c=t.length,u=0;"="===t[t.length-1]&&(a--,"="===t[t.length-2]&&a--);var h=new ArrayBuffer(a),f=new Uint8Array(h);for(e=0;e<c;e+=4)r=n[t.charCodeAt(e)],o=n[t.charCodeAt(e+1)],i=n[t.charCodeAt(e+2)],s=n[t.charCodeAt(e+3)],f[u++]=r<<2|o>>4,f[u++]=(15&o)<<4|i>>2,f[u++]=(3&i)<<6|63&s;return h}}()},function(t,e){var n=void 0!==n?n:"undefined"!=typeof WebKitBlobBuilder?WebKitBlobBuilder:"undefined"!=typeof MSBlobBuilder?MSBlobBuilder:"undefined"!=typeof MozBlobBuilder&&MozBlobBuilder,r=function(){try{return 2===new Blob(["hi"]).size}catch(t){return!1}}(),o=r&&function(){try{return 2===new Blob([new Uint8Array([1,2])]).size}catch(t){return!1}}(),i=n&&n.prototype.append&&n.prototype.getBlob;function s(t){return t.map((function(t){if(t.buffer instanceof ArrayBuffer){var e=t.buffer;if(t.byteLength!==e.byteLength){var n=new Uint8Array(t.byteLength);n.set(new Uint8Array(e,t.byteOffset,t.byteLength)),e=n.buffer}return e}return t}))}function a(t,e){e=e||{};var r=new n;return s(t).forEach((function(t){r.append(t)})),e.type?r.getBlob(e.type):r.getBlob()}function c(t,e){return new Blob(s(t),e||{})}"undefined"!=typeof Blob&&(a.prototype=Blob.prototype,c.prototype=Blob.prototype),t.exports=r?o?Blob:c:i?a:void 0},function(t,e,n){(function(e){var r=n(17),o=n(4);t.exports=h;var i,s=/\n/g,a=/\\n/g;function c(){}function u(){return"undefined"!=typeof self?self:"undefined"!=typeof window?window:void 0!==e?e:{}}function 
h(t){if(r.call(this,t),this.query=this.query||{},!i){var e=u();i=e.___eio=e.___eio||[]}this.index=i.length;var n=this;i.push((function(t){n.onData(t)})),this.query.j=this.index,"function"==typeof addEventListener&&addEventListener("beforeunload",(function(){n.script&&(n.script.onerror=c)}),!1)}o(h,r),h.prototype.supportsBinary=!1,h.prototype.doClose=function(){this.script&&(this.script.parentNode.removeChild(this.script),this.script=null),this.form&&(this.form.parentNode.removeChild(this.form),this.form=null,this.iframe=null),r.prototype.doClose.call(this)},h.prototype.doPoll=function(){var t=this,e=document.createElement("script");this.script&&(this.script.parentNode.removeChild(this.script),this.script=null),e.async=!0,e.src=this.uri(),e.onerror=function(e){t.onError("jsonp poll error",e)};var n=document.getElementsByTagName("script")[0];n?n.parentNode.insertBefore(e,n):(document.head||document.body).appendChild(e),this.script=e,"undefined"!=typeof navigator&&/gecko/i.test(navigator.userAgent)&&setTimeout((function(){var t=document.createElement("iframe");document.body.appendChild(t),document.body.removeChild(t)}),100)},h.prototype.doWrite=function(t,e){var n=this;if(!this.form){var r,o=document.createElement("form"),i=document.createElement("textarea"),c=this.iframeId="eio_iframe_"+this.index;o.className="socketio",o.style.position="absolute",o.style.top="-1000px",o.style.left="-1000px",o.target=c,o.method="POST",o.setAttribute("accept-charset","utf-8"),i.name="d",o.appendChild(i),document.body.appendChild(o),this.form=o,this.area=i}function u(){h(),e()}function h(){if(n.iframe)try{n.form.removeChild(n.iframe)}catch(t){n.onError("jsonp polling iframe removal error",t)}try{var t='<iframe src="javascript:0" name="'+n.iframeId+'">';r=document.createElement(t)}catch(t){(r=document.createElement("iframe")).name=n.iframeId,r.src="javascript:0"}r.id=n.iframeId,n.form.appendChild(r),n.iframe=r}this.form.action=this.uri(),h(),t=t.replace(a,"\\\n"),this.area.value=t.replace(s,"\\n");try{this.form.submit()}catch(t){}this.iframe.attachEvent?this.iframe.onreadystatechange=function(){"complete"===n.iframe.readyState&&u()}:this.iframe.onload=u}}).call(this,n(7))},function(t,e,n){(function(e){var r,o,i=n(9),s=n(2),a=n(3),c=n(4),u=n(19),h=n(0)("engine.io-client:websocket");if("undefined"!=typeof WebSocket?r=WebSocket:"undefined"!=typeof self&&(r=self.WebSocket||self.MozWebSocket),"undefined"==typeof window)try{o=n(50)}catch(t){}var f=r||o;function p(t){t&&t.forceBase64&&(this.supportsBinary=!1),this.perMessageDeflate=t.perMessageDeflate,this.usingBrowserWebSocket=r&&!t.forceNode,this.protocols=t.protocols,this.usingBrowserWebSocket||(f=o),i.call(this,t)}t.exports=p,c(p,i),p.prototype.name="websocket",p.prototype.supportsBinary=!0,p.prototype.doOpen=function(){if(this.check()){var t=this.uri(),e=this.protocols,n={agent:this.agent,perMessageDeflate:this.perMessageDeflate};n.pfx=this.pfx,n.key=this.key,n.passphrase=this.passphrase,n.cert=this.cert,n.ca=this.ca,n.ciphers=this.ciphers,n.rejectUnauthorized=this.rejectUnauthorized,this.extraHeaders&&(n.headers=this.extraHeaders),this.localAddress&&(n.localAddress=this.localAddress);try{this.ws=this.usingBrowserWebSocket&&!this.isReactNative?e?new f(t,e):new f(t):new f(t,e,n)}catch(t){return this.emit("error",t)}void 
0===this.ws.binaryType&&(this.supportsBinary=!1),this.ws.supports&&this.ws.supports.binary?(this.supportsBinary=!0,this.ws.binaryType="nodebuffer"):this.ws.binaryType="arraybuffer",this.addEventListeners()}},p.prototype.addEventListeners=function(){var t=this;this.ws.onopen=function(){t.onOpen()},this.ws.onclose=function(){t.onClose()},this.ws.onmessage=function(e){t.onData(e.data)},this.ws.onerror=function(e){t.onError("websocket error",e)}},p.prototype.write=function(t){var n=this;this.writable=!1;for(var r=t.length,o=0,i=r;o<i;o++)!function(t){s.encodePacket(t,n.supportsBinary,(function(o){if(!n.usingBrowserWebSocket){var i={};if(t.options&&(i.compress=t.options.compress),n.perMessageDeflate)("string"==typeof o?e.byteLength(o):o.length)<n.perMessageDeflate.threshold&&(i.compress=!1)}try{n.usingBrowserWebSocket?n.ws.send(o):n.ws.send(o,i)}catch(t){h("websocket closed before onclose event")}--r||a()}))}(t[o]);function a(){n.emit("flush"),setTimeout((function(){n.writable=!0,n.emit("drain")}),0)}},p.prototype.onClose=function(){i.prototype.onClose.call(this)},p.prototype.doClose=function(){void 0!==this.ws&&this.ws.close()},p.prototype.uri=function(){var t=this.query||{},e=this.secure?"wss":"ws",n="";return this.port&&("wss"===e&&443!==Number(this.port)||"ws"===e&&80!==Number(this.port))&&(n=":"+this.port),this.timestampRequests&&(t[this.timestampParam]=u()),this.supportsBinary||(t.b64=1),(t=a.encode(t)).length&&(t="?"+t),e+"://"+(-1!==this.hostname.indexOf(":")?"["+this.hostname+"]":this.hostname)+n+this.path+t},p.prototype.check=function(){return!(!f||"__initialize"in f&&this.name===p.prototype.name)}}).call(this,n(6).Buffer)},function(t,e){},function(t,e){t.exports=function(t,e){for(var n=[],r=(e=e||0)||0;r<t.length;r++)n[r-e]=t[r];return n}},function(t,e){function n(t){t=t||{},this.ms=t.min||100,this.max=t.max||1e4,this.factor=t.factor||2,this.jitter=t.jitter>0&&t.jitter<=1?t.jitter:0,this.attempts=0}t.exports=n,n.prototype.duration=function(){var t=this.ms*Math.pow(this.factor,this.attempts++);if(this.jitter){var e=Math.random(),n=Math.floor(e*this.jitter*t);t=0==(1&Math.floor(10*e))?t-n:t+n}return 0|Math.min(t,this.max)},n.prototype.reset=function(){this.attempts=0},n.prototype.setMin=function(t){this.ms=t},n.prototype.setMax=function(t){this.max=t},n.prototype.setJitter=function(t){this.jitter=t}},function(t,e,n){var r=n(54)({"&":"&amp;","<":"&lt;",">":"&gt;",'"':"&quot;","'":"&#39;"});t.exports=r},function(t,e){t.exports=function(t){return function(e){return null==t?void 0:t[e]}}},function(t,e,n){var r=n(56);t.exports=function(t){return null==t?"":r(t)}},function(t,e,n){var r=n(10),o=n(59),i=n(60),s=n(61),a=r?r.prototype:void 0,c=a?a.toString:void 0;t.exports=function t(e){if("string"==typeof e)return e;if(i(e))return o(e,t)+"";if(s(e))return c?c.call(e):"";var n=e+"";return"0"==n&&1/e==-1/0?"-0":n}},function(t,e,n){var r=n(58),o="object"==typeof self&&self&&self.Object===Object&&self,i=r||o||Function("return this")();t.exports=i},function(t,e,n){(function(e){var n="object"==typeof e&&e&&e.Object===Object&&e;t.exports=n}).call(this,n(7))},function(t,e){t.exports=function(t,e){for(var n=-1,r=null==t?0:t.length,o=Array(r);++n<r;)o[n]=e(t[n],n,t);return o}},function(t,e){var n=Array.isArray;t.exports=n},function(t,e,n){var r=n(62),o=n(65);t.exports=function(t){return"symbol"==typeof t||o(t)&&"[object Symbol]"==r(t)}},function(t,e,n){var r=n(10),o=n(63),i=n(64),s=r?r.toStringTag:void 0;t.exports=function(t){return null==t?void 0===t?"[object Undefined]":"[object Null]":s&&s in 
Object(t)?o(t):i(t)}},function(t,e,n){var r=n(10),o=Object.prototype,i=o.hasOwnProperty,s=o.toString,a=r?r.toStringTag:void 0;t.exports=function(t){var e=i.call(t,a),n=t[a];try{t[a]=void 0;var r=!0}catch(t){}var o=s.call(t);return r&&(e?t[a]=n:delete t[a]),o}},function(t,e){var n=Object.prototype.toString;t.exports=function(t){return n.call(t)}},function(t,e){t.exports=function(t){return null!=t&&"object"==typeof t}},function(t,e,n){},function(t,e,n){},function(t,e,n){"use strict";n.r(e);var r=n(25),o=n.n(r);function i(t,e,n,r){var o,i=!1,s=0;function a(){o&&clearTimeout(o)}function c(){var c=this,u=Date.now()-s,h=arguments;function f(){s=Date.now(),n.apply(c,h)}function p(){o=void 0}i||(r&&!o&&f(),a(),void 0===r&&u>t?f():!0!==e&&(o=setTimeout(r?p:f,void 0===r?t-u:t)))}return"boolean"!=typeof e&&(r=n,n=e,e=void 0),c.cancel=function(){a(),i=!0},c}var s=n(26),a=n.n(s),c=document.getElementById("leaderboard"),u=document.querySelectorAll("#leaderboard table tr");function h(t){t?c.classList.add("hidden"):c.classList.remove("hidden")}var f=[],p=0,l=0;function d(t){l||(l=t.t,p=Date.now()),f.push(t),function(t){for(var e=0;e<t.length;e++)u[e+1].innerHTML="<td>".concat(a()(t[e].username.slice(0,15))||"Anonymous","</td><td>").concat(t[e].score,"</td>");for(var n=t.length;n<5;n++)u[n+1].innerHTML="<td>-</td><td>-</td>"}(t.leaderboard);var e=g();e>0&&f.splice(0,e)}function y(){return l+(Date.now()-p)-100}function g(){for(var t=y(),e=f.length-1;e>=0;e--)if(f[e].t<=t)return e;return-1}function m(t,e,n){if(!e)return t;var r={};return Object.keys(t).forEach((function(o){r[o]="direction"===o?function(t,e,n){return Math.abs(e-t)>=Math.PI?t>e?t+(e+2*Math.PI-t)*n:t-(e-2*Math.PI-t)*n:t+(e-t)*n}(t[o],e[o],n):t[o]+(e[o]-t[o])*n})),r}function v(t,e,n){return t.map((function(t){return m(t,e.find((function(e){return t.id===e.id})),n)}))}var w=n(24),b=window.location.protocol.includes("https")?"wss":"ws",C=o()("".concat(b,"://").concat(window.location.host),{reconnection:!1}),A=new Promise((function(t){C.on("connect",(function(){console.log("Connected to server!"),t()}))})),E=i(20,(function(t){C.emit(w.MSG_TYPES.INPUT,t)})),k={},B=Promise.all(["ship.svg","bullet.svg"].map((function(t){return new Promise((function(e){var n=new Image;n.onload=function(){console.log("Downloaded ".concat(t)),k[t]=n,e()},n.src="/assets/".concat(t)}))})));var x,R,S,T=function(t){return k[t]},P=n(24),_=P.PLAYER_RADIUS,F=P.PLAYER_MAX_HP,O=P.BULLET_RADIUS,U=P.MAP_SIZE,I=document.getElementById("game-canvas"),L=I.getContext("2d");function N(){var t=Math.max(1,800/window.innerWidth);I.width=t*window.innerWidth,I.height=t*window.innerHeight}function D(){var t=function(){if(!l)return{};var t=g(),e=y();if(t<0||t===f.length-1)return f[f.length-1];var n=f[t],r=f[t+1],o=(e-n.t)/(r.t-n.t);return{me:m(n.me,r.me,o),others:v(n.others,r.others,o),bullets:v(n.bullets,r.bullets,o)}}(),e=t.me,n=t.others,r=t.bullets;e&&(M(e.x,e.y),L.strokeStyle="black",L.lineWidth=2,L.strokeRect(I.width/2-e.x,I.height/2-e.y,U,U),r.forEach(q.bind(null,e)),j(e,e),n.forEach(j.bind(null,e)))}function M(t,e){I.width,I.height;backgroundGradient.addColorStop(0,"gray"),backgroundGradient.addColorStop(1,"gray"),L.fillStyle=backgroundGradient,L.fillRect(0,0,I.width,I.height)}function j(t,e){var n=e.x,r=e.y,o=e.direction,i=I.width/2+n-t.x,s=I.height/2+r-t.y;L.save(),L.translate(i,s),L.rotate(o),L.drawImage(T("ship.svg"),-_,-_,2*_,2*_),L.restore(),L.fillStyle="white",L.fillRect(i-_,s+_+8,2*_,2),L.fillStyle="red",L.fillRect(i-_+2*_*e.hp/F,s+_+8,2*_*(1-e.hp/F),2)}function q(t,e){var 
n=e.x,r=e.y;L.drawImage(T("bullet.svg"),I.width/2+n-t.x-O,I.height/2+r-t.y-O,2*O,2*O)}function Y(){var t=Date.now()/7500;M(U/2+800*Math.cos(t),U/2+800*Math.sin(t))}N(),window.addEventListener("resize",(x=40,R=N,void 0===S?i(x,R,!1):i(x,S,!1!==R)));var z=setInterval(Y,1e3/60);function H(t){W(t.clientX,t.clientY)}function X(t){var e=t.touches[0];W(e.clientX,e.clientY)}function W(t,e){var n=Math.atan2(t-window.innerWidth/2,window.innerHeight/2-e);E(n)}n(66),n(67);var J,G=document.getElementById("play-menu"),V=document.getElementById("play-button"),$=document.getElementById("username-input");Promise.all([(J=function(){window.removeEventListener("mousemove",H),window.removeEventListener("click",H),window.removeEventListener("touchstart",X),window.removeEventListener("touchmove",X),clearInterval(z),z=setInterval(Y,1e3/60),G.classList.remove("hidden"),h(!0)},A.then((function(){C.on(w.MSG_TYPES.GAME_UPDATE,d),C.on(w.MSG_TYPES.GAME_OVER,J),C.on("disconnect",(function(){console.log("Disconnected from server."),document.getElementById("disconnect-modal").classList.remove("hidden"),document.getElementById("reconnect-button").onclick=function(){window.location.reload()}}))}))),B]).then((function(){G.classList.remove("hidden"),$.focus(),V.onclick=function(){var t;t=$.value,C.emit(w.MSG_TYPES.JOIN_GAME,t),G.classList.add("hidden"),p=0,l=0,window.addEventListener("mousemove",H),window.addEventListener("click",H),window.addEventListener("touchstart",X),window.addEventListener("touchmove",X),clearInterval(z),z=setInterval(D,1e3/60),h(!1)}})).catch(console.error)}]);
|
*
* @author Feross Aboukhadijeh <http://feross.org>
* @license MIT
|
index.ts
|
// @ts-ignore
import App from './App.svelte';
const app = new App({
target: document.body,
});
export default app;
// Hot Module Replacement (HMR) - Remove this snippet to remove HMR.
// Learn more: https://www.snowpack.dev/#hot-module-replacement
|
import.meta.hot.accept();
import.meta.hot.dispose(() => {
app.$destroy();
});
}
// Type override for HMR so TS doesn't complain
declare global {
interface ImportMeta {
hot: {
accept: Function;
dispose: Function;
};
}
}
|
if (import.meta.hot) {
|
core.go
|
package carrot
import (
"log"
"sync"
"time"
"github.com/gorilla/websocket"
)
func receiveMsg(wsconn *websocket.Conn, done chan *Routine, rout *Routine) {
for {
		_, message, err := wsconn.ReadMessage()
		if err != nil {
			log.Println("read:", err)
			return
		}
		// Record timings only for reads that actually succeeded.
		rout.ReceiveTime = time.Now()
		rout.Diff = rout.ReceiveTime.Sub(rout.SendTime)
		rout.ReceivedMsg = string(message)
|
}
func writeMsg(wsconn *websocket.Conn, base *Base, rout *Routine) {
time.Sleep(time.Second * time.Duration(base.Delay))
rout.SendTime = time.Now()
	if err := wsconn.WriteMessage(websocket.TextMessage, base.Msg); err != nil {
		log.Println("write:", err)
	}
}
func singleTest(counter *Counter, queue chan *Routine, base *Base, rout *Routine) {
doneCh := make(chan *Routine)
conn, err := CreateSocket(base.URL, base.Proto, base.Path, counter)
if err != nil {
return
}
go writeMsg(conn, base, rout)
go receiveMsg(conn, doneCh, rout)
queue <- <-doneCh
}
func LoadTest(base *Base, latencyCh chan []float64, timeCh chan []time.Time) {
queue := make(chan *Routine, 1)
globalCounter := &Counter{0, sync.Mutex{}, 0, 0}
localCounter := 0
var latency []float64
var timeSeries []time.Time
for range time.Tick(time.Millisecond * time.Duration(base.TickDelay)) {
routine := &Routine{time.Now(), time.Now(), 0, ""}
go singleTest(globalCounter, queue, base, routine)
localCounter++
if localCounter == base.Count {
break
}
}
go func() {
bufferLimit := 0
for req := range queue {
latency = append(latency, req.Diff.Seconds()*1000)
timeSeries = append(timeSeries, req.SendTime)
bufferLimit++
if bufferLimit == base.Count {
latencyCh <- latency
timeCh <- timeSeries
}
}
}()
}
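// Usage sketch (illustrative; the literal values are made up, but the field
// names match how Base is consumed above):
//
//	base := &Base{URL: "localhost:8080", Proto: "ws", Path: "/echo",
//		Msg: []byte("ping"), Count: 100, Delay: 1, TickDelay: 10}
//	latencyCh := make(chan []float64)
//	timeCh := make(chan []time.Time)
//	LoadTest(base, latencyCh, timeCh)
//	latencies := <-latencyCh // blocks until all Count results are collected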
|
done <- rout
}
|
populate_db.py
|
from django.core.management.base import BaseCommand
from django.db.utils import OperationalError
from customers.models import Customer
from geolocation.models import Location
import csv
import sys
class Command(BaseCommand):
"""
Command that populates the Customers table
"""
def __init__(self, *args, **kwargs):
super().__init__()
self.customers = self._get_customer_from_file()
self.cities = [c['city'] for c in self.customers]
def handle(self, *args, **options):
sys.stdout.write("Populating db...\n")
try:
for customer in self.customers:
Customer.objects.get_or_create(
id=customer['id'],
email=customer['email'],
first_name=customer['first_name'],
last_name=customer['last_name'],
gender=customer['gender'],
company=customer['company'],
title=customer['title']
)
i = 1
for city in self.cities:
customer = Customer.objects.get(id=i)
Location.objects.get_or_create(
customer=customer,
|
longitude=0
)
i += 1
except OperationalError as error:
raise error
sys.stdout.write("Db populated\n")
def _get_customer_from_file(self):
with open('./customers.csv') as file:
reader = csv.DictReader(file)
return [{
'id': row['id'],
'email': row['email'],
'first_name': row['first_name'],
'last_name': row['last_name'],
'gender': row['gender'],
'company': row['company'],
'title': row['title'],
'city': row['city']
}
for row in reader]
|
city=city,
latitude=0,
|
rabin_miller.py
|
from __future__ import print_function
# Primality Testing with the Rabin-Miller Algorithm
import random
def rabinMiller(num):
s = num - 1
t = 0
while s % 2 == 0:
s = s // 2
t += 1
for trials in range(5):
a = random.randrange(2, num - 1)
v = pow(a, s, num)
if v != 1:
i = 0
while v != (num - 1):
if i == t - 1:
return False
else:
i = i + 1
v = (v ** 2) % num
return True
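# Worked example (illustrative): for num = 97, num - 1 = 96 = 2**5 * 3, so
# s = 3 and t = 5. Each trial picks a random base a, computes pow(a, 3, 97),
# then squares up to t - 1 more times looking for 96 (i.e. -1 mod 97).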
def isPrime(num):
if (num < 2):
return False
lowPrimes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59,
61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127,
131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191,
193, 197, 199, 211, 223, 227, 229, 233, 239, 241, 251, 257,
263, 269, 271, 277, 281, 283, 293, 307, 311, 313, 317, 331,
337, 347, 349, 353, 359, 367, 373, 379, 383, 389, 397, 401,
|
719, 727, 733, 739, 743, 751, 757, 761, 769, 773, 787, 797,
809, 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, 877,
881, 883, 887, 907, 911, 919, 929, 937, 941, 947, 953, 967,
971, 977, 983, 991, 997]
if num in lowPrimes:
return True
for prime in lowPrimes:
if (num % prime) == 0:
return False
return rabinMiller(num)
def generateLargePrime(keysize = 1024):
while True:
num = random.randrange(2 ** (keysize - 1), 2 ** (keysize))
if isPrime(num):
return num
if __name__ == '__main__':
num = generateLargePrime()
    print('Prime number:', num)
    print('isPrime:', isPrime(num))
|
409, 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, 467,
479, 487, 491, 499, 503, 509, 521, 523, 541, 547, 557, 563,
569, 571, 577, 587, 593, 599, 601, 607, 613, 617, 619, 631,
641, 643, 647, 653, 659, 661, 673, 677, 683, 691, 701, 709,
|
blockchain_database.rs
|
// Copyright 2020, The Tari Project
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
// following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
// disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
// products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use crate::{
blocks::Block,
chain_storage::BlockchainDatabase,
tari_utilities::Hashable,
test_helpers::{
blockchain::{create_new_blockchain, TempDatabase},
create_block,
},
};
use std::sync::Arc;
use tari_test_utils::unpack_enum;
fn setup() -> BlockchainDatabase<TempDatabase> {
create_new_blockchain()
}
fn add_many_chained_blocks(size: usize, db: &BlockchainDatabase<TempDatabase>) -> Vec<Arc<Block>> {
let mut prev_block_hash = db.fetch_block(0).unwrap().block.hash();
let mut blocks = Vec::with_capacity(size);
for i in 1..=size as u64 {
let mut block = create_block(1, i, vec![]);
block.header.prev_hash = prev_block_hash.clone();
prev_block_hash = block.hash();
let block = Arc::new(block);
db.add_block(block.clone()).unwrap().assert_added();
blocks.push(block);
}
blocks
}
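// Linkage sketch (illustrative, using only calls exercised in the tests below):
//
//   let db = setup();
//   add_many_chained_blocks(2, &db);
//   let parent_hash = db.fetch_block(1).unwrap().block.hash();
//   let child = db.fetch_block(2).unwrap().block;
//   assert_eq!(child.header.prev_hash, parent_hash);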
mod fetch_blocks {
use super::*;
#[test]
fn it_returns_genesis() {
let db = setup();
let blocks = db.fetch_blocks(0..).unwrap();
assert_eq!(blocks.len(), 1);
}
#[test]
fn it_returns_all() {
let db = setup();
add_many_chained_blocks(4, &db);
let blocks = db.fetch_blocks(..).unwrap();
assert_eq!(blocks.len(), 5);
for i in 0..=4 {
assert_eq!(blocks[i].block.header.height, i as u64);
}
}
#[test]
fn it_returns_one() {
let db = setup();
let new_blocks = add_many_chained_blocks(1, &db);
let blocks = db.fetch_blocks(1..=1).unwrap();
assert_eq!(blocks.len(), 1);
assert_eq!(blocks[0].block.hash(), new_blocks[0].hash());
}
#[test]
fn it_returns_nothing_if_asking_for_blocks_out_of_range() {
let db = setup();
add_many_chained_blocks(1, &db);
let blocks = db.fetch_blocks(2..).unwrap();
assert!(blocks.is_empty());
}
#[test]
fn it_returns_blocks_between_bounds_exclusive() {
let db = setup();
add_many_chained_blocks(5, &db);
let blocks = db.fetch_blocks(3..5).unwrap();
assert_eq!(blocks.len(), 2);
assert_eq!(blocks[0].block.header.height, 3);
assert_eq!(blocks[1].block.header.height, 4);
}
#[test]
fn it_returns_blocks_between_bounds_inclusive() {
let db = setup();
add_many_chained_blocks(5, &db);
let blocks = db.fetch_blocks(3..=5).unwrap();
assert_eq!(blocks.len(), 3);
assert_eq!(blocks[0].block.header.height, 3);
assert_eq!(blocks[1].block.header.height, 4);
assert_eq!(blocks[2].block.header.height, 5);
}
#[test]
fn it_returns_blocks_to_the_tip() {
let db = setup();
add_many_chained_blocks(5, &db);
let blocks = db.fetch_blocks(3..).unwrap();
assert_eq!(blocks.len(), 3);
assert_eq!(blocks[0].block.header.height, 3);
assert_eq!(blocks[1].block.header.height, 4);
assert_eq!(blocks[2].block.header.height, 5);
}
#[test]
fn it_returns_blocks_from_genesis() {
let db = setup();
add_many_chained_blocks(5, &db);
let blocks = db.fetch_blocks(..=3).unwrap();
assert_eq!(blocks.len(), 4);
assert_eq!(blocks[0].block.header.height, 0);
assert_eq!(blocks[1].block.header.height, 1);
assert_eq!(blocks[2].block.header.height, 2);
assert_eq!(blocks[3].block.header.height, 3);
}
}
mod fetch_headers {
use super::*;
#[test]
fn it_returns_genesis() {
let db = setup();
let headers = db.fetch_headers(0..).unwrap();
assert_eq!(headers.len(), 1);
}
#[test]
fn it_returns_all() {
let db = setup();
add_many_chained_blocks(4, &db);
let headers = db.fetch_headers(..).unwrap();
assert_eq!(headers.len(), 5);
for i in 0..=4 {
assert_eq!(headers[i].height, i as u64);
}
}
#[test]
    fn it_returns_nothing_if_asking_for_headers_out_of_range()
|
#[test]
    fn it_returns_headers_between_bounds_exclusive() {
let db = setup();
add_many_chained_blocks(5, &db);
let headers = db.fetch_headers(3..5).unwrap();
assert_eq!(headers.len(), 2);
assert_eq!(headers[0].height, 3);
assert_eq!(headers[1].height, 4);
}
#[test]
    fn it_returns_headers_between_bounds_inclusive() {
let db = setup();
add_many_chained_blocks(5, &db);
let headers = db.fetch_headers(3..=5).unwrap();
assert_eq!(headers.len(), 3);
assert_eq!(headers[0].height, 3);
assert_eq!(headers[1].height, 4);
assert_eq!(headers[2].height, 5);
}
#[test]
    fn it_returns_headers_to_the_tip() {
let db = setup();
add_many_chained_blocks(5, &db);
let headers = db.fetch_headers(3..).unwrap();
assert_eq!(headers.len(), 3);
assert_eq!(headers[0].height, 3);
assert_eq!(headers[1].height, 4);
assert_eq!(headers[2].height, 5);
}
#[test]
    fn it_returns_headers_from_genesis() {
let db = setup();
add_many_chained_blocks(5, &db);
let headers = db.fetch_headers(..=3).unwrap();
assert_eq!(headers.len(), 4);
assert_eq!(headers[0].height, 0);
assert_eq!(headers[1].height, 1);
assert_eq!(headers[2].height, 2);
assert_eq!(headers[3].height, 3);
}
}
mod find_headers_after_hash {
use super::*;
use crate::chain_storage::ChainStorageError;
#[test]
fn it_returns_none_given_empty_vec() {
let db = setup();
let hashes = vec![];
assert!(db.find_headers_after_hash(hashes, 1).unwrap().is_none());
}
#[test]
fn it_returns_from_genesis() {
let db = setup();
let genesis_hash = db.fetch_block(0).unwrap().block.hash();
add_many_chained_blocks(1, &db);
let hashes = vec![genesis_hash.clone()];
let (index, headers) = db.find_headers_after_hash(hashes, 1).unwrap().unwrap();
assert_eq!(index, 0);
assert_eq!(headers.len(), 1);
assert_eq!(headers[0].prev_hash, genesis_hash);
}
#[test]
fn it_returns_the_first_headers_found() {
let db = setup();
add_many_chained_blocks(5, &db);
let hashes = (1..=3)
.rev()
.map(|i| db.fetch_block(i).unwrap().block.hash())
.collect::<Vec<_>>();
let (index, headers) = db.find_headers_after_hash(hashes, 10).unwrap().unwrap();
assert_eq!(index, 0);
assert_eq!(headers.len(), 2);
assert_eq!(headers[0], db.fetch_block(4).unwrap().block.header);
}
#[test]
fn it_ignores_unknown_hashes() {
let db = setup();
add_many_chained_blocks(5, &db);
let hashes = (2..=4)
.map(|i| db.fetch_block(i).unwrap().block.hash())
.chain(vec![vec![0; 32], vec![0; 32]])
.rev();
let (index, headers) = db.find_headers_after_hash(hashes, 1).unwrap().unwrap();
assert_eq!(index, 2);
assert_eq!(headers.len(), 1);
assert_eq!(headers[0], db.fetch_block(5).unwrap().block.header);
}
#[test]
fn it_errors_for_hashes_with_an_invalid_length() {
let db = setup();
let err = db.find_headers_after_hash(vec![vec![]], 1).unwrap_err();
unpack_enum!(ChainStorageError::InvalidArguments {..} = err);
}
}
mod fetch_block_hashes_from_header_tip {
use super::*;
#[test]
fn it_returns_genesis() {
let db = setup();
let genesis = db.fetch_tip_header().unwrap();
let hashes = db.fetch_block_hashes_from_header_tip(10, 0).unwrap();
assert_eq!(hashes.len(), 1);
assert_eq!(&hashes[0], genesis.hash());
}
#[test]
fn it_returns_empty_set_for_big_offset() {
let db = setup();
add_many_chained_blocks(5, &db);
let hashes = db.fetch_block_hashes_from_header_tip(3, 6).unwrap();
assert!(hashes.is_empty());
}
#[test]
fn it_returns_n_hashes_from_tip() {
let db = setup();
let blocks = add_many_chained_blocks(5, &db);
let hashes = db.fetch_block_hashes_from_header_tip(3, 1).unwrap();
assert_eq!(hashes.len(), 3);
assert_eq!(hashes[0], blocks[3].hash());
assert_eq!(hashes[1], blocks[2].hash());
assert_eq!(hashes[2], blocks[1].hash());
}
#[test]
fn it_returns_hashes_without_overlapping() {
let db = setup();
let blocks = add_many_chained_blocks(3, &db);
let hashes = db.fetch_block_hashes_from_header_tip(2, 0).unwrap();
assert_eq!(hashes[0], blocks[2].hash());
assert_eq!(hashes[1], blocks[1].hash());
let hashes = db.fetch_block_hashes_from_header_tip(1, 2).unwrap();
assert_eq!(hashes[0], blocks[0].hash());
}
#[test]
fn it_returns_all_hashes_from_tip() {
let db = setup();
let genesis = db.fetch_tip_header().unwrap();
let blocks = add_many_chained_blocks(5, &db);
let hashes = db.fetch_block_hashes_from_header_tip(10, 0).unwrap();
assert_eq!(hashes.len(), 6);
assert_eq!(hashes[0], blocks[4].hash());
assert_eq!(&hashes[5], genesis.hash());
}
}
|
{
let db = setup();
add_many_chained_blocks(1, &db);
let headers = db.fetch_headers(2..).unwrap();
assert!(headers.is_empty());
}
|
main.d.ts
|
import { Grid } from './Grid';
import { GridProps } from './interfaces/GridProps';
import { GridColumn } from './GridColumn';
import { GridColumnProps } from './interfaces/GridColumnProps';
import { GridColumnMenuProps } from './interfaces/GridColumnMenuProps';
import { GridColumnMenuWrapper, GridColumnMenuWrapperProps } from './columnMenu/GridColumnMenuWrapper';
import { GridColumnMenuGroup } from './columnMenu/GridColumnMenuGroup';
import { GridColumnMenuSort } from './columnMenu/GridColumnMenuSort';
import { GridColumnMenuFilter } from './columnMenu/GridColumnMenuFilter';
import { GridColumnMenuFilterUI } from './columnMenu/GridColumnMenuFilterUI';
import { GridColumnMenuFilterCell } from './columnMenu/GridColumnMenuFilterCell';
import { GridColumnMenuCheckboxFilter, GridColumnMenuCheckboxFilterProps } from './columnMenu/GridColumnMenuCheckboxFilter';
import { GridCellProps } from './interfaces/GridCellProps';
import { GridCell } from './cells/GridCell';
import { GridEditCell } from './cells/GridEditCell';
import { GridGroupCell } from './cells/GridGroupCell';
import { GridHierarchyCell } from './cells/GridHierarchyCell';
import { GridFilterCell } from './cells/GridFilterCell';
import { GridFilterCellProps } from './interfaces/GridFilterCellProps';
|
import { GridFilterOperators } from './interfaces/GridFilterOperators';
import { GridHeaderCell } from './header/GridHeaderCell';
import { GridHeaderCellProps } from './interfaces/GridHeaderCellProps';
import { GridDetailRow } from './rows/GridDetailRow';
import { GridDetailRowProps } from './interfaces/GridDetailRowProps';
import { GridRow } from './rows/GridRow';
import { GridRowProps } from './interfaces/GridRowProps';
import { GridToolbar } from './GridToolbar';
import { GridToolbarProps } from './interfaces/GridToolbarProps';
import { GridNoRecords } from './GridNoRecords';
import { GridNoRecordsProps } from './interfaces/GridNoRecordsProps';
export * from './interfaces/events';
import { GridSortSettings } from './interfaces/GridSortSettings';
import { GridPagerSettings } from './paging/GridPagerSettings';
import { GridGroupableSettings } from './interfaces/GridGroupableSettings';
import { GridColumnMenuItem } from './columnMenu/GridColumnMenuItem';
import { GridColumnMenuItemContent } from './columnMenu/GridColumnMenuItemContent';
import { GridColumnMenuItemGroup } from './columnMenu/GridColumnMenuItemGroup';
import { GridFooterCellProps } from './interfaces/GridFooterCellProps';
import { GridSelectableMode } from './interfaces/GridSelectableSettings';
import { GridColumnMenuFilterUIProps } from './interfaces/GridColumnMenuFilterUIProps';
import { GRID_COL_INDEX_ATTRIBUTE, GRID_ROW_INDEX_ATTRIBUTE } from './constants';
import { getSelectedState, getSelectedStateFromKeyDown, setSelectedState } from '@progress/kendo-react-data-tools';
export { Grid, GridProps, GridColumn, GridColumnProps, GridCellProps, GridCell, GridEditCell, GridGroupCell, GridHierarchyCell, GridDetailRow, GridDetailRowProps, GridRow, GridRowProps, GridFilterCell, GridFilterCellProps, GridHeaderCell, GridHeaderCellProps, GridColumnMenuProps, GridColumnMenuSort, GridColumnMenuFilter, GridColumnMenuGroup, GridColumnMenuItem, GridColumnMenuItemContent, GridColumnMenuItemGroup, GridColumnMenuFilterUI, GridColumnMenuFilterUIProps, GridColumnMenuFilterCell, GridColumnMenuCheckboxFilter, GridColumnMenuCheckboxFilterProps, GridToolbar, GridToolbarProps, GridNoRecords, GridNoRecordsProps, GridSortSettings, GridPagerSettings, GridGroupableSettings, GridFooterCellProps, GridSelectableMode, GridFilterOperators, GridColumnMenuWrapper, GridColumnMenuWrapperProps, getSelectedState, setSelectedState, getSelectedStateFromKeyDown, GRID_COL_INDEX_ATTRIBUTE, GRID_ROW_INDEX_ATTRIBUTE };
| |
ceng113_hw5_260201003.py
|
# Umutcan CEYHAN 260201003
import numpy
class Game():
def __init__(self,map_width,map_height,init_time, action_cost):
# The Game initializes map parameters.
self.mapwidth = map_width
self.mapheight = map_height
# The Game initializes its map with all empty squares.
self.map = [['empty' for col in range(map_width)] for row in range(map_height)]
# The Game creates its player object.
self.player = Player()
# The Game creates its time object.
self.time = Time(0,init_time, action_cost)
# The Game initializes the player icon.
self.picon = person
# The Game sets the continue state to true (still has time)
self.cont = True
# The Game sets the safe state to false (still not in shelter)
self.safe = False
# The Game initializes the list of squares that constitutes the shelter
self.score_map = []
# The Game randomly inserts wood, dirt or water on the map.
self.generate_map()
# The Game prints the current status on the screen.
self.update_screen()
# For each square, put wood with probability of 0.3, put dirt with probability of 0.3,
# put water with probability of 0.2 or leave empty with probability of 0.2. You have
# to use 'numpy.random.randint' function.
def generate_map(self):
for row in range(self.mapheight):
for col in range(self.mapwidth):
                square = numpy.random.randint(0, 10)  # uniform over 0..9
                if(0 <= square <= 2):    # wood with probability 0.3
                    self.map[row][col] = "wood "
                elif(3 <= square <= 5):  # dirt with probability 0.3
                    self.map[row][col] = "dirt "
                elif(6 <= square <= 7):  # water with probability 0.2
                    self.map[row][col] = "water"
                else:                    # empty with probability 0.2
                    self.map[row][col] = "empty"
def show_controls(self):
print()
print("**************************** Game Control ****************************")
print("w: up s: down a: left d: rigth")
print("1: put brick 2: put dirt 3: put plank 4: put water 5: put wood")
print("q: pick e: make plank r: make brick o: exit game")
print("plank: 2 woods brick: 2 dirt 1 water")
print("plank: 2 pts brick: 3 pts enclosed square: 3 pts")
print("**********************************************************************")
print()
def show_map(self):
ppy = self.player.pos[0]
ppx = self.player.pos[1]
print()
for row in range(MAP_HEIGHT):
color_row = [COLORS[self.map[row][c]] for c in range(MAP_WIDTH)]
if row == ppy:
color_row[ppx] = color_row[ppx].replace(' ',' '+self.picon+' ')
print(''.join(color_row))
def update_screen(self):
self.player.show_inventory()
self.time.show_time()
self.show_map()
self.show_controls()
def _flood_fill(self,wmf,ppx,ppy,source,target,conn8=True):
if wmf[ppy,ppx]!=source:
return
wmf[ppy,ppx] = target
if ppy>0: self._flood_fill(wmf,ppx,ppy-1,source,target,conn8)
if ppy<wmf.shape[0]-1: self._flood_fill(wmf,ppx,ppy+1,source,target,conn8)
if ppx>0: self._flood_fill(wmf,ppx-1,ppy,source,target,conn8)
if ppx<wmf.shape[1]-1: self._flood_fill(wmf,ppx+1,ppy,source,target,conn8)
if conn8:
if ppy>0 and ppx>0: self._flood_fill(wmf,ppx-1,ppy-1,source,target,conn8)
if ppy>0 and ppx<wmf.shape[1]-1: self._flood_fill(wmf,ppx+1,ppy-1,source,target,conn8)
            if ppy<wmf.shape[0]-1 and ppx>0: self._flood_fill(wmf,ppx-1,ppy+1,source,target,conn8)
if ppy<wmf.shape[0]-1 and ppx<wmf.shape[1]-1: self._flood_fill(wmf,ppx+1,ppy+1,source,target,conn8)
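    # Illustration (not part of the assignment): with 4-connectivity the
    # diagonally-adjacent 1 is left untouched while the connected component
    # of the start cell is relabelled.
    #
    #   wmf = numpy.array([[0, 1, 0],
    #                      [1, 1, 0],
    #                      [0, 0, 1]])
    #   self._flood_fill(wmf, 1, 0, 1, 2, conn8=False)
    #   # wmf is now [[0, 2, 0],
    #   #             [2, 2, 0],
    #   #             [0, 0, 1]]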
def check_safety(self):
# This function checks if the player is in a shelter. It should be called
# at the end of each successfully executed action to check if the game
# has finished.
        # (height, width) ordering matches the wall_map[row, col] indexing below.
        wall_map = numpy.zeros((self.mapheight+2, self.mapwidth+2)).astype(int)
        wall_map_bool = [numpy.in1d(row, ['brick','plank']) for row in self.map]
wall_map[1:-1,1:-1] = numpy.array(wall_map_bool).astype(int)
label = 2
while((wall_map == 1).any()):
py = numpy.where(wall_map==1)[0][0]
px = numpy.where(wall_map==1)[1][0]
self._flood_fill(wall_map, px, py, 1, label, False)
label += 1
        ppx = self.player.pos[1]+1
        ppy = self.player.pos[0]+1
if not wall_map[ppy,ppx]:
for wall in range(2,label):
wall_map_fill = (wall_map == wall).astype(int)
self._flood_fill(wall_map_fill,ppx,ppy,0,label)
edges = [wall_map_fill[0,:],wall_map_fill[-1,:], wall_map_fill[:,0],wall_map_fill[:,-1]]
if label not in numpy.array(edges):
self.safe = True
self.score_map = wall_map_fill[1:-1,1:-1]
self.picon = happy
break
def calc_score(self):
final_map = (numpy.array(self.map) == 'brick').astype(int) + self.score_map
unique, counts = numpy.unique(final_map, return_counts=True)
score = counts[-1]*3 + counts[-2]*3 + counts[-3]*2 #area*3+brick*3+plank*2
return score
def move_player(self,direction):
self.player.move(direction)
self.update_screen()
# Pick item using the player object's pick method and update the map if
# there is an item to pick, otherwise print "There is nothing to pick!".
def pick_item(self):
ppx = self.player.pos[0] #Row
ppy = self.player.pos[1] #Column
if (self.map[ppx][ppy] == "empty"):
print("There is nothing to pick!")
else:
self.player.pick(self.map[ppx][ppy]) #Picks the item where it is
self.map[ppx][ppy] = "empty"
self.update_screen()
    # Put the given item using the player object's put method if the current
    # square is empty, otherwise print "There is nowhere to put!". If the
    # player can successfully put the item, update the map.
def put_item(self,item):
ppx = self.player.pos[0]
|
else:
print("There is nowhere to put!")
# Make the given item using the player object's corresponding method. If
# the player can not make the item, print "Not enough material!".
def make(self,item):
if(item == "e"):
if(self.player.make_plank()):
self.update_screen()
else:
print("Not enough material!")
else:
if(self.player.make_brick()):
self.update_screen()
else:
print("Not enough material!")
class Player():
def __init__(self):
# Initialize the player position at the top left corner
self.pos = [0,0]
# Initialize the inventory as empty
self.inventory = {'wood ':0,'dirt ':0,'water':0,'plank':0,'brick':0}
def move(self, direction):
# Update the player position with respect to move direction.
if(direction == "w" ):
if not(self.pos[0] == 0):
self.pos[0] -= 1
game.time.spend()
else:
print("Sorry I can not move up")
elif(direction == "s"):
if not(self.pos[0] == MAP_HEIGHT-1):
self.pos[0] += 1
game.time.spend()
else:
print("Sorry I can not move down")
elif(direction == "a"):
if not(self.pos[1] == 0):
self.pos[1] -= 1
game.time.spend()
else:
print("Sorry I can not move left")
else:
if not(self.pos[1] == MAP_WIDTH-1):
self.pos[1] += 1
game.time.spend()
else:
print("Sorry I can not move right")
return self.pos
# Pick and update the player inventory with respect to the item.
def pick(self, item):
if(item == "brick"):
self.inventory["brick"] += 1
game.time.spend()
elif(item == "dirt "):
self.inventory["dirt "] += 1
game.time.spend()
elif(item == "plank"):
self.inventory["plank"] += 1
game.time.spend()
elif(item == "water"):
self.inventory["water"] += 1
game.time.spend()
elif(item == "wood "):
self.inventory["wood "] += 1
game.time.spend()
# Put and update the player inventory with respect to the item, if the
# player has one or more of that item in the inventory. Return true if
# successfully put, otherwise false.
def put(self,item):
ppx = game.player.pos[0] #Row
ppy = game.player.pos[1] #Column
if(item == "1"):
if(self.inventory["brick"] >= 1):
self.inventory["brick"] -= 1
game.map[ppx][ppy] = "brick"
game.time.spend()
return True
else:
print("There is nothing to put!")
return False
elif(item == "2"):
if(self.inventory["dirt "] >= 1):
self.inventory["dirt "] -= 1
game.map[ppx][ppy] = "dirt "
game.time.spend()
return True
else:
print("There is nothing to put!")
return False
elif(item == "3"):
if(self.inventory["plank"] >= 1):
self.inventory["plank"] -= 1
game.map[ppx][ppy] = "plank"
game.time.spend()
return True
else:
print("There is nothing to put!")
return False
elif(item == "4"):
if(self.inventory["water"] >= 1):
self.inventory["water"] -= 1
game.map[ppx][ppy] = "water"
game.time.spend()
return True
else:
print("There is nothing to put!")
return False
else:
if(self.inventory["wood "] >= 1):
self.inventory["wood "] -= 1
game.map[ppx][ppy] = "wood "
game.time.spend()
return True
else:
print("There is nothing to put!")
return False
# Make plank and update the player inventory with respect to the action,
# if the player has enough materials. Return true if plank is successfully
# made, otherwise false.
def make_plank(self):
if(self.inventory["wood "] >= 2):
self.inventory["wood "] -= 2
self.inventory["plank"] += 1
game.time.spend()
return True
return False
# Make brick and update the player inventory with respect to the action,
# if the player has enough materials. Return true if brick is successfully
# made, otherwise false.
def make_brick(self):
if(self.inventory["dirt "] >= 2 and self.inventory["water"] >= 1):
self.inventory["dirt "] -= 2
self.inventory["water"] -= 1
self.inventory["brick"] += 1
game.time.spend()
return True
return False
# Shows the player inventory
def show_inventory(self):
print()
c = 1
for key in sorted(self.inventory.keys()):
print("{}. {}\t: {}".format(c, key, (COLORS[key]+" ")*self.inventory[key]))
c += 1
print()
class Time():
def __init__(self, mins, hours, action_cost):
self.mins = mins
self.hours = hours
self.action_cost = action_cost
# Spend the action cost and update mins and/or hours. If the time is
# up return False, otherwise True.
def spend(self):
if(self.hours > 0 and self.mins == 0):
self.mins = 60
self.mins -= self.action_cost
self.hours -= 1
return True
else:
self.mins -= self.action_cost
if(self.hours == 0 and self.mins == 0):
game.cont = False
return False
return True
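    # Example (illustrative): with action_cost = 15, spending from 1 hour
    # 0 minutes leaves 0 hours 45 minutes; spending from 0 hours 15 minutes
    # reaches 0 hours 0 minutes, sets cont to False and ends the game.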
    # Shows the remaining time in hours and mins format
def show_time(self):
print("{} hours {} minutes left!!!".format(self.hours, self.mins))
# CONSTANTS
MAP_WIDTH = 10
MAP_HEIGHT = 10
ACTION_COST = 15 #minutes
INIT_TIME = 16 #hours
person = u"\u2687"  # character  #u'U' #u'\u267f' #u'\u2687'
happy = u"\u263b" # happy character
COLORS = {'empty':'\033[40m \033[0m', 'wood ':'\033[42m \033[0m',
'dirt ':'\033[47m \033[0m', 'water':'\033[46m \033[0m',
'plank':'\033[43m \033[0m', 'brick':'\033[41m \033[0m'}
# Constant moves, items and products
moves = {"w":"up", "s":"down", "a":"left", "d":"right"}
items = {"1":"brick", "2":"dirt ", "3":"plank", "4":"water", "5":"wood "}
products = {"e":"plank", "r":"brick"}
# A Game class is instantiated each time a new game begins.
game = Game(MAP_WIDTH, MAP_HEIGHT, INIT_TIME, ACTION_COST)
out = False
################## THIS PART CAUSES AN INFINITE LOOP!!! ##################
# Implement the game play. Take the instructions from the user and execute them.
while game.cont and not game.safe:
instructions = input("Make your move : ")
if(instructions == "o"):
game.cont = False
out = True
elif(instructions == "a" or instructions == "w" or instructions == "d" or instructions == "s" ):
game.move_player(instructions)
elif(instructions == "q"):
game.pick_item()
elif(instructions == "1" or instructions == "2" or instructions == "3" or instructions == "4" or instructions == "5"):
game.put_item(instructions)
elif(instructions == "e" or instructions == "r"):
game.make(instructions)
game.check_safety()
if game.safe:
print("Congratulations! You are safe!!!")
print("Your score is {}.".format(game.calc_score()))
elif out:
print("Bye!")
else:
print("Too late! They are coming!!!")
|
ppy = self.player.pos[1]
if(self.map[ppx][ppy] == "empty"):
self.player.put(item)
self.update_screen()
|
get_networking_v1beta1_api_resources.go
|
// Code generated by go-swagger; DO NOT EDIT.
// Copyright 2017-2020 Authors of Cilium
// SPDX-License-Identifier: Apache-2.0
package networking_v1beta1
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the generate command
import (
"net/http"
"github.com/go-openapi/runtime/middleware"
)
// GetNetworkingV1beta1APIResourcesHandlerFunc turns a function with the right signature into a get networking v1beta1 API resources handler
type GetNetworkingV1beta1APIResourcesHandlerFunc func(GetNetworkingV1beta1APIResourcesParams) middleware.Responder
// Handle executing the request and returning a response
func (fn GetNetworkingV1beta1APIResourcesHandlerFunc) Handle(params GetNetworkingV1beta1APIResourcesParams) middleware.Responder {
return fn(params)
}
// GetNetworkingV1beta1APIResourcesHandler interface for that can handle valid get networking v1beta1 API resources params
type GetNetworkingV1beta1APIResourcesHandler interface {
Handle(GetNetworkingV1beta1APIResourcesParams) middleware.Responder
}
// NewGetNetworkingV1beta1APIResources creates a new http.Handler for the get networking v1beta1 API resources operation
func NewGetNetworkingV1beta1APIResources(ctx *middleware.Context, handler GetNetworkingV1beta1APIResourcesHandler) *GetNetworkingV1beta1APIResources
|
/*GetNetworkingV1beta1APIResources swagger:route GET /apis/networking.k8s.io/v1beta1/ networking_v1beta1 getNetworkingV1beta1ApiResources
get available resources
*/
type GetNetworkingV1beta1APIResources struct {
Context *middleware.Context
Handler GetNetworkingV1beta1APIResourcesHandler
}
func (o *GetNetworkingV1beta1APIResources) ServeHTTP(rw http.ResponseWriter, r *http.Request) {
route, rCtx, _ := o.Context.RouteInfo(r)
if rCtx != nil {
r = rCtx
}
var Params = NewGetNetworkingV1beta1APIResourcesParams()
if err := o.Context.BindValidRequest(r, route, &Params); err != nil { // bind params
o.Context.Respond(rw, r, route.Produces, route, err)
return
}
res := o.Handler.Handle(Params) // actually handle the request
o.Context.Respond(rw, r, route.Produces, route, res)
}
|
{
return &GetNetworkingV1beta1APIResources{Context: ctx, Handler: handler}
}
|
test_servo_sanity_check.py
|
from car_ctrl import servo
import time
#max angle turns right
#0 turns left
def test_servo_rotation():
s = servo()
print(vars(s))
print("max_angle: " +str(s.max_angle))
print("slope: " +str(s.slope))
    for i in range(0, 3):
        s.steer(s.max_angle)
        print("turning right")
        time.sleep(0.5)
    for i in range(0, 3):
        s.steer(0)
        print("turning left")
        time.sleep(0.5)
    for i in range(0, 3):
        # Assumption: the midpoint of the steering range recenters the servo.
        s.steer(s.max_angle / 2)
        time.sleep(0.5)
    print("Return to center")
s.kill_servo()
|
test_servo_rotation()
|
|
mod.rs
|
mod source;
pub use source::{Source, SourceMutationRoot, SourceRoot};
pub mod manga;
pub use manga::Manga;
pub mod chapter;
pub use chapter::Chapter;
use tanoshi_vm::extension::SourceBus;
use crate::db::MangaDatabase;
use async_graphql::{scalar, Context, Object, Result};
use rayon::prelude::*;
use tanoshi_lib::models::Input;
use serde::{Deserialize, Serialize};
#[derive(Deserialize, Serialize)]
pub struct InputList(Vec<Input>);
scalar!(InputList);
#[derive(Default)]
pub struct CatalogueRoot;
|
impl CatalogueRoot {
async fn get_popular_manga(
&self,
ctx: &Context<'_>,
#[graphql(desc = "source id")] source_id: i64,
#[graphql(desc = "page")] page: i64,
) -> Result<Vec<Manga>> {
let fetched_manga = ctx
.data::<SourceBus>()?
.get_popular_manga(source_id, page)
.await?
.into_par_iter()
.map(Manga::from)
.collect();
Ok(fetched_manga)
}
async fn get_latest_manga(
&self,
ctx: &Context<'_>,
#[graphql(desc = "source id")] source_id: i64,
#[graphql(desc = "page")] page: i64,
) -> Result<Vec<Manga>> {
let fetched_manga = ctx
.data::<SourceBus>()?
.get_latest_manga(source_id, page)
.await?
.into_par_iter()
.map(Manga::from)
.collect();
Ok(fetched_manga)
}
async fn browse_source(
&self,
ctx: &Context<'_>,
#[graphql(desc = "source id")] source_id: i64,
#[graphql(desc = "page")] page: i64,
#[graphql(desc = "query")] query: Option<String>,
#[graphql(desc = "filters")] filters: Option<InputList>,
) -> Result<Vec<Manga>> {
let fetched_manga = ctx
.data::<SourceBus>()?
.search_manga(source_id, page, query, filters.map(|filters| filters.0))
.await?
.into_par_iter()
.map(Manga::from)
.collect();
Ok(fetched_manga)
}
async fn manga_by_source_path(
&self,
ctx: &Context<'_>,
#[graphql(desc = "source id")] source_id: i64,
#[graphql(desc = "path to manga in source")] path: String,
) -> Result<Manga> {
let db = ctx.data::<MangaDatabase>()?;
let manga = if let Ok(manga) = db.get_manga_by_source_path(source_id, &path).await {
manga
} else {
let mut m: crate::db::model::Manga = ctx
.data::<SourceBus>()?
.get_manga_detail(source_id, path)
.await?
.into();
db.insert_manga(&mut m).await?;
m
};
Ok(manga.into())
}
async fn manga(
&self,
ctx: &Context<'_>,
#[graphql(desc = "manga id")] id: i64,
#[graphql(desc = "refresh data from source", default = false)] refresh: bool,
) -> Result<Manga> {
let db = ctx.data::<MangaDatabase>()?;
let manga = db.get_manga_by_id(id).await?;
if refresh {
let mut m: crate::db::model::Manga = ctx
.data::<SourceBus>()?
.get_manga_detail(manga.source_id, manga.path.clone())
.await?
.into();
m.id = manga.id;
db.insert_manga(&mut m).await?;
Ok(m.into())
} else {
Ok(manga.into())
}
}
async fn chapter(
&self,
ctx: &Context<'_>,
#[graphql(desc = "chapter id")] id: i64,
) -> Result<Chapter> {
let db = ctx.data::<MangaDatabase>()?;
Ok(db.get_chapter_by_id(id).await?.into())
}
}
|
#[Object]
|
test_claim.py
|
# -*- coding: utf-8 -*-
import unittest.mock
import pytest
import pycamunda.task
from tests.mock import raise_requests_exception_mock, not_ok_response_mock
def test_claim_params(engine_url):
claim_task = pycamunda.task.Claim(url=engine_url, id_='anId', user_id='anUserId')
assert claim_task.url == engine_url + '/task/anId/claim'
assert claim_task.query_parameters() == {}
assert claim_task.body_parameters() == {'userId': 'anUserId'}
@unittest.mock.patch('requests.Session.request')
def test_claim_calls_requests(mock, engine_url):
claim_task = pycamunda.task.Claim(url=engine_url, id_='anId', user_id='anUserId')
claim_task()
assert mock.called
assert mock.call_args[1]['method'].upper() == 'POST'
@unittest.mock.patch('requests.Session.request', raise_requests_exception_mock)
def test_claim_raises_pycamunda_exception(engine_url):
claim_task = pycamunda.task.Claim(url=engine_url, id_='anId', user_id='anUserId')
with pytest.raises(pycamunda.PyCamundaException):
claim_task()
@unittest.mock.patch('requests.Session.request', not_ok_response_mock)
@unittest.mock.patch('pycamunda.base._raise_for_status')
def test_claim_raises_for_status(mock, engine_url):
claim_task = pycamunda.task.Claim(url=engine_url, id_='anId', user_id='anUserId')
claim_task()
assert mock.called
@unittest.mock.patch('requests.Session.request', unittest.mock.MagicMock())
def
|
(engine_url):
claim_task = pycamunda.task.Claim(url=engine_url, id_='anId', user_id='anUserId')
result = claim_task()
assert result is None
|
test_claim_returns_none
|
index.ts
|
import Telegraf, { Markup, Context } from "telegraf";
import { v4 as uuidv4 } from "uuid";
import eveningQuestions from "./eveningQuestions";
import morningQuestions from "./morningQuestions";
import * as moment from 'moment'
const LocalSession = require("telegraf-session-local");
const low = require("lowdb");
const FileAsync = require("lowdb/adapters/FileAsync");
const adapter = new FileAsync("db.json");
let db: any;
const initAdapter = async () => {
db = await low(adapter);
await db.defaults({ entries: [] }).write();
};
initAdapter();
interface Quiz {
type: EntryType;
questionIndex: number;
answers: {
[property: string]: any;
};
questions: Array<Question>;
}
interface Session {
quiz: Quiz | null;
date: string
}
declare module "telegraf" {
interface Context {
session: Session;
}
}
const bot = new Telegraf(process.env.DORMOBOT_API_TOKEN as string);
bot.use(
new LocalSession({
database: "state.json",
storage: LocalSession.storageMemory
}).middleware()
);
bot.command("start", async ctx => {
// ctx.session.done = 0; // restart done counter
// ctx.session.quiz = null;
await ctx.reply("Welcome to the Dormo Bot! ⭐️");
await ctx.reply("/evening /morning");
});
type EntryType = "evening" | "morning";
interface Entry {
date: string;
user?: string;
answers: {
[propName: string]: any;
};
}
interface Alternative {
text: string;
value: any;
id?: string
}
interface Question {
text: string;
key: string;
validateFn?: (x: string) => false | any;
alternatives: Alternative[];
}
export interface Questions extends Array<Question> {
}
bot.command("evening", async ctx => {
let dateMoment = moment()
if (dateMoment.hours() < 12) {
// after midnight but still count it as last day
dateMoment = dateMoment.subtract(1, "day")
}
const date = dateMoment.format("LL")
const alreadyEntry = await getAlreadyEntry(date, ctx.from?.username)
const quiz: Quiz = {
type: "evening",
questionIndex: -1,
questions: eveningQuestions,
answers: { ...alreadyEntry.answers }
};
ctx.session.quiz = quiz;
ctx.session.date = date;
nextQuestion(ctx);
});
const getAlreadyEntry = async (date: string, user?: string) => {
const alreadyEntry = await db
.get("entries")
.find({
user: user,
date: date
})
.value()
if(alreadyEntry){
return alreadyEntry
}
  // No saved entry yet for this user/date: start with an empty answers object.
  return {
    answers: {}
  }
}
bot.command("morning", async ctx => {
const date = moment().subtract(1, "day").format("LL")
const alreadyEntry = await getAlreadyEntry(date, ctx.from?.username)
const quiz: Quiz = {
type: "morning",
questionIndex: -1,
questions: morningQuestions,
answers: { ...alreadyEntry.answers }
};
ctx.session.quiz = quiz;
ctx.session.date = date
nextQuestion(ctx);
});
bot.command("export", async ctx => {
const yo = await db
.get("entries")
.filter({
user: ctx.from?.username
})
.value()
console.log(yo)
await ctx.reply("```\n" + JSON.stringify(yo) + "\n```")
});
const keyboard = (alternatives: Alternative[], value: any) => {
return Markup.inlineKeyboard(
alternatives.map(alt => {
if (!alt.id) {
throw new Error("please add id!")
}
return Markup.callbackButton(
value === alt.value ? `→ ${alt.text} ←` : alt.text,
alt.id
)
}
)
);
};
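// Sketch (the alternatives here are hypothetical): with value === 2, the
// second button renders highlighted as "→ Okay ←" while "Bad" stays plain.
//
//   keyboard(
//     [{ text: "Bad", value: 1, id: "a" }, { text: "Okay", value: 2, id: "b" }],
//     2
//   );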
const finish = (ctx: Context) => {
ctx.reply(
"Finish!",
Markup.inlineKeyboard([Markup.callbackButton("Save", "save")]).extra()
);
};
bot.action("save", async ctx => {
if (!ctx.session.quiz) {
return;
}
const entry: Entry = {
date: ctx.session.date,
// type: ctx.session.quiz.type,
answers: ctx.session.quiz.answers,
user: ctx.from?.username
};
ctx.editMessageReplyMarkup(Markup.inlineKeyboard([]));
|
const yo = await db
.get("entries")
.filter({
user: entry.user,
date: entry.date
})
.value()
console.log('yo: ', yo);
if (yo.length) {
console.log("update!")
await db.get('entries')
.find({
user: entry.user,
date: entry.date
})
.assign({
answers: {
...yo[0].answers,
...entry.answers
}
})
.write()
} else {
console.log("add!")
await db
.get("entries")
.push(entry)
.write();
}
ctx.reply(`Added your ${ctx.session.quiz.type} entry ✅\n\nQuick commands: \n→ /morning\n→ /evening\n→ /export`);
ctx.session.quiz = null
});
const nextQuestion = (ctx: Context) => {
if (!ctx.session.quiz) {
return
}
const questions = ctx.session.quiz.questions;
ctx.session.quiz.questionIndex += 1;
const thisIndex = ctx.session.quiz.questionIndex;
if (thisIndex >= questions.length) {
return finish(ctx);
}
const q = questions[ctx.session.quiz.questionIndex];
const alts = q.alternatives.map(a => ({
...a,
id: uuidv4()
}));
alts.forEach(a => {
bot.action(a.id, async ctx => {
if (!ctx.session.quiz) {
return
}
await ctx.answerCbQuery();
ctx.session.quiz.answers[q.key] = a.value;
ctx
.editMessageReplyMarkup(keyboard(alts, ctx.session.quiz.answers[q.key]))
.catch(() => { });
if (thisIndex == ctx.session.quiz.questionIndex) {
nextQuestion(ctx);
}
});
});
ctx.reply(q.text, keyboard(alts, ctx.session.quiz.answers[q.key]).extra());
};
bot.on("text", ctx => {
if (ctx.session.quiz) {
const questions = ctx.session.quiz.questions;
const q = questions[ctx.session.quiz.questionIndex];
if (q.validateFn && ctx.message && ctx.message.text) {
if (q.validateFn(ctx.message.text)) {
ctx.session.quiz.answers[q.key] = ctx.message.text;
return nextQuestion(ctx);
} else {
return ctx.reply("We couldn't understand that answer...!");
}
}
return ctx.reply(`horse`);
} else {
return ctx.reply(`whats up homie`);
}
});
bot.launch();
| |
ListSchemaExtensionsCommand.ts
|
import { getSerdePlugin } from "@aws-sdk/middleware-serde";
import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http";
import { Command as $Command } from "@aws-sdk/smithy-client";
import {
FinalizeHandlerArguments,
Handler,
HandlerExecutionContext,
HttpHandlerOptions as __HttpHandlerOptions,
MetadataBearer as __MetadataBearer,
MiddlewareStack,
SerdeContext as __SerdeContext,
} from "@aws-sdk/types";
import { DirectoryServiceClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DirectoryServiceClient";
import { ListSchemaExtensionsRequest, ListSchemaExtensionsResult } from "../models/models_0";
import {
deserializeAws_json1_1ListSchemaExtensionsCommand,
serializeAws_json1_1ListSchemaExtensionsCommand,
} from "../protocols/Aws_json1_1";
export interface ListSchemaExtensionsCommandInput extends ListSchemaExtensionsRequest {}
export interface ListSchemaExtensionsCommandOutput extends ListSchemaExtensionsResult, __MetadataBearer {}
/**
* <p>Lists all schema extensions applied to a Microsoft AD Directory.</p>
* @example
* Use a bare-bones client and the command you need to make an API call.
* ```javascript
* import { DirectoryServiceClient, ListSchemaExtensionsCommand } from "@aws-sdk/client-directory-service"; // ES Modules import
* // const { DirectoryServiceClient, ListSchemaExtensionsCommand } = require("@aws-sdk/client-directory-service"); // CommonJS import
* const client = new DirectoryServiceClient(config);
* const command = new ListSchemaExtensionsCommand(input);
* const response = await client.send(command);
* ```
*
* @see {@link ListSchemaExtensionsCommandInput} for command's `input` shape.
* @see {@link ListSchemaExtensionsCommandOutput} for command's `response` shape.
* @see {@link DirectoryServiceClientResolvedConfig | config} for command's `input` shape.
*
*/
|
DirectoryServiceClientResolvedConfig
> {
// Start section: command_properties
// End section: command_properties
constructor(readonly input: ListSchemaExtensionsCommandInput) {
// Start section: command_constructor
super();
// End section: command_constructor
}
/**
* @internal
*/
resolveMiddleware(
clientStack: MiddlewareStack<ServiceInputTypes, ServiceOutputTypes>,
configuration: DirectoryServiceClientResolvedConfig,
options?: __HttpHandlerOptions
): Handler<ListSchemaExtensionsCommandInput, ListSchemaExtensionsCommandOutput> {
this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize));
const stack = clientStack.concat(this.middlewareStack);
const { logger } = configuration;
const clientName = "DirectoryServiceClient";
const commandName = "ListSchemaExtensionsCommand";
const handlerExecutionContext: HandlerExecutionContext = {
logger,
clientName,
commandName,
inputFilterSensitiveLog: ListSchemaExtensionsRequest.filterSensitiveLog,
outputFilterSensitiveLog: ListSchemaExtensionsResult.filterSensitiveLog,
};
const { requestHandler } = configuration;
return stack.resolve(
(request: FinalizeHandlerArguments<any>) =>
requestHandler.handle(request.request as __HttpRequest, options || {}),
handlerExecutionContext
);
}
private serialize(input: ListSchemaExtensionsCommandInput, context: __SerdeContext): Promise<__HttpRequest> {
return serializeAws_json1_1ListSchemaExtensionsCommand(input, context);
}
private deserialize(output: __HttpResponse, context: __SerdeContext): Promise<ListSchemaExtensionsCommandOutput> {
return deserializeAws_json1_1ListSchemaExtensionsCommand(output, context);
}
// Start section: command_body_extra
// End section: command_body_extra
}
|
export class ListSchemaExtensionsCommand extends $Command<
ListSchemaExtensionsCommandInput,
ListSchemaExtensionsCommandOutput,
|
test_integration.py
|
import json
import time
from collections import OrderedDict
from datetime import datetime
import requests_mock
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.models import AnonymousUser
from django.http import HttpRequest
from django.test.utils import override_settings
from django.urls import reverse
from fitbit.exceptions import HTTPConflict
from freezegun import freeze_time
from mock import patch
from requests.auth import _basic_auth_str
from fitapp import utils
from fitapp.decorators import fitbit_integration_warning
from fitapp.models import TimeSeriesDataType, UserFitbit
from fitapp.tasks import subscribe, unsubscribe
from .base import FitappTestBase
class TestIntegrationUtility(FitappTestBase):
def test_is_integrated(self):
"""Users with stored OAuth information are integrated."""
self.assertTrue(utils.is_integrated(self.user))
def test_is_not_integrated(self):
"""User is not integrated if we have no OAuth data for them"""
UserFitbit.objects.all().delete()
self.assertFalse(utils.is_integrated(self.user))
def test_unauthenticated(self):
"""User is not integrated if they aren't logged in."""
user = AnonymousUser()
self.assertFalse(utils.is_integrated(user))
class TestIntegrationDecorator(FitappTestBase):
def setUp(self):
super(TestIntegrationDecorator, self).setUp()
self.fake_request = HttpRequest()
self.fake_request.user = self.user
self.fake_view = lambda request: "hello"
self.messages = []
def _mock_decorator(self, msg=None):
def mock_error(request, message, *args, **kwargs):
self.messages.append(message)
with patch.object(messages, 'error', mock_error) as error:
return fitbit_integration_warning(msg=msg)(self.fake_view)(
self.fake_request)
def test_unauthenticated(self):
"""Message should be added if user is not logged in."""
self.fake_request.user = AnonymousUser()
results = self._mock_decorator()
self.assertEqual(results, "hello")
self.assertEqual(len(self.messages), 1)
self.assertEqual(
self.messages[0], utils.get_setting('FITAPP_DECORATOR_MESSAGE'))
def test_is_integrated(self):
"""Decorator should have no effect if user is integrated."""
results = self._mock_decorator()
self.assertEqual(results, "hello")
self.assertEqual(len(self.messages), 0)
def test_is_not_integrated(self):
"""Message should be added if user is not integrated."""
UserFitbit.objects.all().delete()
results = self._mock_decorator()
self.assertEqual(results, "hello")
self.assertEqual(len(self.messages), 1)
self.assertEqual(
self.messages[0], utils.get_setting('FITAPP_DECORATOR_MESSAGE'))
def test_custom_msg(self):
"""Decorator should support a custom message string."""
UserFitbit.objects.all().delete()
msg = "customized"
results = self._mock_decorator(msg)
self.assertEqual(results, "hello")
self.assertEqual(len(self.messages), 1)
self.assertEqual(self.messages[0], "customized")
def test_custom_msg_func(self):
"""Decorator should support a custom message function."""
UserFitbit.objects.all().delete()
msg = lambda request: "message to {0}".format(request.user)
results = self._mock_decorator(msg)
self.assertEqual(results, "hello")
self.assertEqual(len(self.messages), 1)
self.assertEqual(self.messages[0], msg(self.fake_request))
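    # Usage sketch (the view name is hypothetical; this mirrors how the
    # decorator is applied in the tests above):
    #
    #   @fitbit_integration_warning(msg="Connect your Fitbit first!")
    #   def dashboard(request):
    #       ...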
class TestLoginView(FitappTestBase):
url_name = 'fitbit-login'
def test_get(self):
"""
Login view should generate a token_url and then
redirect to an authorization URL.
"""
response = self._mock_client()
self.assertRedirectsNoFollow(response, '/complete/')
self.assertEqual(response.status_code, 302)
self.assertEqual(UserFitbit.objects.count(), 1)
def test_unauthenticated(self):
"""User must be logged in to access Login view."""
self.client.logout()
response = self._get()
self.assertEqual(response.status_code, 302)
self.assertEqual(UserFitbit.objects.count(), 1)
def test_unintegrated(self):
|
def test_next(self):
response = self._mock_client(get_kwargs={'next': '/next'})
self.assertRedirectsNoFollow(response, '/complete/')
self.assertEqual(self.client.session.get('fitbit_next', None), '/next')
self.assertEqual(UserFitbit.objects.count(), 1)
class TestCompleteView(FitappTestBase):
url_name = 'fitbit-complete'
user_id = 'userid'
token = {
'access_token': 'AccessToken123',
'refresh_token': 'RefreshToken123',
'expires_at': time.time() + 300,
'user_id': user_id
}
code = 'Code123'
def setUp(self):
super(TestCompleteView, self).setUp()
self.fbuser.delete()
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete(self, tsd_apply_async, sub_apply_async):
"""Complete view should fetch & store user's access credentials."""
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
fbuser = UserFitbit.objects.get()
sub_apply_async.assert_called_once_with(
(fbuser.fitbit_user, settings.FITAPP_SUBSCRIBER_ID), countdown=5)
tsdts = TimeSeriesDataType.objects.all()
self.assertEqual(tsd_apply_async.call_count, tsdts.count())
for i, _type in enumerate(tsdts):
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, _type.category, _type.resource,),
countdown=10 + (i * 5))
self.assertEqual(fbuser.user, self.user)
self.assertEqual(fbuser.access_token, self.token['access_token'])
self.assertEqual(fbuser.refresh_token, self.token['refresh_token'])
self.assertEqual(fbuser.fitbit_user, self.user_id)
@override_settings(FITAPP_HISTORICAL_INIT_DELAY=11)
@override_settings(FITAPP_BETWEEN_DELAY=6)
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_different_delays(self, tsd_apply_async, sub_apply_async):
"""Complete view should use configured delays"""
tsdts = TimeSeriesDataType.objects.all()
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
fbuser = UserFitbit.objects.get()
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
for i, _type in enumerate(tsdts):
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, _type.category, _type.resource,),
countdown=11 + (i * 6))
@override_settings(FITAPP_SUBSCRIPTIONS=OrderedDict([]))
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_empty_subs(self, tsd_apply_async, sub_apply_async):
"""Complete view should not import data if subs dict is empty"""
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
self.assertEqual(tsd_apply_async.call_count, 0)
@override_settings(FITAPP_SUBSCRIPTIONS=OrderedDict([('foods', [])]))
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_no_res(self, tsd_apply_async, sub_apply_async):
"""Complete view shouldn't import data if subs dict has no resources"""
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
self.assertEqual(tsd_apply_async.call_count, 0)
@override_settings(FITAPP_SUBSCRIPTIONS=OrderedDict([
('foods', ['steps'])
]))
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_bad_resources(self, tsd_apply_async, sub_apply_async):
"""
Complete view shouldn't import data if subs dict has invalid resources
"""
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertContains(
response,
"['steps'] resources are invalid for the foods category",
status_code=500
)
self.assertEqual(tsd_apply_async.call_count, 0)
@override_settings(FITAPP_SUBSCRIPTIONS=OrderedDict([
('activities', ['steps', 'calories', 'distance', 'activityCalories']),
('foods', ['log/water']),
]))
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_sub_list(self, tsd_apply_async, sub_apply_async):
"""
Complete view should only import the listed subscriptions, in the right
order
"""
activities = TimeSeriesDataType.activities
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
fbuser = UserFitbit.objects.get()
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, activities, 'steps',), countdown=10)
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, activities, 'calories',), countdown=15)
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, activities, 'distance',), countdown=20)
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, activities, 'activityCalories'), countdown=25)
tsd_apply_async.assert_any_call(
(fbuser.fitbit_user, TimeSeriesDataType.foods, 'log/water',),
countdown=30)
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_complete_already_integrated(self, tsd_apply_async, sub_apply_async):
"""
        Complete view should redirect to the error view if a user attempts to connect
an already integrated fitbit user to a second user.
"""
self.create_userfitbit(user=self.user, fitbit_user=self.user_id)
username = '{0}2'.format(self.username)
self.create_user(username=username, password=self.password)
self.client.logout()
self.client.login(username=username, password=self.password)
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(response, reverse('fitbit-error'))
self.assertEqual(UserFitbit.objects.all().count(), 1)
self.assertEqual(sub_apply_async.call_count, 0)
self.assertEqual(tsd_apply_async.call_count, 0)
def test_unauthenticated(self):
"""User must be logged in to access Complete view."""
self.client.logout()
response = self._mock_client()
self.assertEqual(response.status_code, 302)
self.assertEqual(UserFitbit.objects.count(), 0)
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_next(self, tsd_apply_async, sub_apply_async):
"""
Complete view should redirect to session['fitbit_next'] if available.
"""
self._set_session_vars(fitbit_next='/test')
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(response, '/test')
fbuser = UserFitbit.objects.get()
sub_apply_async.assert_called_once_with(
(fbuser.fitbit_user, settings.FITAPP_SUBSCRIBER_ID), countdown=5)
self.assertEqual(
tsd_apply_async.call_count, TimeSeriesDataType.objects.count())
self.assertEqual(fbuser.user, self.user)
self.assertEqual(fbuser.access_token, self.token['access_token'])
self.assertEqual(fbuser.refresh_token, self.token['refresh_token'])
self.assertEqual(fbuser.expires_at, self.token['expires_at'])
self.assertEqual(fbuser.fitbit_user, self.user_id)
def test_access_error(self):
"""
Complete view should redirect to error if access token is
inaccessible.
"""
response = self._mock_client(client_kwargs={'error': Exception})
self.assertRedirectsNoFollow(response, reverse('fitbit-error'))
self.assertEqual(UserFitbit.objects.count(), 0)
def test_no_code(self):
"""
Complete view should redirect to error if `code` param is not
present.
"""
response = self._mock_client()
self.assertRedirectsNoFollow(response, reverse('fitbit-error'))
self.assertEqual(UserFitbit.objects.count(), 0)
def test_no_access_token(self):
"""
Complete view should redirect to error if there isn't an access_token.
"""
token = self.token.copy()
token.pop('access_token')
response = self._mock_client(
client_kwargs=token, get_kwargs={'code': self.code})
self.assertRedirectsNoFollow(response, reverse('fitbit-error'))
self.assertEqual(UserFitbit.objects.count(), 0)
@patch('fitapp.tasks.subscribe.apply_async')
@patch('fitapp.tasks.get_time_series_data.apply_async')
def test_integrated(self, tsd_apply_async, sub_apply_async):
"""Complete view should overwrite existing credentials for this user.
"""
self.fbuser = self.create_userfitbit(user=self.user)
response = self._mock_client(
client_kwargs=self.token, get_kwargs={'code': self.code})
fbuser = UserFitbit.objects.get()
sub_apply_async.assert_called_with(
(fbuser.fitbit_user, settings.FITAPP_SUBSCRIBER_ID), countdown=5)
self.assertEqual(tsd_apply_async.call_count,
TimeSeriesDataType.objects.count())
self.assertEqual(fbuser.user, self.user)
self.assertEqual(fbuser.access_token, self.token['access_token'])
self.assertEqual(fbuser.refresh_token, self.token['refresh_token'])
self.assertEqual(fbuser.fitbit_user, self.user_id)
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
class TestErrorView(FitappTestBase):
url_name = 'fitbit-error'
def test_get(self):
"""Should be able to retrieve Error page."""
response = self._get()
self.assertEqual(response.status_code, 200)
def test_unauthenticated(self):
"""User must be logged in to access Error view."""
self.client.logout()
response = self._get()
self.assertEqual(response.status_code, 302)
def test_unintegrated(self):
"""No Fitbit credentials required to access Error view."""
self.fbuser.delete()
response = self._get()
self.assertEqual(response.status_code, 200)
class TestLogoutView(FitappTestBase):
url_name = 'fitbit-logout'
@patch('fitapp.tasks.unsubscribe.apply_async')
def test_get(self, apply_async):
"""Logout view should remove associated UserFitbit and redirect."""
response = self._get()
kwargs = self.fbuser.get_user_data()
del kwargs['refresh_cb']
apply_async.assert_called_once_with(kwargs=kwargs, countdown=5)
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
self.assertEqual(UserFitbit.objects.count(), 0)
@freeze_time(datetime.fromtimestamp(1483500000))
@patch('fitbit.Fitbit.subscription')
def test_get_token_expired(self, subscription):
subs_url = 'https://api.fitbit.com/1/user/-/apiSubscriptions.json'
self.fbuser.expires_at = 1483400000
self.fbuser.save()
sub = {
'ownerId': self.fbuser.fitbit_user,
'subscriberId': '1',
'subscriptionId': str(self.user.id),
'collectionType': 'user',
'ownerType': 'user'
}
subs = {'apiSubscriptions': [sub]}
tok = {
'access_token': 'fake_return_access_token',
'refresh_token': 'fake_return_refresh_token',
'expires_at': 1483600000,
}
with requests_mock.mock() as m:
m.get(subs_url, text=json.dumps(subs), status_code=200)
m.post('https://api.fitbit.com/oauth2/token', text=json.dumps(tok))
response = self._get()
mock_requests = m.request_history
assert mock_requests[0].path == '/oauth2/token'
assert mock_requests[0].headers['Authorization'] == _basic_auth_str(
settings.FITAPP_CONSUMER_KEY,
settings.FITAPP_CONSUMER_SECRET
)
assert mock_requests[1].path == '/1/user/-/apisubscriptions.json'
assert mock_requests[1].headers['Authorization'] == 'Bearer {}'.format(
tok['access_token']
)
subscription.assert_called_once_with(
sub['subscriptionId'], sub['subscriberId'], method="DELETE")
def test_unauthenticated(self):
"""User must be logged in to access Logout view."""
self.client.logout()
response = self._get()
self.assertEqual(response.status_code, 302)
self.assertEqual(UserFitbit.objects.count(), 1)
def test_unintegrated(self):
"""No Fitbit credentials required to access Logout view."""
self.fbuser.delete()
response = self._get()
self.assertRedirectsNoFollow(
response, utils.get_setting('FITAPP_LOGIN_REDIRECT'))
self.assertEqual(UserFitbit.objects.count(), 0)
@patch('fitapp.tasks.unsubscribe.apply_async')
def test_next(self, apply_async):
"""Logout view should redirect to GET['next'] if available."""
response = self._get(get_kwargs={'next': '/test'})
kwargs = self.fbuser.get_user_data()
del kwargs['refresh_cb']
apply_async.assert_called_with(kwargs=kwargs, countdown=5)
self.assertRedirectsNoFollow(response, '/test')
self.assertEqual(UserFitbit.objects.count(), 0)
class TestSubscription(FitappTestBase):
@patch('fitbit.Fitbit.subscription')
def test_subscribe(self, subscription):
subscribe.apply_async((self.fbuser.fitbit_user, 1,))
subscription.assert_called_once_with(self.user.id, 1, )
@patch('fitbit.Fitbit.subscription')
def test_subscribe_error(self, subscription):
subscription.side_effect = HTTPConflict
apply_result = subscribe.apply_async((self.fbuser.fitbit_user, 1,))
self.assertEqual(apply_result.status, 'REJECTED')
subscription.assert_called_once_with(self.user.id, 1, )
@patch('fitbit.Fitbit.subscription')
@patch('fitbit.Fitbit.list_subscriptions')
def test_unsubscribe(self, list_subscriptions, subscription):
sub = {
'ownerId': self.fbuser.fitbit_user,
'subscriberId': '1',
'subscriptionId': str(self.user.id).encode('utf8'),
'collectionType': 'user',
'ownerType': 'user'
}
list_subscriptions.return_value = {'apiSubscriptions': [sub]}
kwargs = self.fbuser.get_user_data()
del kwargs['refresh_cb']
unsubscribe.apply_async(kwargs=kwargs)
list_subscriptions.assert_called_once_with()
subscription.assert_called_once_with(
sub['subscriptionId'], sub['subscriberId'], method="DELETE")
@patch('fitbit.Fitbit.subscription')
@patch('fitbit.Fitbit.list_subscriptions')
def test_unsubscribe_error(self, list_subscriptions, subscription):
list_subscriptions.side_effect = HTTPConflict
kwargs = self.fbuser.get_user_data()
del kwargs['refresh_cb']
result = unsubscribe.apply_async(kwargs=kwargs)
self.assertEqual(result.status, 'REJECTED')
list_subscriptions.assert_called_once_with()
self.assertEqual(subscription.call_count, 0)
|
"""Fitbit credentials not required to access Login view."""
self.fbuser.delete()
response = self._mock_client()
self.assertRedirectsNoFollow(response, '/complete/')
self.assertEqual(UserFitbit.objects.count(), 0)
|
insert_image_preprocessing_op.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can be
# found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
from coremltools.converters.mil.mil.passes.pass_registry import register_pass
from coremltools.converters.mil.mil.passes.graph_pass import AbstractGraphPass
from coremltools.converters.mil.input_types import ImageType
# import mil internal ops to add it to the builder
from coremltools.converters.mil.mil.ops import defs as _ops
from coremltools.converters.mil.mil import Builder as mb
from coremltools.converters.mil.mil.types import nptype_from_builtin
import numpy as np
@register_pass(namespace="mil_backend")
class
|
(AbstractGraphPass):
"""
    Insert preprocessing ops right after the input if it is of type Image.
"""
def apply(self, prog):
for f_name, f in prog.functions.items():
if f_name == 'main':
_insert_image_preprocessing_ops(f, prog)
def _insert_image_preprocessing_ops(block, prog):
input_types = list(prog.main_input_types)
for input_type in input_types:
if isinstance(input_type, ImageType):
if input_type.name not in block.inputs:
continue
input_var = block.inputs[input_type.name]
placeholder_op = block.placeholder_inputs[input_type.name]
first_op = block.operations[0]
old_var = placeholder_op.outputs[0]
has_bias = np.any(np.array(input_type.bias) != 0)
with block:
last_output = input_var
input_nptype = nptype_from_builtin(type(last_output.dtype()))
if input_type.scale != 1:
last_output = mb.mul(x=last_output,
y=np.array(input_type.scale, dtype=input_nptype),
before_op=first_op, name=input_var.name + "__scaled__")
if has_bias:
if input_type.color_layout == "G":
last_output = mb.add(x=last_output,
y=np.array(input_type.bias, dtype=input_nptype),
before_op=first_op, name=input_var.name + "__biased__")
else:
if len(last_output.shape) == 3:
last_output = mb.add(x=last_output,
y=np.array(input_type.bias, dtype=input_nptype).reshape([3, 1, 1]),
before_op=first_op, name=input_var.name + "__biased__")
elif len(last_output.shape) == 4:
last_output = mb.add(x=last_output,
y=np.array(input_type.bias, dtype=input_nptype).reshape([1, 3, 1, 1]),
before_op=first_op, name=input_var.name + "__biased__")
else:
raise TypeError("Unsupported rank for image input type.")
if last_output != input_var:
block.replace_uses_of_var_after_op(anchor_op=last_output.op,
old_var=old_var,
new_var=last_output)
|
insert_image_preprocessing_ops
|
descriptor_pool.py
|
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Provides DescriptorPool to use as a container for proto2 descriptors.
The DescriptorPool is used in conjunction with a DescriptorDatabase to maintain
a collection of protocol buffer descriptors for use when dynamically creating
message types at runtime.
For most applications protocol buffers should be used via modules generated by
the protocol buffer compiler tool. This should only be used when the type of
protocol buffers used in an application or library cannot be predetermined.
Below is a straightforward example of how to use this class:
pool = DescriptorPool()
file_descriptor_protos = [ ... ]
for file_descriptor_proto in file_descriptor_protos:
pool.Add(file_descriptor_proto)
my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType')
The message descriptor can be used in conjunction with the message_factory
module in order to create a protocol buffer class that can be encoded and
decoded.
If you want to get a Python class for the specified proto, use the
helper functions inside google.protobuf.message_factory
directly instead of this class.
"""
__author__ = '[email protected] (Matt Toia)'
import collections
import warnings
from dis_sdk_python.dependency.google.protobuf import descriptor
from dis_sdk_python.dependency.google.protobuf import descriptor_database
from dis_sdk_python.dependency.google.protobuf import text_encoding
_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS # pylint: disable=protected-access
def _NormalizeFullyQualifiedName(name):
"""Remove leading period from fully-qualified type name.
Due to b/13860351 in descriptor_database.py, types in the root namespace are
generated with a leading period. This function removes that prefix.
Args:
name: A str, the fully-qualified symbol name.
Returns:
A str, the normalized fully-qualified symbol name.
"""
return name.lstrip('.')
def _OptionsOrNone(descriptor_proto):
"""Returns the value of the field `options`, or None if it is not set."""
if descriptor_proto.HasField('options'):
return descriptor_proto.options
else:
return None
def _IsMessageSetExtension(field):
return (field.is_extension and
field.containing_type.has_options and
field.containing_type.GetOptions().message_set_wire_format and
field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL)
class DescriptorPool(object):
"""A collection of protobufs dynamically constructed by descriptor protos."""
if _USE_C_DESCRIPTORS:
def __new__(cls, descriptor_db=None):
# pylint: disable=protected-access
return descriptor._message.DescriptorPool(descriptor_db)
def __init__(self, descriptor_db=None):
"""Initializes a Pool of proto buffs.
The descriptor_db argument to the constructor is provided to allow
specialized file descriptor proto lookup code to be triggered on demand. An
example would be an implementation which will read and compile a file
specified in a call to FindFileByName() and not require the call to Add()
at all. Results from this database will be cached internally here as well.
Args:
descriptor_db: A secondary source of file descriptors.
"""
self._internal_db = descriptor_database.DescriptorDatabase()
self._descriptor_db = descriptor_db
self._descriptors = {}
self._enum_descriptors = {}
self._service_descriptors = {}
self._file_descriptors = {}
self._toplevel_extensions = {}
# TODO(jieluo): Remove _file_desc_by_toplevel_extension after
# maybe year 2020 for compatibility issue (with 3.4.1 only).
self._file_desc_by_toplevel_extension = {}
# We store extensions in two two-level mappings: The first key is the
# descriptor of the message being extended, the second key is the extension
# full name or its tag number.
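    # Shape illustration (names and tag number hypothetical):
    #   _extensions_by_name[msg_descriptor]['pkg.my_ext'] -> FieldDescriptor
    #   _extensions_by_number[msg_descriptor][1001]       -> FieldDescriptor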
self._extensions_by_name = collections.defaultdict(dict)
self._extensions_by_number = collections.defaultdict(dict)
def _CheckConflictRegister(self, desc):
"""Check if the descriptor name conflicts with another of the same name.
Args:
desc: Descriptor of a message, enum, service or extension.
"""
desc_name = desc.full_name
for register, descriptor_type in [
(self._descriptors, descriptor.Descriptor),
(self._enum_descriptors, descriptor.EnumDescriptor),
(self._service_descriptors, descriptor.ServiceDescriptor),
(self._toplevel_extensions, descriptor.FieldDescriptor)]:
if desc_name in register:
file_name = register[desc_name].file.name
if not isinstance(desc, descriptor_type) or (
file_name != desc.file.name):
warn_msg = ('Conflict register for file "' + desc.file.name +
'": ' + desc_name +
' is already defined in file "' +
file_name + '"')
warnings.warn(warn_msg, RuntimeWarning)
return
def Add(self, file_desc_proto):
"""Adds the FileDescriptorProto and its types to this pool.
Args:
file_desc_proto: The FileDescriptorProto to add.
"""
self._internal_db.Add(file_desc_proto)
def AddSerializedFile(self, serialized_file_desc_proto):
"""Adds the FileDescriptorProto and its types to this pool.
Args:
serialized_file_desc_proto: A bytes string, serialization of the
FileDescriptorProto to add.
"""
# pylint: disable=g-import-not-at-top
from dis_sdk_python.dependency.google.protobuf import descriptor_pb2
file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
serialized_file_desc_proto)
self.Add(file_desc_proto)
def AddDescriptor(self, desc):
"""Adds a Descriptor to the pool, non-recursively.
If the Descriptor contains nested messages or enums, the caller must
explicitly register them. This method also registers the FileDescriptor
associated with the message.
Args:
desc: A Descriptor.
"""
if not isinstance(desc, descriptor.Descriptor):
raise TypeError('Expected instance of descriptor.Descriptor.')
self._CheckConflictRegister(desc)
self._descriptors[desc.full_name] = desc
self._AddFileDescriptor(desc.file)
def AddEnumDescriptor(self, enum_desc):
"""Adds an EnumDescriptor to the pool.
This method also registers the FileDescriptor associated with the enum.
Args:
enum_desc: An EnumDescriptor.
"""
if not isinstance(enum_desc, descriptor.EnumDescriptor):
raise TypeError('Expected instance of descriptor.EnumDescriptor.')
self._CheckConflictRegister(enum_desc)
self._enum_descriptors[enum_desc.full_name] = enum_desc
self._AddFileDescriptor(enum_desc.file)
def AddServiceDescriptor(self, service_desc):
"""Adds a ServiceDescriptor to the pool.
Args:
service_desc: A ServiceDescriptor.
"""
if not isinstance(service_desc, descriptor.ServiceDescriptor):
raise TypeError('Expected instance of descriptor.ServiceDescriptor.')
self._CheckConflictRegister(service_desc)
self._service_descriptors[service_desc.full_name] = service_desc
def AddExtensionDescriptor(self, extension):
"""Adds a FieldDescriptor describing an extension to the pool.
Args:
extension: A FieldDescriptor.
Raises:
AssertionError: when another extension with the same number extends the
same message.
TypeError: when the specified extension is not a
descriptor.FieldDescriptor.
"""
if not (isinstance(extension, descriptor.FieldDescriptor) and
extension.is_extension):
raise TypeError('Expected an extension descriptor.')
if extension.extension_scope is None:
self._CheckConflictRegister(extension)
self._toplevel_extensions[extension.full_name] = extension
try:
existing_desc = self._extensions_by_number[
extension.containing_type][extension.number]
except KeyError:
pass
else:
if extension is not existing_desc:
raise AssertionError(
'Extensions "%s" and "%s" both try to extend message type "%s" '
'with field number %d.' %
(extension.full_name, existing_desc.full_name,
extension.containing_type.full_name, extension.number))
self._extensions_by_number[extension.containing_type][
extension.number] = extension
self._extensions_by_name[extension.containing_type][
extension.full_name] = extension
# Also register MessageSet extensions with the type name.
if _IsMessageSetExtension(extension):
self._extensions_by_name[extension.containing_type][
extension.message_type.full_name] = extension
def AddFileDescriptor(self, file_desc):
"""Adds a FileDescriptor to the pool, non-recursively.
If the FileDescriptor contains messages or enums, the caller must explicitly
register them.
Args:
file_desc: A FileDescriptor.
"""
self._AddFileDescriptor(file_desc)
# TODO(jieluo): This is a temporary solution for FieldDescriptor.file.
# FieldDescriptor.file is added in code gen. Remove this solution after
# maybe 2020 for compatibility reason (with 3.4.1 only).
for extension in list(file_desc.extensions_by_name.values()):
self._file_desc_by_toplevel_extension[
extension.full_name] = file_desc
def _AddFileDescriptor(self, file_desc):
"""Adds a FileDescriptor to the pool, non-recursively.
If the FileDescriptor contains messages or enums, the caller must explicitly
register them.
Args:
file_desc: A FileDescriptor.
"""
if not isinstance(file_desc, descriptor.FileDescriptor):
raise TypeError('Expected instance of descriptor.FileDescriptor.')
self._file_descriptors[file_desc.name] = file_desc
def FindFileByName(self, file_name):
"""Gets a FileDescriptor by file name.
Args:
file_name: The path to the file to get a descriptor for.
Returns:
A FileDescriptor for the named file.
Raises:
KeyError: if the file cannot be found in the pool.
"""
try:
return self._file_descriptors[file_name]
except KeyError:
pass
try:
file_proto = self._internal_db.FindFileByName(file_name)
except KeyError as error:
if self._descriptor_db:
file_proto = self._descriptor_db.FindFileByName(file_name)
else:
raise error
if not file_proto:
raise KeyError('Cannot find a file named %s' % file_name)
return self._ConvertFileProtoToFileDescriptor(file_proto)
def FindFileContainingSymbol(self, symbol):
"""Gets the FileDescriptor for the file containing the specified symbol.
Args:
symbol: The name of the symbol to search for.
Returns:
A FileDescriptor that contains the specified symbol.
Raises:
KeyError: if the file cannot be found in the pool.
"""
symbol = _NormalizeFullyQualifiedName(symbol)
try:
return self._descriptors[symbol].file
except KeyError:
pass
try:
return self._enum_descriptors[symbol].file
except KeyError:
pass
try:
return self._service_descriptors[symbol].file
except KeyError:
pass
try:
return self._FindFileContainingSymbolInDb(symbol)
except KeyError:
pass
try:
return self._file_desc_by_toplevel_extension[symbol]
except KeyError:
pass
# Try nested extensions inside a message.
message_name, _, extension_name = symbol.rpartition('.')
try:
message = self.FindMessageTypeByName(message_name)
assert message.extensions_by_name[extension_name]
return message.file
except KeyError:
raise KeyError('Cannot find a file containing %s' % symbol)
def FindMessageTypeByName(self, full_name):
"""Loads the named descriptor from the pool.
Args:
full_name: The full name of the descriptor to load.
Returns:
The descriptor for the named type.
Raises:
KeyError: if the message cannot be found in the pool.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
if full_name not in self._descriptors:
self._FindFileContainingSymbolInDb(full_name)
return self._descriptors[full_name]
def FindEnumTypeByName(self, full_name):
"""Loads the named enum descriptor from the pool.
Args:
full_name: The full name of the enum descriptor to load.
Returns:
The enum descriptor for the named type.
Raises:
KeyError: if the enum cannot be found in the pool.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
if full_name not in self._enum_descriptors:
self._FindFileContainingSymbolInDb(full_name)
return self._enum_descriptors[full_name]
def FindFieldByName(self, full_name):
"""Loads the named field descriptor from the pool.
Args:
full_name: The full name of the field descriptor to load.
Returns:
The field descriptor for the named field.
Raises:
KeyError: if the field cannot be found in the pool.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
message_name, _, field_name = full_name.rpartition('.')
message_descriptor = self.FindMessageTypeByName(message_name)
return message_descriptor.fields_by_name[field_name]
def FindOneofByName(self, full_name):
"""Loads the named oneof descriptor from the pool.
Args:
full_name: The full name of the oneof descriptor to load.
Returns:
The oneof descriptor for the named oneof.
Raises:
KeyError: if the oneof cannot be found in the pool.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
message_name, _, oneof_name = full_name.rpartition('.')
message_descriptor = self.FindMessageTypeByName(message_name)
return message_descriptor.oneofs_by_name[oneof_name]
def FindExtensionByName(self, full_name):
"""Loads the named extension descriptor from the pool.
Args:
full_name: The full name of the extension descriptor to load.
Returns:
A FieldDescriptor, describing the named extension.
Raises:
KeyError: if the extension cannot be found in the pool.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
try:
# The proto compiler does not give any link between the FileDescriptor
# and top-level extensions unless the FileDescriptorProto is added to
# the DescriptorDatabase, but this can impact memory usage.
      # So we register these extensions by name explicitly.
return self._toplevel_extensions[full_name]
except KeyError:
pass
message_name, _, extension_name = full_name.rpartition('.')
try:
# Most extensions are nested inside a message.
scope = self.FindMessageTypeByName(message_name)
except KeyError:
# Some extensions are defined at file scope.
scope = self._FindFileContainingSymbolInDb(full_name)
return scope.extensions_by_name[extension_name]
def FindExtensionByNumber(self, message_descriptor, number):
"""Gets the extension of the specified message with the specified number.
Extensions have to be registered to this pool by calling
AddExtensionDescriptor.
Args:
message_descriptor: descriptor of the extended message.
number: integer, number of the extension field.
Returns:
A FieldDescriptor describing the extension.
Raises:
KeyError: when no extension with the given number is known for the
specified message.
"""
return self._extensions_by_number[message_descriptor][number]
def FindAllExtensions(self, message_descriptor):
"""Gets all the known extension of a given message.
Extensions have to be registered to this pool by calling
AddExtensionDescriptor.
Args:
message_descriptor: descriptor of the extended message.
Returns:
A list of FieldDescriptor describing the extensions.
"""
return list(self._extensions_by_number[message_descriptor].values())
def FindServiceByName(self, full_name):
"""Loads the named service descriptor from the pool.
Args:
full_name: The full name of the service descriptor to load.
Returns:
The service descriptor for the named service.
Raises:
KeyError: if the service cannot be found in the pool.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
if full_name not in self._service_descriptors:
self._FindFileContainingSymbolInDb(full_name)
return self._service_descriptors[full_name]
def _FindFileContainingSymbolInDb(self, symbol):
"""Finds the file in descriptor DB containing the specified symbol.
Args:
symbol: The name of the symbol to search for.
Returns:
A FileDescriptor that contains the specified symbol.
Raises:
KeyError: if the file cannot be found in the descriptor database.
"""
try:
file_proto = self._internal_db.FindFileContainingSymbol(symbol)
except KeyError as error:
if self._descriptor_db:
file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
else:
raise error
if not file_proto:
raise KeyError('Cannot find a file containing %s' % symbol)
return self._ConvertFileProtoToFileDescriptor(file_proto)
def _ConvertFileProtoToFileDescriptor(self, file_proto):
"""Creates a FileDescriptor from a proto or returns a cached copy.
This method also has the side effect of loading all the symbols found in
the file into the appropriate dictionaries in the pool.
Args:
file_proto: The proto to convert.
Returns:
A FileDescriptor matching the passed in proto.
"""
if file_proto.name not in self._file_descriptors:
built_deps = list(self._GetDeps(file_proto.dependency))
direct_deps = [self.FindFileByName(n) for n in file_proto.dependency]
public_deps = [direct_deps[i] for i in file_proto.public_dependency]
file_descriptor = descriptor.FileDescriptor(
pool=self,
name=file_proto.name,
package=file_proto.package,
syntax=file_proto.syntax,
options=_OptionsOrNone(file_proto),
serialized_pb=file_proto.SerializeToString(),
dependencies=direct_deps,
public_dependencies=public_deps)
scope = {}
# This loop extracts all the message and enum types from all the
# dependencies of the file_proto. This is necessary to create the
# scope of available message types when defining the passed in
# file proto.
for dependency in built_deps:
scope.update(self._ExtractSymbols(
list(dependency.message_types_by_name.values())))
scope.update((_PrefixWithDot(enum.full_name), enum)
for enum in list(dependency.enum_types_by_name.values()))
for message_type in file_proto.message_type:
message_desc = self._ConvertMessageDescriptor(
message_type, file_proto.package, file_descriptor, scope,
file_proto.syntax)
file_descriptor.message_types_by_name[message_desc.name] = (
message_desc)
for enum_type in file_proto.enum_type:
file_descriptor.enum_types_by_name[enum_type.name] = (
self._ConvertEnumDescriptor(enum_type, file_proto.package,
file_descriptor, None, scope))
for index, extension_proto in enumerate(file_proto.extension):
extension_desc = self._MakeFieldDescriptor(
extension_proto, file_proto.package, index, file_descriptor,
is_extension=True)
extension_desc.containing_type = self._GetTypeFromScope(
file_descriptor.package, extension_proto.extendee, scope)
self._SetFieldType(extension_proto, extension_desc,
file_descriptor.package, scope)
file_descriptor.extensions_by_name[extension_desc.name] = (
extension_desc)
for desc_proto in file_proto.message_type:
self._SetAllFieldTypes(file_proto.package, desc_proto, scope)
if file_proto.package:
desc_proto_prefix = _PrefixWithDot(file_proto.package)
else:
desc_proto_prefix = ''
for desc_proto in file_proto.message_type:
desc = self._GetTypeFromScope(
desc_proto_prefix, desc_proto.name, scope)
file_descriptor.message_types_by_name[desc_proto.name] = desc
for index, service_proto in enumerate(file_proto.service):
file_descriptor.services_by_name[service_proto.name] = (
self._MakeServiceDescriptor(service_proto, index, scope,
file_proto.package, file_descriptor))
self.Add(file_proto)
self._file_descriptors[file_proto.name] = file_descriptor
return self._file_descriptors[file_proto.name]
def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None,
scope=None, syntax=None):
"""Adds the proto to the pool in the specified package.
Args:
desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
package: The package the proto should be located in.
file_desc: The file containing this message.
scope: Dict mapping short and full symbols to message and enum types.
syntax: string indicating syntax of the file ("proto2" or "proto3")
Returns:
The added descriptor.
"""
if package:
desc_name = '.'.join((package, desc_proto.name))
else:
desc_name = desc_proto.name
if file_desc is None:
file_name = None
else:
file_name = file_desc.name
if scope is None:
scope = {}
nested = [
self._ConvertMessageDescriptor(
nested, desc_name, file_desc, scope, syntax)
for nested in desc_proto.nested_type]
enums = [
self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, scope)
for enum in desc_proto.enum_type]
fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc)
for index, field in enumerate(desc_proto.field)]
extensions = [
self._MakeFieldDescriptor(extension, desc_name, index, file_desc,
is_extension=True)
for index, extension in enumerate(desc_proto.extension)]
oneofs = [
descriptor.OneofDescriptor(desc.name, '.'.join((desc_name, desc.name)),
index, None, [], desc.options)
for index, desc in enumerate(desc_proto.oneof_decl)]
extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
if extension_ranges:
is_extendable = True
else:
is_extendable = False
desc = descriptor.Descriptor(
name=desc_proto.name,
full_name=desc_name,
filename=file_name,
containing_type=None,
fields=fields,
oneofs=oneofs,
nested_types=nested,
enum_types=enums,
extensions=extensions,
options=_OptionsOrNone(desc_proto),
is_extendable=is_extendable,
extension_ranges=extension_ranges,
file=file_desc,
serialized_start=None,
serialized_end=None,
syntax=syntax)
for nested in desc.nested_types:
nested.containing_type = desc
for enum in desc.enum_types:
enum.containing_type = desc
for field_index, field_desc in enumerate(desc_proto.field):
if field_desc.HasField('oneof_index'):
oneof_index = field_desc.oneof_index
oneofs[oneof_index].fields.append(fields[field_index])
fields[field_index].containing_oneof = oneofs[oneof_index]
scope[_PrefixWithDot(desc_name)] = desc
self._CheckConflictRegister(desc)
self._descriptors[desc_name] = desc
return desc
def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None,
containing_type=None, scope=None):
"""Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf.
Args:
enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message.
package: Optional package name for the new message EnumDescriptor.
file_desc: The file containing the enum descriptor.
containing_type: The type containing this enum.
scope: Scope containing available types.
Returns:
The added descriptor
"""
if package:
enum_name = '.'.join((package, enum_proto.name))
else:
enum_name = enum_proto.name
if file_desc is None:
file_name = None
else:
file_name = file_desc.name
values = [self._MakeEnumValueDescriptor(value, index)
for index, value in enumerate(enum_proto.value)]
desc = descriptor.EnumDescriptor(name=enum_proto.name,
full_name=enum_name,
filename=file_name,
file=file_desc,
values=values,
containing_type=containing_type,
options=_OptionsOrNone(enum_proto))
scope['.%s' % enum_name] = desc
self._CheckConflictRegister(desc)
self._enum_descriptors[enum_name] = desc
return desc
def _MakeFieldDescriptor(self, field_proto, message_name, index,
file_desc, is_extension=False):
"""Creates a field descriptor from a FieldDescriptorProto.
For message and enum type fields, this method will do a look up
in the pool for the appropriate descriptor for that type. If it
is unavailable, it will fall back to the _source function to
create it. If this type is still unavailable, construction will
fail.
Args:
field_proto: The proto describing the field.
message_name: The name of the containing message.
index: Index of the field
file_desc: The file containing the field descriptor.
is_extension: Indication that this field is for an extension.
Returns:
An initialized FieldDescriptor object
"""
if message_name:
full_name = '.'.join((message_name, field_proto.name))
else:
full_name = field_proto.name
return descriptor.FieldDescriptor(
name=field_proto.name,
full_name=full_name,
index=index,
number=field_proto.number,
type=field_proto.type,
cpp_type=None,
message_type=None,
enum_type=None,
containing_type=None,
label=field_proto.label,
has_default_value=False,
default_value=None,
is_extension=is_extension,
extension_scope=None,
options=_OptionsOrNone(field_proto),
file=file_desc)
def _SetAllFieldTypes(self, package, desc_proto, scope):
"""Sets all the descriptor's fields's types.
This method also sets the containing types on any extensions.
Args:
package: The current package of desc_proto.
desc_proto: The message descriptor to update.
scope: Enclosing scope of available types.
"""
package = _PrefixWithDot(package)
main_desc = self._GetTypeFromScope(package, desc_proto.name, scope)
if package == '.':
nested_package = _PrefixWithDot(desc_proto.name)
else:
nested_package = '.'.join([package, desc_proto.name])
for field_proto, field_desc in zip(desc_proto.field, main_desc.fields):
self._SetFieldType(field_proto, field_desc, nested_package, scope)
for extension_proto, extension_desc in (
list(zip(desc_proto.extension, main_desc.extensions))):
extension_desc.containing_type = self._GetTypeFromScope(
nested_package, extension_proto.extendee, scope)
self._SetFieldType(extension_proto, extension_desc, nested_package, scope)
for nested_type in desc_proto.nested_type:
self._SetAllFieldTypes(nested_package, nested_type, scope)
def _SetFieldType(self, field_proto, field_desc, package, scope):
"""Sets the field's type, cpp_type, message_type and enum_type.
Args:
field_proto: Data about the field in proto format.
      field_desc: The descriptor to modify.
package: The package the field's container is in.
scope: Enclosing scope of available types.
"""
if field_proto.type_name:
desc = self._GetTypeFromScope(package, field_proto.type_name, scope)
else:
desc = None
if not field_proto.HasField('type'):
if isinstance(desc, descriptor.Descriptor):
field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE
else:
field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM
field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
field_proto.type)
if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE
or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP):
field_desc.message_type = desc
if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
field_desc.enum_type = desc
if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED:
field_desc.has_default_value = False
field_desc.default_value = []
elif field_proto.HasField('default_value'):
field_desc.has_default_value = True
if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
field_desc.default_value = float(field_proto.default_value)
elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
field_desc.default_value = field_proto.default_value
elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
field_desc.default_value = field_proto.default_value.lower() == 'true'
elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
field_desc.default_value = field_desc.enum_type.values_by_name[
field_proto.default_value].number
elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
field_desc.default_value = text_encoding.CUnescape(
field_proto.default_value)
else:
# All other types are of the "int" type.
field_desc.default_value = int(field_proto.default_value)
else:
field_desc.has_default_value = False
if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
field_desc.default_value = 0.0
elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
field_desc.default_value = ''
elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
field_desc.default_value = False
elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
field_desc.default_value = field_desc.enum_type.values[0].number
elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
field_desc.default_value = b''
else:
# All other types are of the "int" type.
field_desc.default_value = 0
field_desc.type = field_proto.type
def _MakeEnumValueDescriptor(self, value_proto, index):
"""Creates a enum value descriptor object from a enum value proto.
Args:
value_proto: The proto describing the enum value.
index: The index of the enum value.
Returns:
An initialized EnumValueDescriptor object.
"""
return descriptor.EnumValueDescriptor(
name=value_proto.name,
index=index,
number=value_proto.number,
options=_OptionsOrNone(value_proto),
type=None)
def _MakeServiceDescriptor(self, service_proto, service_index, scope,
package, file_desc):
"""Make a protobuf ServiceDescriptor given a ServiceDescriptorProto.
Args:
service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf message.
service_index: The index of the service in the File.
scope: Dict mapping short and full symbols to message and enum types.
package: Optional package name for the new message EnumDescriptor.
file_desc: The file containing the service descriptor.
Returns:
The added descriptor.
"""
if package:
service_name = '.'.join((package, service_proto.name))
else:
service_name = service_proto.name
methods = [self._MakeMethodDescriptor(method_proto, service_name, package,
scope, index)
for index, method_proto in enumerate(service_proto.method)]
desc = descriptor.ServiceDescriptor(name=service_proto.name,
full_name=service_name,
index=service_index,
methods=methods,
options=_OptionsOrNone(service_proto),
file=file_desc)
self._CheckConflictRegister(desc)
self._service_descriptors[service_name] = desc
return desc
def _MakeMethodDescriptor(self, method_proto, service_name, package, scope,
index):
"""Creates a method descriptor from a MethodDescriptorProto.
Args:
method_proto: The proto describing the method.
service_name: The name of the containing service.
package: Optional package name to look up for types.
scope: Scope containing available types.
|
An initialized MethodDescriptor object.
"""
full_name = '.'.join((service_name, method_proto.name))
input_type = self._GetTypeFromScope(
package, method_proto.input_type, scope)
output_type = self._GetTypeFromScope(
package, method_proto.output_type, scope)
return descriptor.MethodDescriptor(name=method_proto.name,
full_name=full_name,
index=index,
containing_service=None,
input_type=input_type,
output_type=output_type,
options=_OptionsOrNone(method_proto))
def _ExtractSymbols(self, descriptors):
"""Pulls out all the symbols from descriptor protos.
Args:
descriptors: The messages to extract descriptors from.
Yields:
A two element tuple of the type name and descriptor object.
"""
for desc in descriptors:
yield (_PrefixWithDot(desc.full_name), desc)
for symbol in self._ExtractSymbols(desc.nested_types):
yield symbol
for enum in desc.enum_types:
yield (_PrefixWithDot(enum.full_name), enum)
def _GetDeps(self, dependencies):
"""Recursively finds dependencies for file protos.
Args:
dependencies: The names of the files being depended on.
Yields:
Each direct and indirect dependency.
"""
for dependency in dependencies:
dep_desc = self.FindFileByName(dependency)
yield dep_desc
for parent_dep in dep_desc.dependencies:
yield parent_dep
def _GetTypeFromScope(self, package, type_name, scope):
"""Finds a given type name in the current scope.
Args:
package: The package the proto should be located in.
type_name: The name of the type to be found in the scope.
scope: Dict mapping short and full symbols to message and enum types.
Returns:
The descriptor for the requested type.
"""
if type_name not in scope:
components = _PrefixWithDot(package).split('.')
while components:
possible_match = '.'.join(components + [type_name])
if possible_match in scope:
type_name = possible_match
break
else:
components.pop(-1)
return scope[type_name]
def _PrefixWithDot(name):
return name if name.startswith('.') else '.%s' % name
if _USE_C_DESCRIPTORS:
# TODO(amauryfa): This pool could be constructed from Python code, when we
# support a flag like 'use_cpp_generated_pool=True'.
# pylint: disable=protected-access
_DEFAULT = descriptor._message.default_pool
else:
_DEFAULT = DescriptorPool()
def Default():
return _DEFAULT
|
index: Index of the method in the service.
Returns:
|
coin_store.py
|
from typing import List, Optional
import aiosqlite
from shamrock.types.blockchain_format.coin import Coin
from shamrock.types.blockchain_format.sized_bytes import bytes32
from shamrock.types.coin_record import CoinRecord
from shamrock.types.full_block import FullBlock
from shamrock.util.db_wrapper import DBWrapper
from shamrock.util.ints import uint32, uint64
from shamrock.util.lru_cache import LRUCache
class CoinStore:
"""
This object handles CoinRecords in DB.
A cache is maintained for quicker access to recent coins.
"""
coin_record_db: aiosqlite.Connection
coin_record_cache: LRUCache
cache_size: uint32
db_wrapper: DBWrapper
@classmethod
async def create(cls, db_wrapper: DBWrapper, cache_size: uint32 = uint32(60000)):
self = cls()
self.cache_size = cache_size
self.db_wrapper = db_wrapper
self.coin_record_db = db_wrapper.db
await self.coin_record_db.execute("pragma journal_mode=wal")
await self.coin_record_db.execute("pragma synchronous=2")
await self.coin_record_db.execute(
(
"CREATE TABLE IF NOT EXISTS coin_record("
"coin_name text PRIMARY KEY,"
" confirmed_index bigint,"
" spent_index bigint,"
" spent int,"
" coinbase int,"
" puzzle_hash text,"
" coin_parent text,"
" amount blob,"
" timestamp bigint)"
)
)
# Useful for reorg lookups
await self.coin_record_db.execute(
"CREATE INDEX IF NOT EXISTS coin_confirmed_index on coin_record(confirmed_index)"
)
await self.coin_record_db.execute("CREATE INDEX IF NOT EXISTS coin_spent_index on coin_record(spent_index)")
await self.coin_record_db.execute("CREATE INDEX IF NOT EXISTS coin_spent on coin_record(spent)")
await self.coin_record_db.execute("CREATE INDEX IF NOT EXISTS coin_puzzle_hash on coin_record(puzzle_hash)")
await self.coin_record_db.commit()
self.coin_record_cache = LRUCache(cache_size)
return self
async def
|
(self, block: FullBlock, tx_additions: List[Coin], tx_removals: List[bytes32]):
"""
        Only called for transaction blocks (which have rewards and transactions)
"""
if block.is_transaction_block() is False:
return None
assert block.foliage_transaction_block is not None
for coin in tx_additions:
record: CoinRecord = CoinRecord(
coin,
block.height,
uint32(0),
False,
False,
block.foliage_transaction_block.timestamp,
)
await self._add_coin_record(record, False)
included_reward_coins = block.get_included_reward_coins()
if block.height == 0:
assert len(included_reward_coins) == 0
else:
assert len(included_reward_coins) >= 2
for coin in included_reward_coins:
reward_coin_r: CoinRecord = CoinRecord(
coin,
block.height,
uint32(0),
False,
True,
block.foliage_transaction_block.timestamp,
)
await self._add_coin_record(reward_coin_r, False)
total_amount_spent: int = 0
for coin_name in tx_removals:
total_amount_spent += await self._set_spent(coin_name, block.height)
# Sanity check, already checked in block_body_validation
assert sum([a.amount for a in tx_additions]) <= total_amount_spent
# Checks DB and DiffStores for CoinRecord with coin_name and returns it
async def get_coin_record(self, coin_name: bytes32) -> Optional[CoinRecord]:
cached = self.coin_record_cache.get(coin_name)
if cached is not None:
return cached
cursor = await self.coin_record_db.execute("SELECT * from coin_record WHERE coin_name=?", (coin_name.hex(),))
row = await cursor.fetchone()
await cursor.close()
if row is not None:
coin = Coin(bytes32(bytes.fromhex(row[6])), bytes32(bytes.fromhex(row[5])), uint64.from_bytes(row[7]))
record = CoinRecord(coin, row[1], row[2], row[3], row[4], row[8])
self.coin_record_cache.put(record.coin.name(), record)
return record
return None
async def get_coins_added_at_height(self, height: uint32) -> List[CoinRecord]:
cursor = await self.coin_record_db.execute("SELECT * from coin_record WHERE confirmed_index=?", (height,))
rows = await cursor.fetchall()
await cursor.close()
coins = []
for row in rows:
coin = Coin(bytes32(bytes.fromhex(row[6])), bytes32(bytes.fromhex(row[5])), uint64.from_bytes(row[7]))
coins.append(CoinRecord(coin, row[1], row[2], row[3], row[4], row[8]))
return coins
async def get_coins_removed_at_height(self, height: uint32) -> List[CoinRecord]:
cursor = await self.coin_record_db.execute("SELECT * from coin_record WHERE spent_index=?", (height,))
rows = await cursor.fetchall()
await cursor.close()
coins = []
for row in rows:
spent: bool = bool(row[3])
if spent:
coin = Coin(bytes32(bytes.fromhex(row[6])), bytes32(bytes.fromhex(row[5])), uint64.from_bytes(row[7]))
coin_record = CoinRecord(coin, row[1], row[2], spent, row[4], row[8])
coins.append(coin_record)
return coins
# Checks DB and DiffStores for CoinRecords with puzzle_hash and returns them
async def get_coin_records_by_puzzle_hash(
self,
include_spent_coins: bool,
puzzle_hash: bytes32,
start_height: uint32 = uint32(0),
end_height: uint32 = uint32((2 ** 32) - 1),
) -> List[CoinRecord]:
coins = set()
cursor = await self.coin_record_db.execute(
f"SELECT * from coin_record WHERE puzzle_hash=? AND confirmed_index>=? AND confirmed_index<? "
f"{'' if include_spent_coins else 'AND spent=0'}",
(puzzle_hash.hex(), start_height, end_height),
)
rows = await cursor.fetchall()
await cursor.close()
for row in rows:
coin = Coin(bytes32(bytes.fromhex(row[6])), bytes32(bytes.fromhex(row[5])), uint64.from_bytes(row[7]))
coins.add(CoinRecord(coin, row[1], row[2], row[3], row[4], row[8]))
return list(coins)
async def get_coin_records_by_puzzle_hashes(
self,
include_spent_coins: bool,
puzzle_hashes: List[bytes32],
start_height: uint32 = uint32(0),
end_height: uint32 = uint32((2 ** 32) - 1),
) -> List[CoinRecord]:
if len(puzzle_hashes) == 0:
return []
coins = set()
puzzle_hashes_db = tuple([ph.hex() for ph in puzzle_hashes])
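        # e.g. for three puzzle hashes the IN clause below expands to
        # "puzzle_hash in (?,?,?)" (illustration of the placeholder count).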
cursor = await self.coin_record_db.execute(
f'SELECT * from coin_record WHERE puzzle_hash in ({"?," * (len(puzzle_hashes_db) - 1)}?) '
f"AND confirmed_index>=? AND confirmed_index<? "
f"{'' if include_spent_coins else 'AND spent=0'}",
puzzle_hashes_db + (start_height, end_height),
)
rows = await cursor.fetchall()
await cursor.close()
for row in rows:
coin = Coin(bytes32(bytes.fromhex(row[6])), bytes32(bytes.fromhex(row[5])), uint64.from_bytes(row[7]))
coins.add(CoinRecord(coin, row[1], row[2], row[3], row[4], row[8]))
return list(coins)
async def rollback_to_block(self, block_index: int):
"""
Note that block_index can be negative, in which case everything is rolled back
"""
# Update memory cache
        delete_queue: List[bytes32] = []
for coin_name, coin_record in list(self.coin_record_cache.cache.items()):
if int(coin_record.spent_block_index) > block_index:
new_record = CoinRecord(
coin_record.coin,
coin_record.confirmed_block_index,
uint32(0),
False,
coin_record.coinbase,
coin_record.timestamp,
)
self.coin_record_cache.put(coin_record.coin.name(), new_record)
if int(coin_record.confirmed_block_index) > block_index:
delete_queue.append(coin_name)
for coin_name in delete_queue:
self.coin_record_cache.remove(coin_name)
# Delete from storage
c1 = await self.coin_record_db.execute("DELETE FROM coin_record WHERE confirmed_index>?", (block_index,))
await c1.close()
c2 = await self.coin_record_db.execute(
"UPDATE coin_record SET spent_index = 0, spent = 0 WHERE spent_index>?",
(block_index,),
)
await c2.close()
# Store CoinRecord in DB and ram cache
async def _add_coin_record(self, record: CoinRecord, allow_replace: bool) -> None:
if self.coin_record_cache.get(record.coin.name()) is not None:
self.coin_record_cache.remove(record.coin.name())
cursor = await self.coin_record_db.execute(
f"INSERT {'OR REPLACE ' if allow_replace else ''}INTO coin_record VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)",
(
record.coin.name().hex(),
record.confirmed_block_index,
record.spent_block_index,
int(record.spent),
int(record.coinbase),
str(record.coin.puzzle_hash.hex()),
str(record.coin.parent_coin_info.hex()),
bytes(record.coin.amount),
record.timestamp,
),
)
await cursor.close()
# Update coin_record to be spent in DB
async def _set_spent(self, coin_name: bytes32, index: uint32) -> uint64:
current: Optional[CoinRecord] = await self.get_coin_record(coin_name)
if current is None:
raise ValueError(f"Cannot spend a coin that does not exist in db: {coin_name}")
assert not current.spent # Redundant sanity check, already checked in block_body_validation
spent: CoinRecord = CoinRecord(
current.coin,
current.confirmed_block_index,
index,
True,
current.coinbase,
current.timestamp,
) # type: ignore # noqa
await self._add_coin_record(spent, True)
return current.coin.amount
|
new_block
|
events.ts
|
import { EventModelApi } from '@alice/alice-common/models/alice-model-engine';
import { Location, SpellTrace } from '@alice/sr2020-common/models/location.model';
import * as uuid from 'uuid';
import { duration } from 'moment';
const MAX_SPELL_TRACES = 100;
export function recordSpellTrace(api: EventModelApi<Location>, data: SpellTrace) {
  if (api.model.spellTraces.length >= MAX_SPELL_TRACES) api.model.spellTraces.shift();
api.model.spellTraces.push(data);
}
export function shiftSpellTraces(api: EventModelApi<Location>, data: { shiftTimeSeconds: number; maxLookupSeconds: number }) {
|
})
.sort((t1, t2) => t1.timestamp - t2.timestamp);
}
export function brasiliaEffect(api: EventModelApi<Location>, data: { durationMinutes: number }) {
for (let i = 1; i <= data.durationMinutes; ++i) {
api.setTimer(uuid.v4(), 'Сдвиг заклинаний в прошлое', duration(i, 'minutes'), shiftSpellTraces, {
maxLookupSeconds: 600,
shiftTimeSeconds: 300,
});
}
}
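// Timing sketch: brasiliaEffect({ durationMinutes: 3 }) schedules timers at
// 1, 2 and 3 minutes; each firing shifts traces recorded within the last
// maxLookupSeconds (600 s here) back by shiftTimeSeconds (300 s here) and
// re-sorts them by timestamp. Values mirror the constants above, for
// illustration only.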
|
api.model.spellTraces = api.model.spellTraces
.map((trace) => {
if (trace.timestamp >= api.model.timestamp - data.maxLookupSeconds * 1000) trace.timestamp -= data.shiftTimeSeconds * 1000;
return trace;
|
io_wrapper.py
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions that wrap both gfile and gcs.
This module is *not* intended to be a general-purpose IO wrapper library; it
only implements the operations that are necessary for loading event files. The
functions either dispatch to the gcs library or to gfile, depending on whether
the path is a GCS 'pseudo-path' (i.e., it satisfies gcs.IsGCSPath) or not.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow.python.platform import gfile
from tensorflow.python.summary.impl import event_file_loader
from tensorflow.python.summary.impl import gcs
from tensorflow.python.summary.impl import gcs_file_loader
def CreateFileLoader(path):
"""Creates a file loader for the given path.
Args:
path: A string representing either a normal path or a GCS path.
Returns:
An object with a Load() method that yields event_pb2.Event protos.
"""
if gcs.IsGCSPath(path):
return gcs_file_loader.GCSFileLoader(path)
else:
return event_file_loader.EventFileLoader(path)
def ListDirectoryAbsolute(directory):
"""Yields all files in the given directory. The paths are absolute."""
if gcs.IsGCSPath(directory):
return gcs.ListDirectory(directory)
else:
return (os.path.join(directory, path)
for path in gfile.ListDirectory(directory))
def ListRecursively(top):
"""Walks a directory tree, yielding (dir_path, file_paths) tuples.
For each of `top` and its subdirectories, yields a tuple containing the path
to the directory and the path to each of the contained files. Note that
unlike os.walk()/gfile.Walk(), this does not list subdirectories, and the file
paths are all absolute.
If the directory does not exist, this yields nothing.
Args:
top: A path to a directory.
Yields:
A list of (dir_path, file_paths) tuples.
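Example (a sketch; the path is illustrative):
  for dir_path, file_paths in ListRecursively('/tmp/logs'):
    for file_path in file_paths:
      print(dir_path, file_path)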
"""
if gcs.IsGCSPath(top):
for x in gcs.ListRecursively(top):
yield x
else:
for dir_path, _, filenames in gfile.Walk(top):
yield (dir_path, (os.path.join(dir_path, filename)
for filename in filenames))
|
if gcs.IsGCSPath(path):
return gcs.IsDirectory(path)
else:
return gfile.IsDirectory(path)
def Exists(path):
if gcs.IsGCSPath(path):
return gcs.Exists(path)
else:
return gfile.Exists(path)
def Size(path):
"""Returns the number of bytes in the given file. Doesn't work on GCS."""
if gcs.IsGCSPath(path):
raise NotImplementedError("io_wrapper.Size doesn't support GCS paths")
else:
return gfile.Open(path).size()
|
def IsDirectory(path):
"""Returns true if path exists and is a directory."""
|
go_bench.go
|
package main
import (
"os"
"strconv"
)
func main()
|
{
count, _ := strconv.Atoi(os.Args[1])
for i := 0; i < count; i++ {
// do nothing
}
}
|
|
index.d.ts
|
export declare class
|
{
private worker;
constructor();
doWork(n: number): Promise<string>;
end(): void;
}
|
PackageWorkerAPI
|
router.d.ts
|
/**
* @license Angular v12.0.3
* (c) 2010-2021 Google LLC. https://angular.io/
* License: MIT
*/
import { AfterContentInit } from '@angular/core';
import { ChangeDetectorRef } from '@angular/core';
import { Compiler } from '@angular/core';
import { ComponentFactoryResolver } from '@angular/core';
import { ComponentRef } from '@angular/core';
import { ElementRef } from '@angular/core';
import { EventEmitter } from '@angular/core';
import { HashLocationStrategy } from '@angular/common';
import { InjectionToken } from '@angular/core';
import { Injector } from '@angular/core';
import { Location } from '@angular/common';
import { LocationStrategy } from '@angular/common';
import { ModuleWithProviders } from '@angular/core';
import { NgModuleFactory } from '@angular/core';
import { NgModuleFactoryLoader } from '@angular/core';
import { NgProbeToken } from '@angular/core';
import { Observable } from 'rxjs';
import { OnChanges } from '@angular/core';
import { OnDestroy } from '@angular/core';
import { OnInit } from '@angular/core';
import { PathLocationStrategy } from '@angular/common';
import { PlatformLocation } from '@angular/common';
import { Provider } from '@angular/core';
import { QueryList } from '@angular/core';
import { Renderer2 } from '@angular/core';
import { SimpleChanges } from '@angular/core';
import { Type } from '@angular/core';
import { Version } from '@angular/core';
import { ViewContainerRef } from '@angular/core';
import { ViewportScroller } from '@angular/common';
/**
* Provides access to information about a route associated with a component
* that is loaded in an outlet.
* Use to traverse the `RouterState` tree and extract information from nodes.
*
* The following example shows how to construct a component using information from a
* currently activated route.
*
* Note: the observables in this class only emit when the current and previous values differ based
* on shallow equality. For example, changing deeply nested properties in resolved `data` will not
* cause the `ActivatedRoute.data` `Observable` to emit a new value.
*
* {@example router/activated-route/module.ts region="activated-route"
* header="activated-route.component.ts"}
*
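* A minimal inline sketch (hypothetical component; the {@example} above is the full version):
*
* ```
* @Component({template: ''})
* class UserComponent {
*   constructor(route: ActivatedRoute) {
*     route.paramMap.subscribe(params => console.log(params.get('id')));
*   }
* }
* ```
*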
* @see [Getting route information](guide/router#getting-route-information)
*
* @publicApi
*/
import * as ɵngcc0 from '@angular/core';
export declare class ActivatedRoute {
/** An observable of the URL segments matched by this route. */
url: Observable<UrlSegment[]>;
/** An observable of the matrix parameters scoped to this route. */
params: Observable<Params>;
/** An observable of the query parameters shared by all the routes. */
queryParams: Observable<Params>;
/** An observable of the URL fragment shared by all the routes. */
fragment: Observable<string | null>;
/** An observable of the static and resolved data of this route. */
data: Observable<Data>;
/** The outlet name of the route, a constant. */
outlet: string;
/** The component of the route, a constant. */
component: Type<any> | string | null;
/** The current snapshot of this route */
snapshot: ActivatedRouteSnapshot;
/** The configuration used to match this route. */
get routeConfig(): Route | null;
/** The root of the router state. */
get root(): ActivatedRoute;
/** The parent of this route in the router state tree. */
get parent(): ActivatedRoute | null;
/** The first child of this route in the router state tree. */
get firstChild(): ActivatedRoute | null;
/** The children of this route in the router state tree. */
get children(): ActivatedRoute[];
/** The path from the root of the router state tree to this route. */
get pathFromRoot(): ActivatedRoute[];
/**
* An Observable that contains a map of the required and optional parameters
* specific to the route.
* The map supports retrieving single and multiple values from the same parameter.
*/
get paramMap(): Observable<ParamMap>;
/**
* An Observable that contains a map of the query parameters available to all routes.
* The map supports retrieving single and multiple values from the query parameter.
*/
get queryParamMap(): Observable<ParamMap>;
toString(): string;
}
/**
* @description
*
* Contains the information about a route associated with a component loaded in an
* outlet at a particular moment in time. ActivatedRouteSnapshot can also be used to
* traverse the router state tree.
*
* The following example initializes a component with route information extracted
* from the snapshot of the root node at the time of creation.
*
* ```
* @Component({templateUrl:'./my-component.html'})
* class MyComponent {
* constructor(route: ActivatedRoute) {
* const id: string = route.snapshot.params.id;
* const url: string = route.snapshot.url.join('');
* const user = route.snapshot.data.user;
* }
* }
* ```
*
* @publicApi
*/
export declare class ActivatedRouteSnapshot {
/** The URL segments matched by this route */
url: UrlSegment[];
/**
* The matrix parameters scoped to this route.
*
* You can compute all params (or data) in the router state, or get params outside
* of an activated component, by traversing the `RouterState` tree as in the following
* example:
* ```
* collectRouteParams(router: Router) {
* let params = {};
* let stack: ActivatedRouteSnapshot[] = [router.routerState.snapshot.root];
* while (stack.length > 0) {
* const route = stack.pop()!;
* params = {...params, ...route.params};
* stack.push(...route.children);
* }
* return params;
* }
* ```
*/
params: Params;
/** The query parameters shared by all the routes */
queryParams: Params;
/** The URL fragment shared by all the routes */
fragment: string | null;
/** The static and resolved data of this route */
data: Data;
/** The outlet name of the route */
outlet: string;
/** The component of the route */
component: Type<any> | string | null;
/** The configuration used to match this route **/
readonly routeConfig: Route | null;
/** The root of the router state */
get root(): ActivatedRouteSnapshot;
/** The parent of this route in the router state tree */
get parent(): ActivatedRouteSnapshot | null;
/** The first child of this route in the router state tree */
get firstChild(): ActivatedRouteSnapshot | null;
/** The children of this route in the router state tree */
get children(): ActivatedRouteSnapshot[];
/** The path from the root of the router state tree to this route */
get pathFromRoot(): ActivatedRouteSnapshot[];
get paramMap(): ParamMap;
get queryParamMap(): ParamMap;
toString(): string;
}
/**
* An event triggered at the end of the activation part
* of the Resolve phase of routing.
* @see `ActivationStart`
* @see `ResolveStart`
*
* @publicApi
*/
export declare class ActivationEnd {
/** @docsNotRequired */
snapshot: ActivatedRouteSnapshot;
constructor(
/** @docsNotRequired */
snapshot: ActivatedRouteSnapshot);
toString(): string;
}
/**
* An event triggered at the start of the activation part
* of the Resolve phase of routing.
* @see `ActivationEnd`
* @see `ResolveStart`
*
* @publicApi
*/
export declare class ActivationStart {
/** @docsNotRequired */
snapshot: ActivatedRouteSnapshot;
constructor(
/** @docsNotRequired */
snapshot: ActivatedRouteSnapshot);
toString(): string;
}
/**
* @description
*
* This base route reuse strategy only reuses routes when the matched router configs are
* identical. This prevents components from being destroyed and recreated
* when just the fragment or query parameters change
* (that is, the existing component is _reused_).
*
* This strategy does not store any routes for later reuse.
*
* Angular uses this strategy by default.
*
*
* It can be used as a base class for custom route reuse strategies; that is, you can
* create your own class that extends `BaseRouteReuseStrategy` and overrides only what you need.
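*
* A minimal sketch of such a subclass (hypothetical policy: additionally reuse routes that
* opt in via `data.reuse`):
*
* ```
* class AppRouteReuseStrategy extends BaseRouteReuseStrategy {
*   shouldReuseRoute(future: ActivatedRouteSnapshot, curr: ActivatedRouteSnapshot): boolean {
*     return super.shouldReuseRoute(future, curr) || future.data.reuse === true;
*   }
* }
* ```
*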
* @publicApi
*/
export declare abstract class BaseRouteReuseStrategy implements RouteReuseStrategy {
/**
* Whether the given route should detach for later reuse.
* Always returns false for `BaseRouteReuseStrategy`.
* */
shouldDetach(route: ActivatedRouteSnapshot): boolean;
/**
* A no-op; the route is never stored since this strategy never detaches routes for later re-use.
*/
store(route: ActivatedRouteSnapshot, detachedTree: DetachedRouteHandle): void;
/** Returns `false`, meaning the route (and its subtree) is never reattached */
shouldAttach(route: ActivatedRouteSnapshot): boolean;
/** Returns `null` because this strategy does not store routes for later re-use. */
retrieve(route: ActivatedRouteSnapshot): DetachedRouteHandle | null;
/**
* Determines if a route should be reused.
* This strategy returns `true` when the future route config and current route config are
* identical.
*/
shouldReuseRoute(future: ActivatedRouteSnapshot, curr: ActivatedRouteSnapshot): boolean;
}
/**
* @description
*
* Interface that a class can implement to be a guard deciding if a route can be activated.
* If all guards return `true`, navigation continues. If any guard returns `false`,
* navigation is cancelled. If any guard returns a `UrlTree`, the current navigation
* is cancelled and a new navigation begins to the `UrlTree` returned from the guard.
*
* The following example implements a `CanActivate` function that checks whether the
* current user has permission to activate the requested route.
*
* ```
* class UserToken {}
* class Permissions {
* canActivate(user: UserToken, id: string): boolean {
* return true;
* }
* }
*
* @Injectable()
* class CanActivateTeam implements CanActivate {
* constructor(private permissions: Permissions, private currentUser: UserToken) {}
*
* canActivate(
* route: ActivatedRouteSnapshot,
* state: RouterStateSnapshot
* ): Observable<boolean|UrlTree>|Promise<boolean|UrlTree>|boolean|UrlTree {
* return this.permissions.canActivate(this.currentUser, route.params.id);
* }
* }
* ```
*
* Here, the defined guard function is provided as part of the `Route` object
* in the router configuration:
*
* ```
* @NgModule({
* imports: [
* RouterModule.forRoot([
* {
* path: 'team/:id',
* component: TeamComponent,
* canActivate: [CanActivateTeam]
* }
* ])
* ],
* providers: [CanActivateTeam, UserToken, Permissions]
* })
* class AppModule {}
* ```
*
* You can alternatively provide an in-line function with the `canActivate` signature:
*
* ```
* @NgModule({
* imports: [
* RouterModule.forRoot([
* {
* path: 'team/:id',
* component: TeamComponent,
* canActivate: ['canActivateTeam']
* }
* ])
* ],
* providers: [
* {
* provide: 'canActivateTeam',
* useValue: (route: ActivatedRouteSnapshot, state: RouterStateSnapshot) => true
* }
* ]
* })
* class AppModule {}
* ```
*
* @publicApi
*/
export declare interface CanActivate {
canActivate(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable<boolean | UrlTree> | Promise<boolean | UrlTree> | boolean | UrlTree;
}
/**
* @description
*
* Interface that a class can implement to be a guard deciding if a child route can be activated.
* If all guards return `true`, navigation continues. If any guard returns `false`,
* navigation is cancelled. If any guard returns a `UrlTree`, current navigation
* is cancelled and a new navigation begins to the `UrlTree` returned from the guard.
*
* The following example implements a `CanActivateChild` function that checks whether the
* current user has permission to activate the requested child route.
*
* ```
* class UserToken {}
* class Permissions {
* canActivate(user: UserToken, id: string): boolean {
* return true;
* }
* }
*
* @Injectable()
* class CanActivateTeam implements CanActivateChild {
* constructor(private permissions: Permissions, private currentUser: UserToken) {}
*
* canActivateChild(
* route: ActivatedRouteSnapshot,
* state: RouterStateSnapshot
* ): Observable<boolean|UrlTree>|Promise<boolean|UrlTree>|boolean|UrlTree {
* return this.permissions.canActivate(this.currentUser, route.params.id);
* }
* }
* ```
*
* Here, the defined guard function is provided as part of the `Route` object
* in the router configuration:
*
* ```
* @NgModule({
* imports: [
* RouterModule.forRoot([
* {
* path: 'root',
* canActivateChild: [CanActivateTeam],
* children: [
* {
* path: 'team/:id',
* component: TeamComponent
* }
* ]
* }
* ])
* ],
* providers: [CanActivateTeam, UserToken, Permissions]
* })
* class AppModule {}
* ```
*
* You can alternatively provide an in-line function with the `canActivateChild` signature:
*
* ```
* @NgModule({
* imports: [
* RouterModule.forRoot([
* {
* path: 'root',
* canActivateChild: ['canActivateTeam'],
* children: [
* {
* path: 'team/:id',
* component: TeamComponent
* }
* ]
* }
* ])
* ],
* providers: [
* {
* provide: 'canActivateTeam',
* useValue: (route: ActivatedRouteSnapshot, state: RouterStateSnapshot) => true
* }
* ]
* })
* class AppModule {}
* ```
*
* @publicApi
*/
export declare interface CanActivateChild {
canActivateChild(childRoute: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable<boolean | UrlTree> | Promise<boolean | UrlTree> | boolean | UrlTree;
}
/**
* @description
*
* Interface that a class can implement to be a guard deciding if a route can be deactivated.
* If all guards return `true`, navigation continues. If any guard returns `false`,
* navigation is cancelled. If any guard returns a `UrlTree`, current navigation
* is cancelled and a new navigation begins to the `UrlTree` returned from the guard.
*
* The following example implements a `CanDeactivate` function that checks whether the
* current user has permission to deactivate the requested route.
*
* ```
* class UserToken {}
* class Permissions {
* canDeactivate(user: UserToken, id: string): boolean {
* return true;
* }
* }
* ```
*
* Here, the defined guard function is provided as part of the `Route` object
* in the router configuration:
*
* ```
*
* @Injectable()
* class CanDeactivateTeam implements CanDeactivate<TeamComponent> {
* constructor(private permissions: Permissions, private currentUser: UserToken) {}
*
* canDeactivate(
* component: TeamComponent,
* currentRoute: ActivatedRouteSnapshot,
* currentState: RouterStateSnapshot,
* nextState: RouterStateSnapshot
* ): Observable<boolean|UrlTree>|Promise<boolean|UrlTree>|boolean|UrlTree {
* return this.permissions.canDeactivate(this.currentUser, currentRoute.params.id);
* }
* }
*
* @NgModule({
* imports: [
* RouterModule.forRoot([
* {
* path: 'team/:id',
* component: TeamComponent,
* canDeactivate: [CanDeactivateTeam]
* }
* ])
* ],
* providers: [CanDeactivateTeam, UserToken, Permissions]
* })
* class AppModule {}
* ```
*
* You can alternatively provide an in-line function with the `canDeactivate` signature:
*
* ```
* @NgModule({
* imports: [
* RouterModule.forRoot([
* {
* path: 'team/:id',
* component: TeamComponent,
* canDeactivate: ['canDeactivateTeam']
* }
* ])
* ],
* providers: [
* {
* provide: 'canDeactivateTeam',
* useValue: (component: TeamComponent, currentRoute: ActivatedRouteSnapshot, currentState:
* RouterStateSnapshot, nextState: RouterStateSnapshot) => true
* }
* ]
* })
* class AppModule {}
* ```
*
* @publicApi
*/
export declare interface CanDeactivate<T> {
canDeactivate(component: T, currentRoute: ActivatedRouteSnapshot, currentState: RouterStateSnapshot, nextState?: RouterStateSnapshot): Observable<boolean | UrlTree> | Promise<boolean | UrlTree> | boolean | UrlTree;
}
/**
* @description
*
* Interface that a class can implement to be a guard deciding if children can be loaded.
* If all guards return `true`, navigation continues. If any guard returns `false`,
* navigation is cancelled. If any guard returns a `UrlTree`, current navigation
* is cancelled and a new navigation starts to the `UrlTree` returned from the guard.
*
* The following example implements a `CanLoad` function that decides whether the
* current user has permission to load requested child routes.
*
*
* ```
* class UserToken {}
* class Permissions {
* canLoadChildren(user: UserToken, route: Route, segments: UrlSegment[]): boolean {
* return true;
* }
* }
*
* @Injectable()
* class CanLoadTeamSection implements CanLoad {
* constructor(private permissions: Permissions, private currentUser: UserToken) {}
*
* canLoad(route: Route, segments: UrlSegment[]): Observable<boolean>|Promise<boolean>|boolean {
* return this.permissions.canLoadChildren(this.currentUser, route, segments);
* }
* }
* ```
*
* Here, the defined guard function is provided as part of the `Route` object
* in the router configuration:
*
* ```
*
* @NgModule({
* imports: [
* RouterModule.forRoot([
* {
* path: 'team/:id',
* component: TeamComponent,
* loadChildren: 'team.js',
* canLoad: [CanLoadTeamSection]
* }
* ])
* ],
* providers: [CanLoadTeamSection, UserToken, Permissions]
* })
* class AppModule {}
* ```
*
* You can alternatively provide an in-line function with the `canLoad` signature:
*
* ```
* @NgModule({
* imports: [
* RouterModule.forRoot([
* {
* path: 'team/:id',
* component: TeamComponent,
* loadChildren: 'team.js',
* canLoad: ['canLoadTeamSection']
* }
* ])
* ],
* providers: [
* {
* provide: 'canLoadTeamSection',
* useValue: (route: Route, segments: UrlSegment[]) => true
* }
* ]
* })
* class AppModule {}
* ```
*
* @publicApi
*/
export declare interface CanLoad {
canLoad(route: Route, segments: UrlSegment[]): Observable<boolean | UrlTree> | Promise<boolean | UrlTree> | boolean | UrlTree;
}
/**
* An event triggered at the end of the child-activation part
* of the Resolve phase of routing.
* @see `ChildActivationStart`
* @see `ResolveStart`
* @publicApi
*/
export declare class ChildActivationEnd {
/** @docsNotRequired */
snapshot: ActivatedRouteSnapshot;
constructor(
/** @docsNotRequired */
snapshot: ActivatedRouteSnapshot);
toString(): string;
}
/**
* An event triggered at the start of the child-activation
* part of the Resolve phase of routing.
* @see `ChildActivationEnd`
* @see `ResolveStart`
*
* @publicApi
*/
export declare class ChildActivationStart {
/** @docsNotRequired */
snapshot: ActivatedRouteSnapshot;
constructor(
/** @docsNotRequired */
snapshot: ActivatedRouteSnapshot);
toString(): string;
}
/**
* Store contextual information about the children (= nested) `RouterOutlet`
*
* @publicApi
*/
export declare class ChildrenOutletContexts {
private contexts;
/** Called when a `RouterOutlet` directive is instantiated */
onChildOutletCreated(childName: string, outlet: RouterOutletContract): void;
/**
* Called when a `RouterOutlet` directive is destroyed.
* We need to keep the context as the outlet could be destroyed inside a NgIf and might be
* re-created later.
*/
onChildOutletDestroyed(childName: string): void;
/**
* Called when the corresponding route is deactivated during navigation.
* Because the component get destroyed, all children outlet are destroyed.
*/
onOutletDeactivated(): Map<string, OutletContext>;
onOutletReAttached(contexts: Map<string, OutletContext>): void;
getOrCreateContext(childName: string): OutletContext;
getContext(childName: string): OutletContext | null;
}
/**
* Converts a `Params` instance to a `ParamMap`.
* @param params The instance to convert.
* @returns The new map instance.
*
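* For example (a sketch; the keys are illustrative):
*
* ```
* const map = convertToParamMap({id: '42', tag: ['a', 'b']});
* map.get('id');     // '42'
* map.getAll('tag'); // ['a', 'b']
* ```
*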
* @publicApi
*/
export declare function convertToParamMap(params: Params): ParamMap;
/**
*
* Represents static data associated with a particular route.
*
* @see `Route#data`
*
* @publicApi
*/
export declare type Data = {
[name: string]: any;
};
/**
* @description
*
* A default implementation of the `UrlSerializer`.
*
* Example URLs:
*
* ```
* /inbox/33(popup:compose)
* /inbox/33;open=true/messages/44
* ```
*
* DefaultUrlSerializer uses parentheses to serialize secondary segments (e.g., popup:compose), the
* colon syntax to specify the outlet, and the ';parameter=value' syntax (e.g., open=true) to
* specify route specific parameters.
*
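* A parse/serialize round trip (a sketch of typical use):
*
* ```
* const serializer = new DefaultUrlSerializer();
* const tree = serializer.parse('/inbox/33;open=true/messages/44');
* serializer.serialize(tree); // '/inbox/33;open=true/messages/44'
* ```
*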
* @publicApi
*/
export declare class DefaultUrlSerializer implements UrlSerializer {
/** Parses a url into a `UrlTree` */
parse(url: string): UrlTree;
/** Converts a `UrlTree` into a url */
serialize(tree: UrlTree): string;
}
/**
* A string of the form `path/to/file#exportName` that acts as a URL for a set of routes to load.
*
* @see `loadChildrenCallback`
* @publicApi
* @deprecated The `string` form of `loadChildren` is deprecated in favor of the
* `LoadChildrenCallback` function which uses the ES dynamic `import()` expression.
* This offers a more natural and standards-based mechanism to dynamically
* load an ES module at runtime.
*/
export declare type DeprecatedLoadChildren = string;
/**
* @description
*
* Represents the detached route tree.
*
* This is an opaque value the router will give to a custom route reuse strategy
* to store and retrieve later on.
*
* @publicApi
*/
export declare type DetachedRouteHandle = {};
/**
* Error handler that is invoked when a navigation error occurs.
*
* If the handler returns a value, the navigation Promise is resolved with this value.
* If the handler throws an exception, the navigation Promise is rejected with
* the exception.
*
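* A sketch of a handler that logs the error and resolves the navigation promise
* with `false` (the handler shape is all this illustrates):
*
* ```
* const handler: ErrorHandler = (error) => {
*   console.error('Navigation failed', error);
*   return false;
* };
* ```
*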
* @publicApi
*/
declare type ErrorHandler = (error: any) => any;
/**
* Router events that allow you to track the lifecycle of the router.
*
* The events occur in the following sequence:
*
* * [NavigationStart](api/router/NavigationStart): Navigation starts.
* * [RouteConfigLoadStart](api/router/RouteConfigLoadStart): Before
* the router [lazy loads](/guide/router#lazy-loading) a route configuration.
* * [RouteConfigLoadEnd](api/router/RouteConfigLoadEnd): After a route has been lazy loaded.
* * [RoutesRecognized](api/router/RoutesRecognized): When the router parses the URL
* and the routes are recognized.
* * [GuardsCheckStart](api/router/GuardsCheckStart): When the router begins the *guards*
* phase of routing.
* * [ChildActivationStart](api/router/ChildActivationStart): When the router
* begins activating a route's children.
* * [ActivationStart](api/router/ActivationStart): When the router begins activating a route.
* * [GuardsCheckEnd](api/router/GuardsCheckEnd): When the router finishes the *guards*
* phase of routing successfully.
* * [ResolveStart](api/router/ResolveStart): When the router begins the *resolve*
* phase of routing.
* * [ResolveEnd](api/router/ResolveEnd): When the router finishes the *resolve*
* phase of routing successfully.
* * [ChildActivationEnd](api/router/ChildActivationEnd): When the router finishes
* activating a route's children.
* * [ActivationEnd](api/router/ActivationEnd): When the router finishes activating a route.
* * [NavigationEnd](api/router/NavigationEnd): When navigation ends successfully.
* * [NavigationCancel](api/router/NavigationCancel): When navigation is canceled.
* * [NavigationError](api/router/NavigationError): When navigation fails
* due to an unexpected error.
* * [Scroll](api/router/Scroll): When the user scrolls.
*
* @publicApi
*/
export declare type Event = RouterEvent | RouteConfigLoadStart | RouteConfigLoadEnd | ChildActivationStart | ChildActivationEnd | ActivationStart | ActivationEnd | Scroll;
/**
* A set of configuration options for a router module, provided in the
* `forRoot()` method.
*
* @see `forRoot()`
*
*
* @publicApi
*/
export declare interface ExtraOptions {
/**
* When true, log all internal navigation events to the console.
* Use for debugging.
*/
enableTracing?: boolean;
/**
* When true, enable the location strategy that uses the URL fragment
* instead of the history API.
*/
useHash?: boolean;
/**
* One of `enabled`, `enabledBlocking`, `enabledNonBlocking` or `disabled`.
* When set to `enabled` or `enabledBlocking`, the initial navigation starts before the root
* component is created. The bootstrap is blocked until the initial navigation is complete. This
* value is required for [server-side rendering](guide/universal) to work. When set to
* `enabledNonBlocking`, the initial navigation starts after the root component has been created.
* The bootstrap is not blocked on the completion of the initial navigation. When set to
* `disabled`, the initial navigation is not performed. The location listener is set up before the
* root component gets created. Use if there is a reason to have more control over when the router
* starts its initial navigation due to some complex initialization logic.
*/
initialNavigation?: InitialNavigation;
/**
* A custom error handler for failed navigations.
* If the handler returns a value, the navigation Promise is resolved with this value.
* If the handler throws an exception, the navigation Promise is rejected with the exception.
*
*/
errorHandler?: ErrorHandler;
/**
* Configures a preloading strategy.
* One of `PreloadAllModules` or `NoPreloading` (the default).
*/
preloadingStrategy?: any;
/**
* Define what the router should do if it receives a navigation request to the current URL.
* Default is 'ignore', which causes the router to ignore the navigation.
* This can disable features such as a "refresh" button; set to 'reload' to re-run
* navigation for the current URL.
*/
onSameUrlNavigation?: 'reload' | 'ignore';
/**
* Configures if the scroll position needs to be restored when navigating back.
*
* * 'disabled'- (Default) Does nothing. Scroll position is maintained on navigation.
* * 'top'- Sets the scroll position to x = 0, y = 0 on all navigation.
* * 'enabled'- Restores the previous scroll position on backward navigation, else sets the
* position to the anchor if one is provided, or sets the scroll position to [0, 0] (forward
* navigation). This option will be the default in the future.
*
* You can implement custom scroll restoration behavior by adapting the enabled behavior as
* in the following example.
*
* ```typescript
* class AppModule {
* constructor(router: Router, viewportScroller: ViewportScroller) {
* router.events.pipe(
* filter((e: Event): e is Scroll => e instanceof Scroll)
* ).subscribe(e => {
* if (e.position) {
* // backward navigation
* viewportScroller.scrollToPosition(e.position);
* } else if (e.anchor) {
* // anchor navigation
* viewportScroller.scrollToAnchor(e.anchor);
* } else {
* // forward navigation
* viewportScroller.scrollToPosition([0, 0]);
* }
* });
* }
* }
* ```
*/
scrollPositionRestoration?: 'disabled' | 'enabled' | 'top';
/**
* When set to 'enabled', scrolls to the anchor element when the URL has a fragment.
* Anchor scrolling is disabled by default.
*
* Anchor scrolling does not happen on 'popstate'. Instead, we restore the position
* that we stored or scroll to the top.
*/
anchorScrolling?: 'disabled' | 'enabled';
/**
* Configures the scroll offset the router will use when scrolling to an element.
*
* When given a tuple with x and y position value,
* the router uses that offset each time it scrolls.
* When given a function, the router invokes the function every time
* it restores scroll position.
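*
* For example, a fixed offset for a 64px sticky header (a sketch; `routes` is assumed
* to be defined elsewhere):
*
* ```
* RouterModule.forRoot(routes, {scrollOffset: [0, 64]})
* ```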
*/
scrollOffset?: [number, number] | (() => [number, number]);
/**
* Defines how the router merges parameters, data, and resolved data from parent to child
* routes. By default ('emptyOnly'), inherits parent parameters only for
* path-less or component-less routes.
*
* Set to 'always' to enable unconditional inheritance of parent parameters.
*
* Note that when dealing with matrix parameters, "parent" refers to the parent `Route`
* config which does not necessarily mean the "URL segment to the left". When the `Route` `path`
* contains multiple segments, the matrix parameters must appear on the last segment. For example,
* matrix parameters for `{path: 'a/b', component: MyComp}` should appear as `a/b;foo=bar` and not
* `a;foo=bar/b`.
*
*/
paramsInheritanceStrategy?: 'emptyOnly' | 'always';
/**
* A custom handler for malformed URI errors. The handler is invoked when `encodedURI` contains
* invalid character sequences.
* The default implementation is to redirect to the root URL, dropping
* any path or parameter information. The function takes three parameters:
*
* - `'URIError'` - Error thrown when parsing a bad URL.
* - `'UrlSerializer'` - UrlSerializer that’s configured with the router.
* - `'url'` - The malformed URL that caused the URIError
* */
malformedUriErrorHandler?: (error: URIError, urlSerializer: UrlSerializer, url: string) => UrlTree;
/**
* Defines when the router updates the browser URL. By default ('deferred'),
* the URL is updated after a successful navigation.
* Set to 'eager' if you prefer to update the URL at the beginning of navigation.
* Updating the URL early allows you to handle a failure of navigation by
* showing an error message with the URL that failed.
*/
urlUpdateStrategy?: 'deferred' | 'eager';
/**
* Enables a bug fix that corrects relative link resolution in components with empty paths.
* Example:
*
* ```
* const routes = [
* {
* path: '',
* component: ContainerComponent,
* children: [
* { path: 'a', component: AComponent },
* { path: 'b', component: BComponent },
* ]
* }
* ];
* ```
*
* From the `ContainerComponent`, you should be able to navigate to `AComponent` using
* the following `routerLink`, but it will not work if `relativeLinkResolution` is set
* to `'legacy'`:
*
* `<a [routerLink]="['./a']">Link to A</a>`
*
* However, this will work:
*
* `<a [routerLink]="['../a']">Link to A</a>`
*
* In other words, you're required to use `../` rather than `./` when the relative link
* resolution is set to `'legacy'`.
*
* The default in v11 is `corrected`.
*/
relativeLinkResolution?: 'legacy' | 'corrected';
}
/**
* An event triggered at the end of the Guard phase of routing.
*
* @see `GuardsCheckStart`
*
* @publicApi
*/
export declare class GuardsCheckEnd extends RouterEvent {
/** @docsNotRequired */
urlAfterRedirects: string;
/** @docsNotRequired */
state: RouterStateSnapshot;
/** @docsNotRequired */
shouldActivate: boolean;
constructor(
/** @docsNotRequired */
id: number,
/** @docsNotRequired */
url: string,
/** @docsNotRequired */
urlAfterRedirects: string,
/** @docsNotRequired */
state: RouterStateSnapshot,
/** @docsNotRequired */
shouldActivate: boolean);
toString(): string;
}
/**
* An event triggered at the start of the Guard phase of routing.
*
* @see `GuardsCheckEnd`
*
* @publicApi
*/
export declare class GuardsCheckStart extends RouterEvent {
/** @docsNotRequired */
urlAfterRedirects: string;
/** @docsNotRequired */
state: RouterStateSnapshot;
constructor(
/** @docsNotRequired */
id: number,
/** @docsNotRequired */
url: string,
/** @docsNotRequired */
urlAfterRedirects: string,
/** @docsNotRequired */
state: RouterStateSnapshot);
toString(): string;
}
/**
* Allowed values in an `ExtraOptions` object that configure
* when the router performs the initial navigation operation.
*
* * 'enabledNonBlocking' - (default) The initial navigation starts after the
* root component has been created. The bootstrap is not blocked on the completion of the initial
* navigation.
* * 'enabledBlocking' - The initial navigation starts before the root component is created.
* The bootstrap is blocked until the initial navigation is complete. This value is required
* for [server-side rendering](guide/universal) to work.
* * 'disabled' - The initial navigation is not performed. The location listener is set up before
* the root component gets created. Use if there is a reason to have
* more control over when the router starts its initial navigation due to some complex
* initialization logic.
*
* The following values have been [deprecated](guide/releases#deprecation-practices) since v11,
* and should not be used for new applications.
*
* * 'enabled' - This option is 1:1 replaceable with `enabledBlocking`.
*
* @see `forRoot()`
*
* @publicApi
*/
export declare type InitialNavigation = 'disabled' | 'enabled' | 'enabledBlocking' | 'enabledNonBlocking';
/**
* A set of options which specify how to determine if a `UrlTree` is active, given the `UrlTree`
* for the current router state.
*
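* For example, a subset match that ignores query and matrix parameters (a sketch,
* assuming an injected `Router` instance named `router`):
*
* ```
* router.isActive(router.parseUrl('/team/33'), {
*   paths: 'subset',
*   queryParams: 'ignored',
*   matrixParams: 'ignored',
*   fragment: 'ignored',
* });
* ```
*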
* @publicApi
* @see Router.isActive
*/
export declare interface IsActiveMatchOptions {
/**
* Defines the strategy for comparing the matrix parameters of two `UrlTree`s.
*
* The matrix parameter matching is dependent on the strategy for matching the
* segments. That is, if the `paths` option is set to `'subset'`, only
* the matrix parameters of the matching segments will be compared.
*
* - `'exact'`: Requires that matching segments also have exact matrix parameter
* matches.
* - `'subset'`: The matching segments in the router's active `UrlTree` may contain
* extra matrix parameters, but those that exist in the `UrlTree` in question must match.
* - `'ignored'`: When comparing `UrlTree`s, matrix params will be ignored.
*/
matrixParams: 'exact' | 'subset' | 'ignored';
/**
* Defines the strategy for comparing the query parameters of two `UrlTree`s.
*
* - `'exact'`: the query parameters must match exactly.
* - `'subset'`: the active `UrlTree` may contain extra parameters,
* but must match the key and value of any that exist in the `UrlTree` in question.
* - `'ignored'`: When comparing `UrlTree`s, query params will be ignored.
*/
queryParams: 'exact' | 'subset' | 'ignored';
/**
* Defines the strategy for comparing the `UrlSegment`s of the `UrlTree`s.
*
* - `'exact'`: all segments in each `UrlTree` must match.
* - `'subset'`: a `UrlTree` will be determined to be active if it
* is a subtree of the active route. That is, the active route may contain extra
* segments, but must at least have all the segments of the `UrlTree` in question.
*/
paths: 'exact' | 'subset';
/**
* - `'exact'`: indicates that the `UrlTree` fragments must be equal.
* - `'ignored'`: the fragments will not be compared when determining if a
* `UrlTree` is active.
*/
fragment: 'exact' | 'ignored';
}
/**
*
* A function that returns a set of routes to load.
*
* The string form of `LoadChildren` is deprecated (see `DeprecatedLoadChildren`). The function
* form (`LoadChildrenCallback`) should be used instead.
*
* @see `loadChildrenCallback`
* @publicApi
*/
export declare type LoadChildren = LoadChildrenCallback | DeprecatedLoadChildren;
/**
*
* A function that is called to resolve a collection of lazy-loaded routes.
* Must be an arrow function of the following form:
* `() => import('...').then(mod => mod.MODULE)`
*
* For example:
*
* ```
* [{
* path: 'lazy',
* loadChildren: () => import('./lazy-route/lazy.module').then(mod => mod.LazyModule),
* }];
* ```
*
* @see [Route.loadChildren](api/router/Route#loadChildren)
* @publicApi
*/
export declare type LoadChildrenCallback = () => Type<any> | NgModuleFactory<any> | Observable<Type<any>> | Promise<NgModuleFactory<any> | Type<any> | any>;
/**
* Information about a navigation operation.
* Retrieve the most recent navigation object with the
* [Router.getCurrentNavigation() method](api/router/Router#getcurrentnavigation).
*
* * *id* : The unique identifier of the current navigation.
* * *initialUrl* : The target URL passed into the `Router#navigateByUrl()` call before navigation.
* This is the value before the router has parsed or applied redirects to it.
* * *extractedUrl* : The initial target URL after being parsed with `UrlSerializer.extract()`.
* * *finalUrl* : The extracted URL after redirects have been applied.
* This URL may not be available immediately, therefore this property can be `undefined`.
* It is guaranteed to be set after the `RoutesRecognized` event fires.
* * *trigger* : Identifies how this navigation was triggered.
* -- 'imperative'--Triggered by `router.navigateByUrl` or `router.navigate`.
* -- 'popstate'--Triggered by a popstate event.
* -- 'hashchange'--Triggered by a hashchange event.
* * *extras* : A `NavigationExtras` options object that controlled the strategy used for this
* navigation.
* * *previousNavigation* : The previously successful `Navigation` object. Only one previous
* navigation is available, therefore this previous `Navigation` object has a `null` value for its
* own `previousNavigation`.
*
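* For example (a sketch; `getCurrentNavigation()` returns `null` outside of a navigation):
*
* ```
* const nav = router.getCurrentNavigation();
* if (nav) {
*   console.log(nav.trigger, nav.extras.state);
* }
* ```
*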
* @publicApi
*/
export declare interface Navigation {
/**
* The unique identifier of the current navigation.
*/
id: number;
/**
* The target URL passed into the `Router#navigateByUrl()` call before navigation. This is
* the value before the router has parsed or applied redirects to it.
*/
initialUrl: string | UrlTree;
/**
* The initial target URL after being parsed with `UrlSerializer.extract()`.
*/
extractedUrl: UrlTree;
/**
* The extracted URL after redirects have been applied.
* This URL may not be available immediately, therefore this property can be `undefined`.
* It is guaranteed to be set after the `RoutesRecognized` event fires.
*/
finalUrl?: UrlTree;
/**
* Identifies how this navigation was triggered.
*
* * 'imperative'--Triggered by `router.navigateByUrl` or `router.navigate`.
* * 'popstate'--Triggered by a popstate event.
* * 'hashchange'--Triggered by a hashchange event.
*/
trigger: 'imperative' | 'popstate' | 'hashchange';
/**
* Options that controlled the strategy used for this navigation.
* See `NavigationExtras`.
*/
extras: NavigationExtras;
/**
* The previously successful `Navigation` object. Only one previous navigation
* is available, therefore this previous `Navigation` object has a `null` value
* for its own `previousNavigation`.
*/
previousNavigation: Navigation | null;
}
/**
* @description
*
* Options that modify the `Router` navigation strategy.
* Supply an object containing any of these properties to a `Router` navigation function to
* control how the navigation should be handled.
*
* @see [Router.navigate() method](api/router/Router#navigate)
* @see [Router.navigateByUrl() method](api/router/Router#navigatebyurl)
* @see [Routing and Navigation guide](guide/router)
*
* @publicApi
*/
export declare interface NavigationBehaviorOptions {
/**
* When true, navigates without pushing a new state into history.
*
* ```
* // Navigate silently to /view
* this.router.navigate(['/view'], { skipLocationChange: true });
* ```
*/
skipLocationChange?: boolean;
/**
* When true, navigates while replacing the current state in history.
*
* ```
* // Navigate to /view
* this.router.navigate(['/view'], { replaceUrl: true });
* ```
*/
replaceUrl?: boolean;
/**
* Developer-defined state that can be passed to any navigation.
* Access this value through the `Navigation.extras` object
* returned from the [Router.getCurrentNavigation()
* method](api/router/Router#getcurrentnavigation) while a navigation is executing.
*
* After a navigation completes, the router writes an object containing this
* value together with a `navigationId` to `history.state`.
* The value is written when `location.go()` or `location.replaceState()`
* is called before activating this route.
*
* Note that `history.state` does not pass an object equality test because
* the router adds the `navigationId` on each navigation.
*
*/
state?: {
[k: string]: any;
};
}
/**
* An event triggered when a navigation is canceled, directly or indirectly.
* This can happen for several reasons including when a route guard
* returns `false` or initiates a redirect by returning a `UrlTree`.
*
* @see `NavigationStart`
* @see `NavigationEnd`
* @see `NavigationError`
*
* @publicApi
*/
export declare class NavigationCancel extends RouterEvent {
/** @docsNotRequired */
reason: string;
constructor(
/** @docsNotRequired */
id: number,
/** @docsNotRequired */
url: string,
/** @docsNotRequired */
reason: string);
/** @docsNotRequired */
toString(): string;
}
/**
* An event triggered when a navigation ends successfully.
*
* @see `NavigationStart`
* @see `NavigationCancel`
* @see `NavigationError`
*
* @publicApi
*/
export declare class NavigationEnd extends RouterEvent {
/** @docsNotRequired */
urlAfterRedirects: string;
constructor(
/** @docsNotRequired */
id: number,
/** @docsNotRequired */
url: string,
/** @docsNotRequired */
urlAfterRedirects: string);
/** @docsNotRequired */
toString(): string;
}
/**
* An event triggered when a navigation fails due to an unexpected error.
*
* @see `NavigationStart`
* @see `NavigationEnd`
* @see `NavigationCancel`
*
* @publicApi
*/
export declare class NavigationError extends RouterEvent {
/** @docsNotRequired */
error: any;
constructor(
/** @docsNotRequired */
id: number,
/** @docsNotRequired */
url: string,
/** @docsNotRequired */
error: any);
/** @docsNotRequired */
toString(): string;
}
/**
* @description
*
* Options that modify the `Router` navigation strategy.
* Supply an object containing any of these properties to a `Router` navigation function to
* control how the target URL should be constructed or interpreted.
*
* @see [Router.navigate() method](api/router/Router#navigate)
* @see [Router.navigateByUrl() method](api/router/Router#navigatebyurl)
* @see [Router.createUrlTree() method](api/router/Router#createurltree)
* @see [Routing and Navigation guide](guide/router)
* @see UrlCreationOptions
* @see NavigationBehaviorOptions
*
* @publicApi
*/
export declare interface NavigationExtras extends UrlCreationOptions, NavigationBehaviorOptions {
}
/**
* An event triggered when a navigation starts.
*
* @publicApi
*/
export declare class NavigationStart extends RouterEvent {
/**
* Identifies the call or event that triggered the navigation.
* An `imperative` trigger is a call to `router.navigateByUrl()` or `router.navigate()`.
*
* @see `NavigationEnd`
* @see `NavigationCancel`
* @see `NavigationError`
*/
navigationTrigger?: 'imperative' | 'popstate' | 'hashchange';
/**
* The navigation state that was previously supplied to the `pushState` call,
* when the navigation is triggered by a `popstate` event. Otherwise null.
*
* The state object is defined by `NavigationExtras`, and contains any
* developer-defined state value, as well as a unique ID that
* the router assigns to every router transition/navigation.
*
* From the perspective of the router, the router never "goes back".
* When the user clicks on the back button in the browser,
* a new navigation ID is created.
*
* Use the ID in this previous-state object to differentiate between a newly created
* state and one returned to by a `popstate` event, so that you can restore some
* remembered state, such as scroll position.
*
*/
restoredState?: {
[k: string]: any;
navigationId: number;
} | null;
constructor(
/** @docsNotRequired */
id: number,
/** @docsNotRequired */
url: string,
/** @docsNotRequired */
navigationTrigger?: 'imperative' | 'popstate' | 'hashchange',
/** @docsNotRequired */
restoredState?: {
[k: string]: any;
navigationId: number;
} | null);
/** @docsNotRequired */
toString(): string;
}
/**
* @description
*
* Provides a preloading strategy that does not preload any modules.
*
* This strategy is enabled by default.
*
* @publicApi
*/
export declare class NoPreloading implements PreloadingStrategy {
preload(route: Route, fn: () => Observable<any>): Observable<any>;
}
/**
* Store contextual information about a `RouterOutlet`
*
* @publicApi
*/
export declare class OutletContext {
outlet: RouterOutletContract | null;
route: ActivatedRoute | null;
resolver: ComponentFactoryResolver | null;
children: ChildrenOutletContexts;
attachRef: ComponentRef<any> | null;
}
/**
* A map that provides access to the required and optional parameters
* specific to a route.
* The map supports retrieving a single value with `get()`
* or multiple values with `getAll()`.
*
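* For example (a sketch, assuming an injected `ActivatedRoute` named `route`; the
* parameter names are illustrative):
*
* ```
* const map = route.snapshot.paramMap;
* const id = map.get('id');        // single value, or null if absent
* const tags = map.getAll('tag');  // every value, possibly an empty array
* ```
*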
* @see [URLSearchParams](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams)
*
* @publicApi
*/
export declare interface ParamMap {
/**
* Reports whether the map contains a given parameter.
* @param name The parameter name.
* @returns True if the map contains the given parameter, false otherwise.
*/
has(name: string): boolean;
/**
* Retrieves a single value for a parameter.
* @param name The parameter name.
* @return The parameter's single value,
* or the first value if the parameter has multiple values,
* or `null` when there is no such parameter.
*/
get(name: string): string | null;
/**
* Retrieves multiple values for a parameter.
* @param name The parameter name.
* @return An array containing one or more values,
* or an empty array if there is no such parameter.
*
*/
getAll(name: string): string[];
/** Names of the parameters in the map. */
readonly keys: string[];
}
/**
* A collection of matrix and query URL parameters.
* @see `convertToParamMap()`
* @see `ParamMap`
*
* @publicApi
*/
export declare type Params = {
[key: string]: any;
};
/**
* @description
*
* Provides a preloading strategy that preloads all modules as quickly as possible.
*
* ```
* RouterModule.forRoot(ROUTES, {preloadingStrategy: PreloadAllModules})
* ```
*
* @publicApi
*/
export declare class PreloadAllModules implements PreloadingStrategy {
preload(route: Route, fn: () => Observable<any>): Observable<any>;
}
/**
* @description
*
* Provides a preloading strategy.
*
* @publicApi
*/
export declare abstract class PreloadingStrategy {
abstract preload(route: Route, fn: () => Observable<any>): Observable<any>;
}
/**
* The primary routing outlet.
*
* @publicApi
*/
export declare const PRIMARY_OUTLET = "primary";
/**
* Registers a [DI provider](guide/glossary#provider) for a set of routes.
* @param routes The route configuration to provide.
*
* @usageNotes
*
* ```
* @NgModule({
* imports: [RouterModule.forChild(ROUTES)],
* providers: [provideRoutes(EXTRA_ROUTES)]
* })
* class MyNgModule {}
* ```
*
* @publicApi
*/
export declare function provideRoutes(routes: Routes): any;
/**
*
* How to handle query parameters in a router link.
* One of:
* - `merge` : Merge new with current parameters.
* - `preserve` : Preserve current parameters.
*
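* For example, merging a new parameter into the existing query string (a sketch):
*
* ```
* // Keeps existing query params and adds or overrides `page`.
* router.navigate(['/results'], {
*   queryParams: {page: 2},
*   queryParamsHandling: 'merge',
* });
* ```
*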
* @see `UrlCreationOptions#queryParamsHandling`
* @see `RouterLink`
* @publicApi
*/
export declare type QueryParamsHandling = 'merge' | 'preserve' | '';
/**
* @description
*
* Interface that classes can implement to be a data provider.
* A data provider class can be used with the router to resolve data during navigation.
* The interface defines a `resolve()` method that is invoked when the navigation starts.
* The router waits for the data to be resolved before the route is finally activated.
*
* The following example implements a `resolve()` method that retrieves the data
* needed to activate the requested route.
*
* ```
* @Injectable({ providedIn: 'root' })
* export class HeroResolver implements Resolve<Hero> {
* constructor(private service: HeroService) {}
*
* resolve(
* route: ActivatedRouteSnapshot,
* state: RouterStateSnapshot
* ): Observable<any>|Promise<any>|any {
* return this.service.getHero(route.paramMap.get('id'));
* }
* }
* ```
*
* Here, the defined `resolve()` function is provided as part of the `Route` object
* in the router configuration:
*
* ```
* @NgModule({
* imports: [
* RouterModule.forRoot([
* {
* path: 'detail/:id',
* component: HeroDetailComponent,
* resolve: {
* hero: HeroResolver
* }
* }
* ])
* ],
* exports: [RouterModule]
* })
* export class AppRoutingModule {}
* ```
*
* You can alternatively provide an in-line function with the `resolve()` signature:
*
* ```
* export const myHero: Hero = {
* // ...
* }
*
* @NgModule({
* imports: [
* RouterModule.forRoot([
* {
* path: 'detail/:id',
* component: HeroComponent,
* resolve: {
* hero: 'heroResolver'
* }
* }
* ])
* ],
* providers: [
* {
* provide: 'heroResolver',
* useValue: (route: ActivatedRouteSnapshot, state: RouterStateSnapshot) => myHero
* }
* ]
* })
* export class AppModule {}
* ```
*
* @usageNotes
*
* When both guards and resolvers are specified, the resolvers are not executed until
* all guards have run and succeeded.
* For example, consider the following route configuration:
*
* ```
* {
* path: 'base',
* canActivate: [BaseGuard],
* resolve: {data: BaseDataResolver},
* children: [
* {
* path: 'child',
* canActivate: [ChildGuard],
* component: ChildComponent,
* resolve: {childData: ChildDataResolver}
* }
* ]
* }
* ```
* The order of execution is: BaseGuard, ChildGuard, BaseDataResolver, ChildDataResolver.
*
* @publicApi
*/
export declare interface Resolve<T> {
resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable<T> | Promise<T> | T;
}
/**
*
* Represents the resolved data associated with a particular route.
*
* @see `Route#resolve`.
*
* @publicApi
*/
export declare type ResolveData = {
[name: string]: any;
};
/**
* An event triggered at the end of the Resolve phase of routing.
* @see `ResolveStart`.
*
* @publicApi
*/
export declare class ResolveEnd extends RouterEvent {
/** @docsNotRequired */
urlAfterRedirects: string;
/** @docsNotRequired */
state: RouterStateSnapshot;
constructor(
/** @docsNotRequired */
id: number,
/** @docsNotRequired */
url: string,
/** @docsNotRequired */
urlAfterRedirects: string,
/** @docsNotRequired */
state: RouterStateSnapshot);
toString(): string;
}
/**
* An event triggered at the start of the Resolve phase of routing.
*
* Runs in the "resolve" phase whether or not there is anything to resolve.
* In the future, this may change to run only when there are things to be resolved.
*
* @see `ResolveEnd`
*
* @publicApi
*/
export declare class ResolveStart extends RouterEvent {
/** @docsNotRequired */
urlAfterRedirects: string;
/** @docsNotRequired */
state: RouterStateSnapshot;
constructor(
/** @docsNotRequired */
id: number,
/** @docsNotRequired */
url: string,
/** @docsNotRequired */
urlAfterRedirects: string,
/** @docsNotRequired */
state: RouterStateSnapshot);
toString(): string;
}
/**
* A configuration object that defines a single route.
* A set of routes are collected in a `Routes` array to define a `Router` configuration.
* The router attempts to match segments of a given URL against each route,
* using the configuration options defined in this object.
*
* Supports static, parameterized, redirect, and wildcard routes, as well as
* custom route data and resolve methods.
*
* For detailed usage information, see the [Routing Guide](guide/router).
*
* @usageNotes
*
* ### Simple Configuration
*
* The following route specifies that when navigating to, for example,
* `/team/11/user/bob`, the router creates the 'Team' component
* with the 'User' child component in it.
*
* ```
* [{
* path: 'team/:id',
* component: Team,
* children: [{
* path: 'user/:name',
* component: User
* }]
* }]
* ```
*
* ### Multiple Outlets
*
* The following route creates sibling components with multiple outlets.
* When navigating to `/team/11(aux:chat/jim)`, the router creates the 'Team' component next to
* the 'Chat' component. The 'Chat' component is placed into the 'aux' outlet.
*
* ```
* [{
* path: 'team/:id',
* component: Team
* }, {
* path: 'chat/:user',
* component: Chat
* outlet: 'aux'
* }]
* ```
*
* ### Wild Cards
*
* The following route uses wild-card notation to specify a component
* that is always instantiated regardless of where you navigate to.
*
* ```
* [{
* path: '**',
* component: WildcardComponent
* }]
* ```
*
* ### Redirects
*
* The following route uses the `redirectTo` property to ignore a segment of
* a given URL when looking for a child path.
*
* When navigating to '/team/11/legacy/user/jim', the router changes the URL segment
* '/team/11/legacy/user/jim' to '/team/11/user/jim', and then instantiates
* the Team component with the User child component in it.
*
* ```
* [{
* path: 'team/:id',
* component: Team,
* children: [{
* path: 'legacy/user/:name',
* redirectTo: 'user/:name'
* }, {
* path: 'user/:name',
* component: User
* }]
* }]
* ```
*
* The redirect path can be relative, as shown in this example, or absolute.
* If we change the `redirectTo` value in the example to the absolute URL segment '/user/:name',
* the result URL is also absolute, '/user/jim'.
*
* ### Empty Path
*
* Empty-path route configurations can be used to instantiate components that do not 'consume'
* any URL segments.
*
* In the following configuration, when navigating to
* `/team/11`, the router instantiates the 'AllUsers' component.
*
* ```
* [{
* path: 'team/:id',
* component: Team,
* children: [{
* path: '',
* component: AllUsers
* }, {
* path: 'user/:name',
* component: User
* }]
* }]
* ```
*
* Empty-path routes can have children. In the following example, when navigating
* to `/team/11/user/jim`, the router instantiates the wrapper component with
* the user component in it.
*
* Note that an empty path route inherits its parent's parameters and data.
*
* ```
* [{
* path: 'team/:id',
* component: Team,
* children: [{
* path: '',
* component: WrapperCmp,
* children: [{
* path: 'user/:name',
* component: User
* }]
* }]
* }]
* ```
*
* ### Matching Strategy
*
* The default path-match strategy is 'prefix', which means that the router
* checks URL elements from the left to see if the URL matches a specified path.
* For example, '/team/11/user' matches 'team/:id'.
*
* ```
* [{
* path: '',
* pathMatch: 'prefix', //default
* redirectTo: 'main'
* }, {
* path: 'main',
* component: Main
* }]
* ```
*
* You can specify the path-match strategy 'full' to make sure that the path
* covers the whole unconsumed URL. It is important to do this when redirecting
* empty-path routes. Otherwise, because an empty path is a prefix of any URL,
* the router would apply the redirect even when navigating to the redirect destination,
* creating an endless loop.
*
* In the following example, supplying the 'full' `pathMatch` strategy ensures
* that the router applies the redirect if and only if navigating to '/'.
*
* ```
* [{
* path: '',
* pathMatch: 'full',
* redirectTo: 'main'
* }, {
* path: 'main',
* component: Main
* }]
* ```
*
* ### Componentless Routes
*
* You can share parameters between sibling components.
* For example, suppose that two sibling components should go next to each other,
* and both of them require an ID parameter. You can accomplish this using a route
* that does not specify a component at the top level.
*
* In the following example, 'MainChild' and 'AuxChild' are siblings.
* When navigating to 'parent/10/(a//aux:b)', the route instantiates
* the main child and aux child components next to each other.
* For this to work, the application component must have the primary and aux outlets defined.
*
* ```
* [{
* path: 'parent/:id',
* children: [
* { path: 'a', component: MainChild },
* { path: 'b', component: AuxChild, outlet: 'aux' }
* ]
* }]
* ```
*
* The router merges the parameters, data, and resolve of the componentless
* parent into the parameters, data, and resolve of the children.
*
* This is especially useful when child components are defined
* with an empty path string, as in the following example.
* With this configuration, navigating to '/parent/10' creates
* the main child and aux components.
*
* ```
* [{
* path: 'parent/:id',
* children: [
* { path: '', component: MainChild },
* { path: '', component: AuxChild, outlet: 'aux' }
* ]
* }]
* ```
*
* ### Lazy Loading
*
* Lazy loading speeds up application load time by splitting the application
* into multiple bundles and loading them on demand.
* To use lazy loading, provide the `loadChildren` property in the `Route` object,
* instead of the `children` property.
*
* Given the following example route, the router will lazy load
* the associated module on demand using the browser native import system.
*
* ```
* [{
* path: 'lazy',
* loadChildren: () => import('./lazy-route/lazy.module').then(mod => mod.LazyModule),
* }];
* ```
*
* @publicApi
*/
export declare interface Route {
/**
* The path to match against. Cannot be used together with a custom `matcher` function.
* A URL string that uses router matching notation.
* Can be a wild card (`**`) that matches any URL (see Usage Notes below).
* Default is "/" (the root path).
*
*/
path?: string;
/**
* The path-matching strategy, one of 'prefix' or 'full'.
* Default is 'prefix'.
*
* By default, the router checks URL elements from the left to see if the URL
 * matches a given path and stops when there is a config match. Importantly, there must still be a
 * config match for each segment of the URL. For example, '/team/11/user' matches the prefix
 * 'team/:id' if one of the route's children matches the segment 'user'. That is, the URL
 * '/team/11/user' matches the config
* `{path: 'team/:id', children: [{path: ':user', component: User}]}`
* but does not match when there are no children as in `{path: 'team/:id', component: Team}`.
*
* The path-match strategy 'full' matches against the entire URL.
* It is important to do this when redirecting empty-path routes.
* Otherwise, because an empty path is a prefix of any URL,
* the router would apply the redirect even when navigating
* to the redirect destination, creating an endless loop.
*
*/
pathMatch?: string;
/**
* A custom URL-matching function. Cannot be used together with `path`.
*/
matcher?: UrlMatcher;
/**
* The component to instantiate when the path matches.
* Can be empty if child routes specify components.
*/
component?: Type<any>;
/**
* A URL to redirect to when the path matches.
*
* Absolute if the URL begins with a slash (/), otherwise relative to the path URL.
* Note that no further redirects are evaluated after an absolute redirect.
*
* When not present, router does not redirect.
*/
redirectTo?: string;
/**
* Name of a `RouterOutlet` object where the component can be placed
* when the path matches.
*/
outlet?: string;
/**
* An array of dependency-injection tokens used to look up `CanActivate()`
* handlers, in order to determine if the current user is allowed to
* activate the component. By default, any user can activate.
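     *
     * For example, with an assumed `AuthGuard` service that implements `CanActivate`:
     *
     * ```
     * {path: 'admin', component: AdminCmp, canActivate: [AuthGuard]}
     * ```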
*/
canActivate?: any[];
/**
* An array of DI tokens used to look up `CanActivateChild()` handlers,
* in order to determine if the current user is allowed to activate
* a child of the component. By default, any user can activate a child.
*/
canActivateChild?: any[];
/**
* An array of DI tokens used to look up `CanDeactivate()`
* handlers, in order to determine if the current user is allowed to
* deactivate the component. By default, any user can deactivate.
*
*/
canDeactivate?: any[];
/**
* An array of DI tokens used to look up `CanLoad()`
* handlers, in order to determine if the current user is allowed to
* load the component. By default, any user can load.
*/
canLoad?: any[];
/**
* Additional developer-defined data provided to the component via
* `ActivatedRoute`. By default, no additional data is passed.
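     *
     * A sketch with an assumed `title` value, supplied on the route definition:
     *
     * ```
     * {path: 'about', component: AboutCmp, data: {title: 'About'}}
     * ```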
*/
data?: Data;
/**
* A map of DI tokens used to look up data resolvers. See `Resolve`.
*/
resolve?: ResolveData;
/**
* An array of child `Route` objects that specifies a nested route
* configuration.
*/
children?: Routes;
/**
* An object specifying lazy-loaded child routes.
*/
loadChildren?: LoadChildren;
/**
* Defines when guards and resolvers will be run. One of
* - `paramsOrQueryParamsChange` : Run when query parameters change.
* - `always` : Run on every execution.
* By default, guards and resolvers run only when the matrix
* parameters of the route change.
*/
runGuardsAndResolvers?: RunGuardsAndResolvers;
}
/**
* An event triggered when a route has been lazy loaded.
*
* @see `RouteConfigLoadStart`
*
* @publicApi
*/
export declare class RouteConfigLoadEnd {
/** @docsNotRequired */
route: Route;
constructor(
/** @docsNotRequired */
route: Route);
toString(): string;
}
/**
* An event triggered before lazy loading a route configuration.
*
* @see `RouteConfigLoadEnd`
*
* @publicApi
*/
export declare class RouteConfigLoadStart {
/** @docsNotRequired */
route: Route;
constructor(
/** @docsNotRequired */
route: Route);
toString(): string;
}
/**
* @description
*
* A service that provides navigation among views and URL manipulation capabilities.
*
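 * The following sketch shows the common injection pattern; the component and
 * the '/home' route are illustrative assumptions, not part of this API surface.
 *
 * ```
 * @Component({template: '<router-outlet></router-outlet>'})
 * class AppComponent {
 *   constructor(private router: Router) {}
 *
 *   goHome() {
 *     // Navigate imperatively to an assumed '/home' route.
 *     this.router.navigate(['/home']);
 *   }
 * }
 * ```
 *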
* @see `Route`.
* @see [Routing and Navigation Guide](guide/router).
*
* @ngModule RouterModule
*
* @publicApi
*/
export declare class Router {
private rootComponentType;
private urlSerializer;
private rootContexts;
private location;
config: Routes;
private currentUrlTree;
private rawUrlTree;
private browserUrlTree;
private readonly transitions;
private navigations;
private lastSuccessfulNavigation;
private currentNavigation;
private disposed;
private locationSubscription?;
/**
* Tracks the previously seen location change from the location subscription so we can compare
* the two latest to see if they are duplicates. See setUpLocationChangeListener.
*/
private lastLocationChangeInfo;
private navigationId;
private configLoader;
private ngModule;
private console;
private isNgZoneEnabled;
/**
* An event stream for routing events in this NgModule.
*/
readonly events: Observable<Event>;
/**
* The current state of routing in this NgModule.
*/
readonly routerState: RouterState;
/**
* A handler for navigation errors in this NgModule.
*/
errorHandler: ErrorHandler;
/**
* A handler for errors thrown by `Router.parseUrl(url)`
* when `url` contains an invalid character.
* The most common case is a `%` sign
* that's not encoded and is not part of a percent encoded sequence.
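     *
     * A minimal sketch of a handler that falls back to the root URL
     * (the fallback choice is an assumption, not the default behavior):
     *
     * ```
     * router.malformedUriErrorHandler =
     *     (error: URIError, urlSerializer: UrlSerializer, url: string) =>
     *         urlSerializer.parse('/');
     * ```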
*/
malformedUriErrorHandler: (error: URIError, urlSerializer: UrlSerializer, url: string) => UrlTree;
/**
* True if at least one navigation event has occurred,
* false otherwise.
*/
navigated: boolean;
private lastSuccessfulId;
/**
* A strategy for extracting and merging URLs.
* Used for AngularJS to Angular migrations.
*/
urlHandlingStrategy: UrlHandlingStrategy;
/**
* A strategy for re-using routes.
*/
routeReuseStrategy: RouteReuseStrategy;
/**
* How to handle a navigation request to the current URL. One of:
* - `'ignore'` : The router ignores the request.
* - `'reload'` : The router reloads the URL. Use to implement a "refresh" feature.
*/
onSameUrlNavigation: 'reload' | 'ignore';
/**
* How to merge parameters, data, and resolved data from parent to child
* routes. One of:
*
* - `'emptyOnly'` : Inherit parent parameters, data, and resolved data
* for path-less or component-less routes.
* - `'always'` : Inherit parent parameters, data, and resolved data
* for all child routes.
*/
paramsInheritanceStrategy: 'emptyOnly' | 'always';
/**
* Determines when the router updates the browser URL.
* By default (`"deferred"`), updates the browser URL after navigation has finished.
* Set to `'eager'` to update the browser URL at the beginning of navigation.
* You can choose to update early so that, if navigation fails,
* you can show an error message with the URL that failed.
*/
urlUpdateStrategy: 'deferred' | 'eager';
/**
* Enables a bug fix that corrects relative link resolution in components with empty paths.
* @see `RouterModule`
*/
relativeLinkResolution: 'legacy' | 'corrected';
/**
* Creates the router service.
*/
constructor(rootComponentType: Type<any> | null, urlSerializer: UrlSerializer, rootContexts: ChildrenOutletContexts, location: Location, injector: Injector, loader: NgModuleFactoryLoader, compiler: Compiler, config: Routes);
private setupNavigations;
private getTransition;
private setTransition;
/**
* Sets up the location change listener and performs the initial navigation.
*/
initialNavigation(): void;
/**
* Sets up the location change listener. This listener detects navigations triggered from outside
* the Router (the browser back/forward buttons, for example) and schedules a corresponding Router
* navigation so that the correct events, guards, etc. are triggered.
*/
setUpLocationChangeListener(): void;
/** Extracts router-related information from a `PopStateEvent`. */
private extractLocationChangeInfoFromEvent;
/**
* Determines whether two events triggered by the Location subscription are due to the same
* navigation. The location subscription can fire two events (popstate and hashchange) for a
* single navigation. The second one should be ignored, that is, we should not schedule another
* navigation in the Router.
*/
private shouldScheduleNavigation;
/** The current URL. */
get url(): string;
/**
* Returns the current `Navigation` object when the router is navigating,
* and `null` when idle.
*/
getCurrentNavigation(): Navigation | null;
/**
* Resets the route configuration used for navigation and generating links.
*
* @param config The route array for the new configuration.
*
* @usageNotes
*
* ```
* router.resetConfig([
* { path: 'team/:id', component: TeamCmp, children: [
* { path: 'simple', component: SimpleCmp },
* { path: 'user/:name', component: UserCmp }
* ]}
* ]);
* ```
*/
resetConfig(config: Routes): void;
/** @nodoc */
ngOnDestroy(): void;
/** Disposes of the router. */
dispose(): void;
/**
* Appends URL segments to the current URL tree to create a new URL tree.
*
* @param commands An array of URL fragments with which to construct the new URL tree.
* If the path is static, can be the literal URL string. For a dynamic path, pass an array of path
* segments, followed by the parameters for each segment.
* The fragments are applied to the current URL tree or the one provided in the `relativeTo`
* property of the options object, if supplied.
* @param navigationExtras Options that control the navigation strategy.
* @returns The new URL tree.
*
* @usageNotes
*
* ```
* // create /team/33/user/11
* router.createUrlTree(['/team', 33, 'user', 11]);
*
* // create /team/33;expand=true/user/11
* router.createUrlTree(['/team', 33, {expand: true}, 'user', 11]);
*
* // you can collapse static segments like this (this works only with the first passed-in value):
* router.createUrlTree(['/team/33/user', userId]);
*
* // If the first segment can contain slashes, and you do not want the router to split it,
* // you can do the following:
* router.createUrlTree([{segmentPath: '/one/two'}]);
*
* // create /team/33/(user/11//right:chat)
* router.createUrlTree(['/team', 33, {outlets: {primary: 'user/11', right: 'chat'}}]);
*
* // remove the right secondary node
* router.createUrlTree(['/team', 33, {outlets: {primary: 'user/11', right: null}}]);
*
* // assuming the current url is `/team/33/user/11` and the route points to `user/11`
*
* // navigate to /team/33/user/11/details
* router.createUrlTree(['details'], {relativeTo: route});
*
* // navigate to /team/33/user/22
* router.createUrlTree(['../22'], {relativeTo: route});
*
* // navigate to /team/44/user/22
* router.createUrlTree(['../../team/44/user/22'], {relativeTo: route});
 * ```
 *
 * Note that a value of `null` or `undefined` for `relativeTo` indicates that the
 * tree should be created relative to the root.
*/
createUrlTree(commands: any[], navigationExtras?: UrlCreationOptions): UrlTree;
/**
* Navigates to a view using an absolute route path.
*
* @param url An absolute path for a defined route. The function does not apply any delta to the
* current URL.
* @param extras An object containing properties that modify the navigation strategy.
*
* @returns A Promise that resolves to 'true' when navigation succeeds,
* to 'false' when navigation fails, or is rejected on error.
*
* @usageNotes
*
* The following calls request navigation to an absolute path.
*
* ```
* router.navigateByUrl("/team/33/user/11");
*
* // Navigate without updating the URL
* router.navigateByUrl("/team/33/user/11", { skipLocationChange: true });
* ```
*
* @see [Routing and Navigation guide](guide/router)
*
*/
navigateByUrl(url: string | UrlTree, extras?: NavigationBehaviorOptions): Promise<boolean>;
/**
* Navigate based on the provided array of commands and a starting point.
* If no starting route is provided, the navigation is absolute.
*
* @param commands An array of URL fragments with which to construct the target URL.
* If the path is static, can be the literal URL string. For a dynamic path, pass an array of path
* segments, followed by the parameters for each segment.
* The fragments are applied to the current URL or the one provided in the `relativeTo` property
* of the options object, if supplied.
* @param extras An options object that determines how the URL should be constructed or
* interpreted.
*
* @returns A Promise that resolves to `true` when navigation succeeds, to `false` when navigation
* fails,
* or is rejected on error.
*
* @usageNotes
*
* The following calls request navigation to a dynamic route path relative to the current URL.
*
* ```
* router.navigate(['team', 33, 'user', 11], {relativeTo: route});
*
* // Navigate without updating the URL, overriding the default behavior
* router.navigate(['team', 33, 'user', 11], {relativeTo: route, skipLocationChange: true});
* ```
*
* @see [Routing and Navigation guide](guide/router)
*
*/
navigate(commands: any[], extras?: NavigationExtras): Promise<boolean>;
/** Serializes a `UrlTree` into a string */
serializeUrl(url: UrlTree): string;
/** Parses a string into a `UrlTree` */
parseUrl(url: string): UrlTree;
/**
* Returns whether the url is activated.
*
* @deprecated
     * Use `IsActiveMatchOptions` instead.
     *
     * - The equivalent `IsActiveMatchOptions` for `true` is
* `{paths: 'exact', queryParams: 'exact', fragment: 'ignored', matrixParams: 'ignored'}`.
* - The equivalent for `false` is
* `{paths: 'subset', queryParams: 'subset', fragment: 'ignored', matrixParams: 'ignored'}`.
*/
isActive(url: string | UrlTree, exact: boolean): boolean;
/**
* Returns whether the url is activated.
*/
isActive(url: string | UrlTree, matchOptions: IsActiveMatchOptions): boolean;
private removeEmptyProps;
private processNavigations;
private scheduleNavigation;
private setBrowserUrl;
private resetStateAndUrl;
private resetUrlToCurrentUrlTree;
static ɵfac: ɵngcc0.ɵɵFactoryDeclaration<Router, never>;
static ɵprov: ɵngcc0.ɵɵInjectableDeclaration<Router>;
}
/**
* A [DI token](guide/glossary/#di-token) for the router service.
*
* @publicApi
*/
export declare const ROUTER_CONFIGURATION: InjectionToken<ExtraOptions>;
/**
* A [DI token](guide/glossary/#di-token) for the router initializer that
* is called after the app is bootstrapped.
*
* @publicApi
*/
export declare const ROUTER_INITIALIZER: InjectionToken<(compRef: ComponentRef<any>) => void>;
/**
* @description
*
* Provides a way to customize when activated routes get reused.
*
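 * The following minimal sketch (an illustrative assumption, not a built-in
 * strategy) caches every detached route by its configured path:
 *
 * ```
 * class CacheAllRouteReuseStrategy implements RouteReuseStrategy {
 *   private handles = new Map<string, DetachedRouteHandle>();
 *
 *   shouldDetach(route: ActivatedRouteSnapshot): boolean { return true; }
 *   store(route: ActivatedRouteSnapshot, handle: DetachedRouteHandle | null): void {
 *     if (handle) { this.handles.set(route.routeConfig?.path ?? '', handle); }
 *   }
 *   shouldAttach(route: ActivatedRouteSnapshot): boolean {
 *     return this.handles.has(route.routeConfig?.path ?? '');
 *   }
 *   retrieve(route: ActivatedRouteSnapshot): DetachedRouteHandle | null {
 *     return this.handles.get(route.routeConfig?.path ?? '') ?? null;
 *   }
 *   shouldReuseRoute(future: ActivatedRouteSnapshot, curr: ActivatedRouteSnapshot): boolean {
 *     // Same default criterion the router uses: identical route configs.
 *     return future.routeConfig === curr.routeConfig;
 *   }
 * }
 * ```
 *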
* @publicApi
*/
export declare abstract class RouteReuseStrategy {
/** Determines if this route (and its subtree) should be detached to be reused later */
abstract shouldDetach(route: ActivatedRouteSnapshot): boolean;
/**
* Stores the detached route.
*
* Storing a `null` value should erase the previously stored value.
*/
abstract store(route: ActivatedRouteSnapshot, handle: DetachedRouteHandle | null): void;
/** Determines if this route (and its subtree) should be reattached */
abstract shouldAttach(route: ActivatedRouteSnapshot): boolean;
/** Retrieves the previously stored route */
abstract retrieve(route: ActivatedRouteSnapshot): DetachedRouteHandle | null;
/** Determines if a route should be reused */
abstract shouldReuseRoute(future: ActivatedRouteSnapshot, curr: ActivatedRouteSnapshot): boolean;
}
/**
* Base for events the router goes through, as opposed to events tied to a specific
* route. Fired one time for any given navigation.
*
* The following code shows how a class subscribes to router events.
*
* ```ts
* class MyService {
* constructor(public router: Router, logger: Logger) {
* router.events.pipe(
* filter((e: Event): e is RouterEvent => e instanceof RouterEvent)
* ).subscribe((e: RouterEvent) => {
* logger.log(e.id, e.url);
* });
* }
* }
* ```
*
* @see `Event`
* @see [Router events summary](guide/router-reference#router-events)
* @publicApi
*/
export declare class RouterEvent {
/** A unique ID that the router assigns to every router navigation. */
id: number;
/** The URL that is the destination for this navigation. */
url: string;
constructor(
/** A unique ID that the router assigns to every router navigation. */
id: number,
/** The URL that is the destination for this navigation. */
url: string);
}
/**
* @description
*
* When applied to an element in a template, makes that element a link
* that initiates navigation to a route. Navigation opens one or more routed components
* in one or more `<router-outlet>` locations on the page.
*
* Given a route configuration `[{ path: 'user/:name', component: UserCmp }]`,
* the following creates a static link to the route:
* `<a routerLink="/user/bob">link to user component</a>`
*
* You can use dynamic values to generate the link.
* For a dynamic link, pass an array of path segments,
* followed by the params for each segment.
* For example, `['/team', teamId, 'user', userName, {details: true}]`
* generates a link to `/team/11/user/bob;details=true`.
*
 * Multiple static segments can be merged into one term and combined with dynamic segments.
* For example, `['/team/11/user', userName, {details: true}]`
*
* The input that you provide to the link is treated as a delta to the current URL.
* For instance, suppose the current URL is `/user/(box//aux:team)`.
* The link `<a [routerLink]="['/user/jim']">Jim</a>` creates the URL
* `/user/(jim//aux:team)`.
* See {@link Router#createUrlTree createUrlTree} for more information.
*
* @usageNotes
*
* You can use absolute or relative paths in a link, set query parameters,
* control how parameters are handled, and keep a history of navigation states.
*
* ### Relative link paths
*
* The first segment name can be prepended with `/`, `./`, or `../`.
* * If the first segment begins with `/`, the router looks up the route from the root of the
* app.
* * If the first segment begins with `./`, or doesn't begin with a slash, the router
* looks in the children of the current activated route.
* * If the first segment begins with `../`, the router goes up one level in the route tree.
*
* ### Setting and handling query params and fragments
*
* The following link adds a query parameter and a fragment to the generated URL:
*
* ```
* <a [routerLink]="['/user/bob']" [queryParams]="{debug: true}" fragment="education">
* link to user component
* </a>
* ```
* By default, the directive constructs the new URL using the given query parameters.
* The example generates the link: `/user/bob?debug=true#education`.
*
* You can instruct the directive to handle query parameters differently
* by specifying the `queryParamsHandling` option in the link.
* Allowed values are:
*
* - `'merge'`: Merge the given `queryParams` into the current query params.
* - `'preserve'`: Preserve the current query params.
*
* For example:
*
* ```
* <a [routerLink]="['/user/bob']" [queryParams]="{debug: true}" queryParamsHandling="merge">
* link to user component
* </a>
* ```
*
* See {@link UrlCreationOptions.queryParamsHandling UrlCreationOptions#queryParamsHandling}.
*
* ### Preserving navigation history
*
* You can provide a `state` value to be persisted to the browser's
* [`History.state` property](https://developer.mozilla.org/en-US/docs/Web/API/History#Properties).
* For example:
*
* ```
* <a [routerLink]="['/user/bob']" [state]="{tracingId: 123}">
* link to user component
* </a>
* ```
*
* Use {@link Router.getCurrentNavigation() Router#getCurrentNavigation} to retrieve a saved
* navigation-state value. For example, to capture the `tracingId` during the `NavigationStart`
* event:
*
* ```
* // Get NavigationStart events
* router.events.pipe(filter(e => e instanceof NavigationStart)).subscribe(e => {
* const navigation = router.getCurrentNavigation();
* tracingService.trace({id: navigation.extras.state.tracingId});
* });
* ```
*
* @ngModule RouterModule
*
* @publicApi
*/
export declare class RouterLink implements OnChanges {
private router;
private route;
/**
* Passed to {@link Router#createUrlTree Router#createUrlTree} as part of the
* `UrlCreationOptions`.
* @see {@link UrlCreationOptions#queryParams UrlCreationOptions#queryParams}
* @see {@link Router#createUrlTree Router#createUrlTree}
*/
queryParams?: Params | null;
/**
* Passed to {@link Router#createUrlTree Router#createUrlTree} as part of the
* `UrlCreationOptions`.
* @see {@link UrlCreationOptions#fragment UrlCreationOptions#fragment}
* @see {@link Router#createUrlTree Router#createUrlTree}
*/
fragment?: string;
/**
* Passed to {@link Router#createUrlTree Router#createUrlTree} as part of the
* `UrlCreationOptions`.
* @see {@link UrlCreationOptions#queryParamsHandling UrlCreationOptions#queryParamsHandling}
* @see {@link Router#createUrlTree Router#createUrlTree}
*/
queryParamsHandling?: QueryParamsHandling | null;
/**
* Passed to {@link Router#createUrlTree Router#createUrlTree} as part of the
* `UrlCreationOptions`.
* @see {@link UrlCreationOptions#preserveFragment UrlCreationOptions#preserveFragment}
* @see {@link Router#createUrlTree Router#createUrlTree}
*/
preserveFragment: boolean;
/**
* Passed to {@link Router#navigateByUrl Router#navigateByUrl} as part of the
* `NavigationBehaviorOptions`.
* @see {@link NavigationBehaviorOptions#skipLocationChange NavigationBehaviorOptions#skipLocationChange}
* @see {@link Router#navigateByUrl Router#navigateByUrl}
*/
skipLocationChange: boolean;
/**
* Passed to {@link Router#navigateByUrl Router#navigateByUrl} as part of the
* `NavigationBehaviorOptions`.
* @see {@link NavigationBehaviorOptions#replaceUrl NavigationBehaviorOptions#replaceUrl}
* @see {@link Router#navigateByUrl Router#navigateByUrl}
*/
replaceUrl: boolean;
/**
* Passed to {@link Router#navigateByUrl Router#navigateByUrl} as part of the
* `NavigationBehaviorOptions`.
* @see {@link NavigationBehaviorOptions#state NavigationBehaviorOptions#state}
* @see {@link Router#navigateByUrl Router#navigateByUrl}
*/
state?: {
[k: string]: any;
};
/**
* Passed to {@link Router#createUrlTree Router#createUrlTree} as part of the
* `UrlCreationOptions`.
* Specify a value here when you do not want to use the default value
* for `routerLink`, which is the current activated route.
* Note that a value of `undefined` here will use the `routerLink` default.
* @see {@link UrlCreationOptions#relativeTo UrlCreationOptions#relativeTo}
* @see {@link Router#createUrlTree Router#createUrlTree}
*/
relativeTo?: ActivatedRoute | null;
private commands;
private preserve;
constructor(router: Router, route: ActivatedRoute, tabIndex: string, renderer: Renderer2, el: ElementRef);
/** @nodoc */
ngOnChanges(changes: SimpleChanges): void;
/**
* Commands to pass to {@link Router#createUrlTree Router#createUrlTree}.
* - **array**: commands to pass to {@link Router#createUrlTree Router#createUrlTree}.
* - **string**: shorthand for array of commands with just the string, i.e. `['/route']`
* - **null|undefined**: shorthand for an empty array of commands, i.e. `[]`
* @see {@link Router#createUrlTree Router#createUrlTree}
*/
set routerLink(commands: any[] | string | null | undefined);
/** @nodoc */
onClick(): boolean;
get urlTree(): UrlTree;
static ɵfac: ɵngcc0.ɵɵFactoryDeclaration<RouterLink, [null, null, { attribute: "tabindex"; }, null, null]>;
static ɵdir: ɵngcc0.ɵɵDirectiveDeclaration<RouterLink, ":not(a):not(area)[routerLink]", never, { "routerLink": "routerLink"; "queryParams": "queryParams"; "fragment": "fragment"; "queryParamsHandling": "queryParamsHandling"; "preserveFragment": "preserveFragment"; "skipLocationChange": "skipLocationChange"; "replaceUrl": "replaceUrl"; "state": "state"; "relativeTo": "relativeTo"; }, {}, never>;
}
/**
*
* @description
*
* Tracks whether the linked route of an element is currently active, and allows you
* to specify one or more CSS classes to add to the element when the linked route
* is active.
*
* Use this directive to create a visual distinction for elements associated with an active route.
* For example, the following code highlights the word "Bob" when the router
* activates the associated route:
*
* ```
* <a routerLink="/user/bob" routerLinkActive="active-link">Bob</a>
* ```
*
* Whenever the URL is either '/user' or '/user/bob', the "active-link" class is
* added to the anchor tag. If the URL changes, the class is removed.
*
* You can set more than one class using a space-separated string or an array.
* For example:
*
* ```
* <a routerLink="/user/bob" routerLinkActive="class1 class2">Bob</a>
* <a routerLink="/user/bob" [routerLinkActive]="['class1', 'class2']">Bob</a>
* ```
*
* To add the classes only when the URL matches the link exactly, add the option `exact: true`:
*
* ```
* <a routerLink="/user/bob" routerLinkActive="active-link" [routerLinkActiveOptions]="{exact:
* true}">Bob</a>
* ```
*
* To directly check the `isActive` status of the link, assign the `RouterLinkActive`
* instance to a template variable.
* For example, the following checks the status without assigning any CSS classes:
*
* ```
* <a routerLink="/user/bob" routerLinkActive #rla="routerLinkActive">
* Bob {{ rla.isActive ? '(already open)' : ''}}
* </a>
* ```
*
* You can apply the `RouterLinkActive` directive to an ancestor of linked elements.
* For example, the following sets the active-link class on the `<div>` parent tag
* when the URL is either '/user/jim' or '/user/bob'.
*
* ```
* <div routerLinkActive="active-link" [routerLinkActiveOptions]="{exact: true}">
* <a routerLink="/user/jim">Jim</a>
* <a routerLink="/user/bob">Bob</a>
* </div>
* ```
*
* @ngModule RouterModule
*
* @publicApi
*/
export declare class RouterLinkActive implements OnChanges, OnDestroy, AfterContentInit {
private router;
private element;
private renderer;
private readonly cdr;
private link?;
private linkWithHref?;
links: QueryList<RouterLink>;
linksWithHrefs: QueryList<RouterLinkWithHref>;
private classes;
private routerEventsSubscription;
private linkInputChangesSubscription?;
readonly isActive: boolean;
/**
* Options to configure how to determine if the router link is active.
*
* These options are passed to the `Router.isActive()` function.
*
* @see Router.isActive
*/
routerLinkActiveOptions: {
exact: boolean;
} | IsActiveMatchOptions;
constructor(router: Router, element: ElementRef, renderer: Renderer2, cdr: ChangeDetectorRef, link?: RouterLink | undefined, linkWithHref?: RouterLinkWithHref | undefined);
/** @nodoc */
ngAfterContentInit(): void;
private subscribeToEachLinkOnChanges;
set routerLinkActive(data: string[] | string);
/** @nodoc */
ngOnChanges(changes: SimpleChanges): void;
/** @nodoc */
ngOnDestroy(): void;
private update;
private isLinkActive;
private hasActiveLinks;
static ɵfac: ɵngcc0.ɵɵFactoryDeclaration<RouterLinkActive, [null, null, null, null, { optional: true; }, { optional: true; }]>;
static ɵdir: ɵngcc0.ɵɵDirectiveDeclaration<RouterLinkActive, "[routerLinkActive]", ["routerLinkActive"], { "routerLinkActiveOptions": "routerLinkActiveOptions"; "routerLinkActive": "routerLinkActive"; }, {}, ["links", "linksWithHrefs"]>;
}
/**
* @description
*
* Lets you link to specific routes in your app.
*
* See `RouterLink` for more information.
*
* @ngModule RouterModule
*
* @publicApi
*/
export declare class RouterLinkWithHref implements OnChanges, OnDestroy {
private router;
private route;
private locationStrategy;
target: string;
/**
* Passed to {@link Router#createUrlTree Router#createUrlTree} as part of the
* `UrlCreationOptions`.
* @see {@link UrlCreationOptions#queryParams UrlCreationOptions#queryParams}
* @see {@link Router#createUrlTree Router#createUrlTree}
*/
queryParams?: Params | null;
/**
* Passed to {@link Router#createUrlTree Router#createUrlTree} as part of the
* `UrlCreationOptions`.
* @see {@link UrlCreationOptions#fragment UrlCreationOptions#fragment}
* @see {@link Router#createUrlTree Router#createUrlTree}
*/
fragment?: string;
/**
* Passed to {@link Router#createUrlTree Router#createUrlTree} as part of the
* `UrlCreationOptions`.
* @see {@link UrlCreationOptions#queryParamsHandling UrlCreationOptions#queryParamsHandling}
* @see {@link Router#createUrlTree Router#createUrlTree}
*/
queryParamsHandling?: QueryParamsHandling | null;
/**
* Passed to {@link Router#createUrlTree Router#createUrlTree} as part of the
* `UrlCreationOptions`.
* @see {@link UrlCreationOptions#preserveFragment UrlCreationOptions#preserveFragment}
* @see {@link Router#createUrlTree Router#createUrlTree}
*/
preserveFragment: boolean;
/**
* Passed to {@link Router#navigateByUrl Router#navigateByUrl} as part of the
* `NavigationBehaviorOptions`.
* @see {@link NavigationBehaviorOptions#skipLocationChange NavigationBehaviorOptions#skipLocationChange}
* @see {@link Router#navigateByUrl Router#navigateByUrl}
*/
skipLocationChange: boolean;
/**
* Passed to {@link Router#navigateByUrl Router#navigateByUrl} as part of the
* `NavigationBehaviorOptions`.
* @see {@link NavigationBehaviorOptions#replaceUrl NavigationBehaviorOptions#replaceUrl}
* @see {@link Router#navigateByUrl Router#navigateByUrl}
*/
replaceUrl: boolean;
/**
* Passed to {@link Router#navigateByUrl Router#navigateByUrl} as part of the
* `NavigationBehaviorOptions`.
* @see {@link NavigationBehaviorOptions#state NavigationBehaviorOptions#state}
* @see {@link Router#navigateByUrl Router#navigateByUrl}
*/
state?: {
[k: string]: any;
};
/**
* Passed to {@link Router#createUrlTree Router#createUrlTree} as part of the
* `UrlCreationOptions`.
* Specify a value here when you do not want to use the default value
* for `routerLink`, which is the current activated route.
* Note that a value of `undefined` here will use the `routerLink` default.
* @see {@link UrlCreationOptions#relativeTo UrlCreationOptions#relativeTo}
* @see {@link Router#createUrlTree Router#createUrlTree}
*/
relativeTo?: ActivatedRoute | null;
private commands;
private subscription;
private preserve;
href: string;
constructor(router: Router, route: ActivatedRoute, locationStrategy: LocationStrategy);
/**
* Commands to pass to {@link Router#createUrlTree Router#createUrlTree}.
* - **array**: commands to pass to {@link Router#createUrlTree Router#createUrlTree}.
* - **string**: shorthand for array of commands with just the string, i.e. `['/route']`
* - **null|undefined**: shorthand for an empty array of commands, i.e. `[]`
* @see {@link Router#createUrlTree Router#createUrlTree}
*/
set routerLink(commands: any[] | string | null | undefined);
/** @nodoc */
ngOnChanges(changes: SimpleChanges): any;
/** @nodoc */
ngOnDestroy(): any;
/** @nodoc */
onClick(button: number, ctrlKey: boolean, shiftKey: boolean, altKey: boolean, metaKey: boolean): boolean;
private updateTargetUrlAndHref;
get urlTree(): UrlTree;
static ɵfac: ɵngcc0.ɵɵFactoryDeclaration<RouterLinkWithHref, never>;
static ɵdir: ɵngcc0.ɵɵDirectiveDeclaration<RouterLinkWithHref, "a[routerLink],area[routerLink]", never, { "routerLink": "routerLink"; "target": "target"; "queryParams": "queryParams"; "fragment": "fragment"; "queryParamsHandling": "queryParamsHandling"; "preserveFragment": "preserveFragment"; "skipLocationChange": "skipLocationChange"; "replaceUrl": "replaceUrl"; "state": "state"; "relativeTo": "relativeTo"; }, {}, never>;
}
/**
* @description
*
* Adds directives and providers for in-app navigation among views defined in an application.
* Use the Angular `Router` service to declaratively specify application states and manage state
* transitions.
*
* You can import this NgModule multiple times, once for each lazy-loaded bundle.
* However, only one `Router` service can be active.
* To ensure this, there are two ways to register routes when importing this module:
*
* * The `forRoot()` method creates an `NgModule` that contains all the directives, the given
* routes, and the `Router` service itself.
* * The `forChild()` method creates an `NgModule` that contains all the directives and the given
* routes, but does not include the `Router` service.
*
* @see [Routing and Navigation guide](guide/router) for an
* overview of how the `Router` service should be used.
*
* @publicApi
*/
export declare class RouterModule {
constructor(guard: any, router: Router);
/**
* Creates and configures a module with all the router providers and directives.
* Optionally sets up an application listener to perform an initial navigation.
*
* When registering the NgModule at the root, import as follows:
*
* ```
* @NgModule({
* imports: [RouterModule.forRoot(ROUTES)]
* })
* class MyNgModule {}
* ```
*
* @param routes An array of `Route` objects that define the navigation paths for the application.
* @param config An `ExtraOptions` configuration object that controls how navigation is performed.
* @return The new `NgModule`.
*
*/
static forRoot(routes: Routes, config?: ExtraOptions): ModuleWithProviders<RouterModule>;
/**
* Creates a module with all the router directives and a provider registering routes,
* without creating a new Router service.
* When registering for submodules and lazy-loaded submodules, create the NgModule as follows:
*
* ```
* @NgModule({
* imports: [RouterModule.forChild(ROUTES)]
* })
* class MyNgModule {}
* ```
*
* @param routes An array of `Route` objects that define the navigation paths for the submodule.
* @return The new NgModule.
*
*/
static forChild(routes: Routes): ModuleWithProviders<RouterModule>;
static ɵfac: ɵngcc0.ɵɵFactoryDeclaration<RouterModule, [{ optional: true; }, { optional: true; }]>;
static ɵmod: ɵngcc0.ɵɵNgModuleDeclaration<RouterModule, [typeof RouterOutlet, typeof RouterLink, typeof RouterLinkWithHref, typeof RouterLinkActive, typeof ɵEmptyOutletComponent], never, [typeof RouterOutlet, typeof RouterLink, typeof RouterLinkWithHref, typeof RouterLinkActive, typeof ɵEmptyOutletComponent]>;
static ɵinj: ɵngcc0.ɵɵInjectorDeclaration<RouterModule>;
}
/**
* @description
*
* Acts as a placeholder that Angular dynamically fills based on the current router state.
*
* Each outlet can have a unique name, determined by the optional `name` attribute.
 * The name cannot be set or changed dynamically. If not set, the default value is "primary".
*
* ```
* <router-outlet></router-outlet>
* <router-outlet name='left'></router-outlet>
* <router-outlet name='right'></router-outlet>
* ```
*
* Named outlets can be the targets of secondary routes.
* The `Route` object for a secondary route has an `outlet` property to identify the target outlet:
*
* `{path: <base-path>, component: <component>, outlet: <target_outlet_name>}`
*
* Using named outlets and secondary routes, you can target multiple outlets in
* the same `RouterLink` directive.
*
* The router keeps track of separate branches in a navigation tree for each named outlet and
* generates a representation of that tree in the URL.
* The URL for a secondary route uses the following syntax to specify both the primary and secondary
* routes at the same time:
*
* `http://base-path/primary-route-path(outlet-name:route-path)`
*
* A router outlet emits an activate event when a new component is instantiated,
* and a deactivate event when a component is destroyed.
*
* ```
* <router-outlet
* (activate)='onActivate($event)'
* (deactivate)='onDeactivate($event)'></router-outlet>
* ```
*
* @see [Routing tutorial](guide/router-tutorial-toh#named-outlets "Example of a named
* outlet and secondary route configuration").
* @see `RouterLink`
* @see `Route`
* @ngModule RouterModule
*
* @publicApi
*/
export declare class RouterOutlet implements OnDestroy, OnInit, RouterOutletContract {
private parentContexts;
private location;
private resolver;
private changeDetector;
private activated;
private _activatedRoute;
private name;
activateEvents: EventEmitter<any>;
deactivateEvents: EventEmitter<any>;
constructor(parentContexts: ChildrenOutletContexts, location: ViewContainerRef, resolver: ComponentFactoryResolver, name: string, changeDetector: ChangeDetectorRef);
/** @nodoc */
ngOnDestroy(): void;
/** @nodoc */
ngOnInit(): void;
get isActivated(): boolean;
/**
* @returns The currently activated component instance.
* @throws An error if the outlet is not activated.
*/
get component(): Object;
get activatedRoute(): ActivatedRoute;
get activatedRouteData(): Data;
/**
* Called when the `RouteReuseStrategy` instructs to detach the subtree
*/
detach(): ComponentRef<any>;
/**
* Called when the `RouteReuseStrategy` instructs to re-attach a previously detached subtree
*/
attach(ref: ComponentRef<any>, activatedRoute: ActivatedRoute): void;
deactivate(): void;
activateWith(activatedRoute: ActivatedRoute, resolver: ComponentFactoryResolver | null): void;
static ɵfac: ɵngcc0.ɵɵFactoryDeclaration<RouterOutlet, [null, null, null, { attribute: "name"; }, null]>;
static ɵdir: ɵngcc0.ɵɵDirectiveDeclaration<RouterOutlet, "router-outlet", ["outlet"], {}, { "activateEvents": "activate"; "deactivateEvents": "deactivate"; }, never>;
}
/**
* An interface that defines the contract for developing a component outlet for the `Router`.
*
* An outlet acts as a placeholder that Angular dynamically fills based on the current router state.
*
* A router outlet should register itself with the `Router` via
* `ChildrenOutletContexts#onChildOutletCreated` and unregister with
* `ChildrenOutletContexts#onChildOutletDestroyed`. When the `Router` identifies a matched `Route`,
* it looks for a registered outlet in the `ChildrenOutletContexts` and activates it.
*
* @see `ChildrenOutletContexts`
* @publicApi
*/
export declare interface RouterOutletContract {
/**
* Whether the given outlet is activated.
*
* An outlet is considered "activated" if it has an active component.
*/
isActivated: boolean;
/** The instance of the activated component or `null` if the outlet is not activated. */
component: Object | null;
/**
* The `Data` of the `ActivatedRoute` snapshot.
*/
activatedRouteData: Data;
/**
* The `ActivatedRoute` for the outlet or `null` if the outlet is not activated.
*/
activatedRoute: ActivatedRoute | null;
/**
* Called by the `Router` when the outlet should activate (create a component).
*/
activateWith(activatedRoute: ActivatedRoute, resolver: ComponentFactoryResolver | null): void;
/**
* A request to destroy the currently activated component.
*
* When a `RouteReuseStrategy` indicates that an `ActivatedRoute` should be removed but stored for
* later re-use rather than destroyed, the `Router` will call `detach` instead.
*/
deactivate(): void;
/**
* Called when the `RouteReuseStrategy` instructs to detach the subtree.
*
* This is similar to `deactivate`, but the activated component should _not_ be destroyed.
* Instead, it is returned so that it can be reattached later via the `attach` method.
*/
detach(): ComponentRef<unknown>;
/**
* Called when the `RouteReuseStrategy` instructs to re-attach a previously detached subtree.
*/
attach(ref: ComponentRef<unknown>, activatedRoute: ActivatedRoute): void;
}
/**
* The preloader optimistically loads all router configurations to
* make navigations into lazily-loaded sections of the application faster.
*
* The preloader runs in the background. When the router bootstraps, the preloader
* starts listening to all navigation events. After every such event, the preloader
* will check if any configurations can be loaded lazily.
*
 * If a route is protected by `canLoad` guards, the preloader will not load it.
*
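 * Preloading is typically enabled when registering the router; this sketch
 * assumes an application routes array named `ROUTES`:
 *
 * ```
 * RouterModule.forRoot(ROUTES, {preloadingStrategy: PreloadAllModules})
 * ```
 *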
* @publicApi
*/
export declare class RouterPreloader implements OnDestroy {
private router;
private injector;
private preloadingStrategy;
private loader;
private subscription?;
constructor(router: Router, moduleLoader: NgModuleFactoryLoader, compiler: Compiler, injector: Injector, preloadingStrategy: PreloadingStrategy);
setUpPreloading(): void;
preload(): Observable<any>;
/** @nodoc */
ngOnDestroy(): void;
private processRoutes;
private preloadConfig;
static ɵfac: ɵngcc0.ɵɵFactoryDeclaration<RouterPreloader, never>;
static ɵprov: ɵngcc0.ɵɵInjectableDeclaration<RouterPreloader>;
}
/**
* Represents the state of the router as a tree of activated routes.
*
* @usageNotes
*
* Every node in the route tree is an `ActivatedRoute` instance
* that knows about the "consumed" URL segments, the extracted parameters,
* and the resolved data.
* Use the `ActivatedRoute` properties to traverse the tree from any node.
*
* The following fragment shows how a component gets the root node
* of the current state to establish its own route tree:
*
* ```
* @Component({templateUrl:'template.html'})
* class MyComponent {
* constructor(router: Router) {
* const state: RouterState = router.routerState;
* const root: ActivatedRoute = state.root;
* const child = root.firstChild;
* const id: Observable<string> = child.params.map(p => p.id);
* //...
* }
* }
* ```
*
* @see `ActivatedRoute`
* @see [Getting route information](guide/router#getting-route-information)
*
* @publicApi
*/
export declare class RouterState extends ɵangular_packages_router_router_m<ActivatedRoute> {
/** The current snapshot of the router state */
snapshot: RouterStateSnapshot;
toString(): string;
}
/**
* @description
*
* Represents the state of the router at a moment in time.
*
* This is a tree of activated route snapshots. Every node in this tree knows about
* the "consumed" URL segments, the extracted parameters, and the resolved data.
*
* The following example shows how a component is initialized with information
* from the snapshot of the root node's state at the time of creation.
*
* ```
* @Component({templateUrl:'template.html'})
* class MyComponent {
* constructor(router: Router) {
* const state: RouterState = router.routerState;
* const snapshot: RouterStateSnapshot = state.snapshot;
* const root: ActivatedRouteSnapshot = snapshot.root;
* const child = root.firstChild;
* const id: Observable<string> = child.params.map(p => p.id);
* //...
* }
* }
* ```
*
* @publicApi
*/
export declare class RouterStateSnapshot extends ɵangular_packages_router_router_m<ActivatedRouteSnapshot> {
/** The url from which this snapshot was created */
url: string;
toString(): string;
}
/**
* The [DI token](guide/glossary/#di-token) for a router configuration.
*
* `ROUTES` is a low level API for router configuration via dependency injection.
*
* We recommend that in almost all cases to use higher level APIs such as `RouterModule.forRoot()`,
* `RouterModule.forChild()`, `provideRoutes`, or `Router.resetConfig()`.
*
* @publicApi
*/
export declare const ROUTES: InjectionToken<Route[][]>;
/**
* Represents a route configuration for the Router service.
* An array of `Route` objects, used in `Router.config` and for nested route configurations
* in `Route.children`.
*
* @see `Route`
* @see `Router`
* @see [Router configuration guide](guide/router-reference#configuration)
* @publicApi
*/
export declare type Routes = Route[];
/**
* An event triggered when routes are recognized.
*
* @publicApi
*/
export declare class RoutesRecognized extends RouterEvent {
/** @docsNotRequired */
urlAfterRedirects: string;
/** @docsNotRequired */
state: RouterStateSnapshot;
constructor(
/** @docsNotRequired */
id: number,
/** @docsNotRequired */
url: string,
/** @docsNotRequired */
urlAfterRedirects: string,
/** @docsNotRequired */
state: RouterStateSnapshot);
/** @docsNotRequired */
toString(): string;
}
/**
*
* A policy for when to run guards and resolvers on a route.
*
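 * For example, a route can opt into re-running guards and resolvers on every
 * navigation (the route shown is an illustrative assumption):
 *
 * ```
 * {path: 'team/:id', component: Team, runGuardsAndResolvers: 'always'}
 * ```
 *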
* @see [Route.runGuardsAndResolvers](api/router/Route#runGuardsAndResolvers)
* @publicApi
*/
export declare type RunGuardsAndResolvers = 'pathParamsChange' | 'pathParamsOrQueryParamsChange' | 'paramsChange' | 'paramsOrQueryParamsChange' | 'always' | ((from: ActivatedRouteSnapshot, to: ActivatedRouteSnapshot) => boolean);
/**
 * An event triggered by scrolling.
 *
 * @publicApi
 */
export declare class Scroll {
/** @docsNotRequired */
readonly routerEvent: NavigationEnd;
/** @docsNotRequired */
readonly position: [number, number] | null;
/** @docsNotRequired */
readonly anchor: string | null;
constructor(
/** @docsNotRequired */
routerEvent: NavigationEnd,
/** @docsNotRequired */
position: [number, number] | null,
/** @docsNotRequired */
anchor: string | null);
toString(): string;
}
/**
* @description
*
* Options that modify the `Router` URL.
* Supply an object containing any of these properties to a `Router` navigation function to
* control how the target URL should be constructed.
*
* @see [Router.navigate() method](api/router/Router#navigate)
* @see [Router.createUrlTree() method](api/router/Router#createurltree)
* @see [Routing and Navigation guide](guide/router)
*
* @publicApi
*/
export declare interface UrlCreationOptions {
/**
* Specifies a root URI to use for relative navigation.
*
* For example, consider the following route configuration where the parent route
* has two children.
*
* ```
* [{
* path: 'parent',
* component: ParentComponent,
* children: [{
* path: 'list',
* component: ListComponent
* },{
* path: 'child',
* component: ChildComponent
* }]
* }]
* ```
*
* The following `go()` function navigates to the `list` route by
* interpreting the destination URI as relative to the activated `child` route
*
* ```
* @Component({...})
* class ChildComponent {
* constructor(private router: Router, private route: ActivatedRoute) {}
*
* go() {
* this.router.navigate(['../list'], { relativeTo: this.route });
* }
* }
* ```
*
* A value of `null` or `undefined` indicates that the navigation commands should be applied
* relative to the root.
*/
relativeTo?: ActivatedRoute | null;
/**
* Sets query parameters to the URL.
*
* ```
* // Navigate to /results?page=1
* this.router.navigate(['/results'], { queryParams: { page: 1 } });
* ```
*/
queryParams?: Params | null;
/**
* Sets the hash fragment for the URL.
*
* ```
* // Navigate to /results#top
* this.router.navigate(['/results'], { fragment: 'top' });
* ```
*/
fragment?: string;
/**
* How to handle query parameters in the router link for the next navigation.
* One of:
* * `preserve` : Preserve current parameters.
* * `merge` : Merge new with current parameters.
*
* The "preserve" option discards any new query params:
* ```
 * // from /view1?page=1 to /view2?page=1
* this.router.navigate(['/view2'], { queryParams: { page: 2 }, queryParamsHandling: "preserve"
* });
* ```
* The "merge" option appends new query params to the params from the current URL:
* ```
 * // from /view1?page=1 to /view2?page=1&otherKey=2
* this.router.navigate(['/view2'], { queryParams: { otherKey: 2 }, queryParamsHandling: "merge"
* });
* ```
* In case of a key collision between current parameters and those in the `queryParams` object,
* the new value is used.
*
*/
queryParamsHandling?: QueryParamsHandling | null;
/**
* When true, preserves the URL fragment for the next navigation
*
* ```
* // Preserve fragment from /results#top to /view#top
* this.router.navigate(['/view'], { preserveFragment: true });
* ```
*/
preserveFragment?: boolean;
}
/**
* @description
*
* Provides a way to migrate AngularJS applications to Angular.
*
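 * In a hybrid app, the Angular router can be restricted to the URLs it owns.
 * The following sketch (the '/ng' prefix is an illustrative assumption)
 * only processes URLs under that prefix:
 *
 * ```
 * class NgPrefixUrlHandlingStrategy extends UrlHandlingStrategy {
 *   shouldProcessUrl(url: UrlTree): boolean {
 *     return url.toString().startsWith('/ng');
 *   }
 *   extract(url: UrlTree): UrlTree { return url; }
 *   merge(newUrlPart: UrlTree, rawUrl: UrlTree): UrlTree { return newUrlPart; }
 * }
 * ```
 *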
* @publicApi
*/
export declare abstract class UrlHandlingStrategy {
/**
* Tells the router if this URL should be processed.
*
* When it returns true, the router will execute the regular navigation.
* When it returns false, the router will set the router state to an empty state.
* As a result, all the active components will be destroyed.
*
*/
abstract shouldProcessUrl(url: UrlTree): boolean;
/**
* Extracts the part of the URL that should be handled by the router.
* The rest of the URL will remain untouched.
*/
abstract extract(url: UrlTree): UrlTree;
/**
* Merges the URL fragment with the rest of the URL.
*/
abstract merge(newUrlPart: UrlTree, rawUrl: UrlTree): UrlTree;
}
/**
* A function for matching a route against URLs. Implement a custom URL matcher
* for `Route.matcher` when a combination of `path` and `pathMatch`
* is not expressive enough. Cannot be used together with `path` and `pathMatch`.
*
* The function takes the following arguments and returns a `UrlMatchResult` object.
* * *segments* : An array of URL segments.
* * *group* : A segment group.
* * *route* : The route to match against.
*
* The following example implementation matches HTML files.
*
* ```
* export function htmlFiles(url: UrlSegment[]) {
* return url.length === 1 && url[0].path.endsWith('.html') ? ({consumed: url}) : null;
* }
*
* export const routes = [{ matcher: htmlFiles, component: AnyComponent }];
* ```
*
* @publicApi
*/
export declare type UrlMatcher = (segments: UrlSegment[], group: UrlSegmentGroup, route: Route) => UrlMatchResult | null;
/**
* Represents the result of matching URLs with a custom matching function.
*
* * `consumed` is an array of the consumed URL segments.
* * `posParams` is a map of positional parameters.
*
* @see `UrlMatcher()`
* @publicApi
*/
export declare type UrlMatchResult = {
consumed: UrlSegment[];
posParams?: {
[name: string]: UrlSegment;
};
};
/**
* @description
*
* Represents a single URL segment.
*
* A UrlSegment is a part of a URL between the two slashes. It contains a path and the matrix
* parameters associated with the segment.
*
* @usageNotes
* ### Example
*
* ```
* @Component({templateUrl:'template.html'})
* class MyComponent {
* constructor(router: Router) {
* const tree: UrlTree = router.parseUrl('/team;id=33');
* const g: UrlSegmentGroup = tree.root.children[PRIMARY_OUTLET];
* const s: UrlSegment[] = g.segments;
* s[0].path; // returns 'team'
* s[0].parameters; // returns {id: 33}
* }
* }
* ```
*
* @publicApi
*/
export declare class UrlSegment {
/** The path part of a URL segment */
path: string;
/** The matrix parameters associated with a segment */
parameters: {
[name: string]: string;
};
constructor(
/** The path part of a URL segment */
path: string,
/** The matrix parameters associated with a segment */
parameters: {
[name: string]: string;
});
get parameterMap(): ParamMap;
/** @docsNotRequired */
toString(): string;
}
/**
* @description
*
* Represents the parsed URL segment group.
*
* See `UrlTree` for more information.
*
* @publicApi
*/
export declare class UrlSegmentGroup {
/** The URL segments of this group. See `UrlSegment` for more information */
segments: UrlSegment[];
/** The list of children of this group */
children: {
[key: string]: UrlSegmentGroup;
};
/** The parent node in the url tree */
parent: UrlSegmentGroup | null;
constructor(
/** The URL segments of this group. See `UrlSegment` for more information */
segments: UrlSegment[],
/** The list of children of this group */
children: {
[key: string]: UrlSegmentGroup;
});
/** Whether the segment has child segments */
hasChildren(): boolean;
/** Number of child segments */
get numberOfChildren(): number;
/** @docsNotRequired */
toString(): string;
}
/**
* @description
*
* Serializes and deserializes a URL string into a URL tree.
*
 * The URL serialization strategy is customizable. You can
 * make all URLs case-insensitive by providing a custom UrlSerializer.
*
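 * For example, this sketch (an illustrative assumption) lower-cases incoming
 * URLs before delegating to the default implementation:
 *
 * ```
 * class LowerCaseUrlSerializer extends DefaultUrlSerializer {
 *   parse(url: string): UrlTree {
 *     // Normalize case before the default parsing logic runs.
 *     return super.parse(url.toLowerCase());
 *   }
 * }
 * ```
 *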
* See `DefaultUrlSerializer` for an example of a URL serializer.
*
* @publicApi
*/
export declare abstract class UrlSerializer {
/** Parse a url into a `UrlTree` */
abstract parse(url: string): UrlTree;
/** Converts a `UrlTree` into a url */
abstract serialize(tree: UrlTree): string;
}
/**
* @description
*
* Represents the parsed URL.
*
* Since a router state is a tree, and the URL is nothing but a serialized state, the URL is a
* serialized tree.
 * UrlTree is a data structure that provides a lot of affordances in dealing with URLs.
*
* @usageNotes
* ### Example
*
* ```
* @Component({templateUrl:'template.html'})
* class MyComponent {
* constructor(router: Router) {
* const tree: UrlTree =
* router.parseUrl('/team/33/(user/victor//support:help)?debug=true#fragment');
* const f = tree.fragment; // return 'fragment'
* const q = tree.queryParams; // returns {debug: 'true'}
* const g: UrlSegmentGroup = tree.root.children[PRIMARY_OUTLET];
* const s: UrlSegment[] = g.segments; // returns 2 segments 'team' and '33'
* g.children[PRIMARY_OUTLET].segments; // returns 2 segments 'user' and 'victor'
* g.children['support'].segments; // return 1 segment 'help'
* }
* }
* ```
*
* @publicApi
*/
export declare class UrlTree {
/** The root segment group of the URL tree */
root: UrlSegmentGroup;
/** The query params of the URL */
queryParams: Params;
/** The fragment of the URL */
fragment: string | null;
get queryParamMap(): ParamMap;
/** @docsNotRequired */
toString(): string;
}
/**
* @publicApi
*/
export declare const VERSION: Version;
/**
* @docsNotRequired
*/
export declare const ɵangular_packages_router_router_a: InjectionToken<void>;
export declare function ɵangular_packages_router_router_b(): NgProbeToken;
export declare function ɵangular_packages_router_router_c(router: Router, viewportScroller: ViewportScroller, config: ExtraOptions): ɵangular_packages_router_router_o;
export declare function ɵangular_packages_router_router_d(platformLocationStrategy: PlatformLocation, baseHref: string, options?: ExtraOptions): HashLocationStrategy | PathLocationStrategy;
export declare function ɵangular_packages_router_router_e(router: Router): any;
export declare function ɵangular_packages_router_router_f(urlSerializer: UrlSerializer, contexts: ChildrenOutletContexts, location: Location, injector: Injector, loader: NgModuleFactoryLoader, compiler: Compiler, config: Route[][], opts?: ExtraOptions, urlHandlingStrategy?: UrlHandlingStrategy, routeReuseStrategy?: RouteReuseStrategy): Router;
export declare function ɵangular_packages_router_router_g(router: Router): ActivatedRoute;
/**
* Router initialization requires two steps:
*
* First, we start the navigation in a `APP_INITIALIZER` to block the bootstrap if
* a resolver or a guard executes asynchronously.
*
* Next, we actually run activation in a `BOOTSTRAP_LISTENER`, using the
* `afterPreactivation` hook provided by the router.
* The router navigation starts, reaches the point when preactivation is done, and then
* pauses. It waits for the hook to be resolved. We then resolve it only in a bootstrap listener.
*/
export declare class ɵangular_packages_router_router_h {
private injector;
private initNavigation;
private resultOfPreactivationDone;
constructor(injector: Injector);
appInitializer(): Promise<any>;
bootstrapListener(bootstrappedComponentRef: ComponentRef<any>): void;
static ɵfac: ɵngcc0.ɵɵFactoryDeclaration<ɵangular_packages_router_router_h, never>;
static ɵprov: ɵngcc0.ɵɵInjectableDeclaration<ɵangular_packages_router_router_h>;
}
export declare function ɵangular_packages_router_router_i(r: ɵangular_packages_router_router_h): () => Promise<any>;
export declare function ɵangular_packages_router_router_j(r: ɵangular_packages_router_router_h): (bootstrappedComponentRef: ComponentRef<any>) => void;
export declare function ɵangular_packages_router_router_k(): ReadonlyArray<Provider>;
export declare class ɵangular_packages_router_router_m<T> {
constructor(root: ɵangular_packages_router_router_n<T>);
get root(): T;
}
export declare class ɵangular_packages_router_router_n<T> {
value: T;
children: ɵangular_packages_router_router_n<T>[];
constructor(value: T, children: ɵangular_packages_router_router_n<T>[]);
toString(): string;
}
export declare class ɵangular_packages_router_router_o implements OnDestroy {
private router;
/** @docsNotRequired */ readonly viewportScroller: ViewportScroller;
private options;
private routerEventsSubscription;
private scrollEventsSubscription;
private lastId;
private lastSource;
private restoredId;
private store;
constructor(router: Router,
/** @docsNotRequired */ viewportScroller: ViewportScroller, options?: {
scrollPositionRestoration?: 'disabled' | 'enabled' | 'top';
anchorScrolling?: 'disabled' | 'enabled';
});
init(): void;
private createScrollEvents;
private consumeScrollEvents;
private scheduleScrollEvent;
/** @nodoc */
ngOnDestroy(): void;
static ɵfac: ɵngcc0.ɵɵFactoryDeclaration<ɵangular_packages_router_router_o, never>;
static ɵprov: ɵngcc0.ɵɵInjectableDeclaration<ɵangular_packages_router_router_o>;
}
export declare function ɵassignExtraOptionsToRouter(opts: ExtraOptions, router: Router): void;
/**
* This component is used internally within the router to be a placeholder when an empty
* router-outlet is needed. For example, with a config such as:
*
* `{path: 'parent', outlet: 'nav', children: [...]}`
*
* In order to render, there needs to be a component on this config, which will default
* to this `EmptyOutletComponent`.
*/
declare class ɵEmptyOutletComponent {
static ɵfac: ɵngcc0.ɵɵFactoryDeclaration<ɵEmptyOutletComponent, never>;
static ɵcmp: ɵngcc0.ɵɵComponentDeclaration<ɵEmptyOutletComponent, "ng-component", never, {}, {}, never, never>;
}
export { ɵEmptyOutletComponent }
export { ɵEmptyOutletComponent as ɵangular_packages_router_router_l }
/**
* Flattens single-level nested arrays.
*/
export declare function ɵflatten<T>(arr: T[][]): T[];
export declare const ɵROUTER_PROVIDERS: Provider[];
export { }
//# sourceMappingURL=router.d.ts.map
views.py
|
import datetime
from django.shortcuts import render, get_object_or_404
from rest_framework.generics import ListAPIView
from rest_framework.response import Response
from rest_framework.status import HTTP_200_OK
from rest_framework.views import APIView
from content.models import Content
from content.serializers import ContentSerializer, ContentsSerializer, ContentsDetailSerializer
from lesson.models import Lesson, Contents
class ContentCreateAPIView(APIView):
def post(self, request, *args, **kwargs):
user = request.user
data = request.data
lesson = get_object_or_404(Lesson, pk=kwargs.get("pk"))
content = {'text': data['text'], "sub_title": data['title']}
if "file" in request.FILES:
file = request.FILES['file']
content['file'] = file
serializer = ContentSerializer(data=content)
serializer.is_valid(raise_exception=True)
serializer.save()
place = Contents.objects.filter(lesson=lesson.id).count() + 1
contents = {
"lesson": kwargs.get("pk"),
"content": serializer.instance.pk,
"owner": user.pk,
"place": place,
}
serializer = ContentsSerializer(data=contents)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response(serializer.data, status=HTTP_200_OK)
class ContentsListAPIView(ListAPIView):
serializer_class = ContentsDetailSerializer
queryset = Contents.objects.all()
def get_queryset(self):
return Contents.objects.filter(lesson=self.kwargs.get("pk"), is_active=True).order_by("place")
class ContentsAPIView(APIView):
def put(self, request, *args, **kwargs):
data = request.data
pk = kwargs['pk']
contents = get_object_or_404(Contents, pk=pk)
if not contents.is_authorized(request.user):
return Response(status=401)
content = contents.content
content_data = {'text': data['text'], "last_edited_at": datetime.datetime.now(), "sub_title": data['title']}
if "file" in request.FILES:
file = request.FILES['file']
content_data['file'] = file
serializer = ContentSerializer(content, data=content_data)
serializer.is_valid(raise_exception=True)
serializer.save()
contents_data = {
"lesson": contents.lesson_id,
"content": content.pk,
"owner": contents.owner.pk,
"last_edited_at": datetime.datetime.now(),
}
serializer = ContentsSerializer(contents, data=contents_data)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response(serializer.data, status=HTTP_200_OK)
def get(self, request, *args, **kwargs):
pk = kwargs['pk']
contents = get_object_or_404(Contents, pk=pk)
serializer = ContentsDetailSerializer(contents)
return Response(serializer.data, status=HTTP_200_OK)
class ContentsInactivateAPIView(APIView):
def put(self, request, *args, **kwargs):
pk = kwargs['pk']
content = get_object_or_404(Contents, pk=pk)
if not content.is_authorized(request.user):
return Response(status=401)
content.is_active = False
content.save()
return Response(status=HTTP_200_OK)
class ContentsActivateAPIView(APIView):
def put(self, request, *args, **kwargs):
pk = kwargs['pk']
content = get_object_or_404(Contents, pk=pk)
if not content.is_authorized(request.user):
return Response(status=401)
content.is_active = True
content.save()
return Response(status=HTTP_200_OK)
class TeacherContentsListAPIView(ListAPIView):
serializer_class = ContentsDetailSerializer
queryset = Contents.objects.all()
def get_queryset(self):
return Contents.objects.filter(lesson=self.kwargs.get("pk")).order_by("place")
class ChangePlaceAPIView(APIView):
def
|
(self, request, *args, **kwargs):
pk = kwargs['pk']
content = get_object_or_404(Contents, pk=pk)
if not content.is_authorized(request.user):
return Response(status=401)
        place = request.data['place']
        # Work out the affected range [gte, lte] and the direction to shift
        # the items in between, so that places stay contiguous.
        lte = content.place
        gte = place
        change = 1
        if lte < gte:
            lte = place
            gte = content.place
            change = -1
        # Places are assigned per lesson (see ContentCreateAPIView), so the
        # reshuffle must be scoped to this lesson.
        contents = Contents.objects.filter(
            lesson=content.lesson, place__gte=gte, place__lte=lte)
        for item in contents:
            item.place = item.place + change
            item.save()
        content.place = place
        content.save()
        return Response(status=HTTP_200_OK)
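    # Worked example of the reindexing above (illustrative): with places
    # [1, 2, 3, 4, 5], moving the item at place 4 to place 2 shifts the items
    # at places 2 and 3 up by one (change=+1); moving 2 -> 4 shifts 3 and 4
    # down by one (change=-1). Either way the sequence stays contiguous.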
|
put
|
main.rs
|
#[macro_use] extern crate log;
#[macro_use] extern crate bitflags;
use actix_web::{get,post, web, App, HttpServer, Responder, middleware, Error, HttpResponse};
use actix_files as fs;
use std::sync::atomic::{AtomicU64, Ordering};
use lib_fbuffers;
use lib_comm;
//#[derive(Default)]
struct State{
|
seq_h:AtomicU64,
seq_l:AtomicU64,
}
impl Default for State{
fn default() -> Self {
State{seq_h:AtomicU64::new(0), seq_l:AtomicU64::new(0)}
}
}
#[actix_rt::main]
async fn main() -> std::io::Result<()> {
std::env::set_var("RUST_BACKTRACE", "1");
//load from command line...
let mut opt = lib_data::OptConf::default();
//setup logger...
match opt.verbosity{
0 => std::env::set_var("RUST_LOG", "warn"),
1 => std::env::set_var("RUST_LOG", "info"),
2 => std::env::set_var("RUST_LOG", "debug"),
_ => std::env::set_var("RUST_LOG", "trace"),
}
//std::env::set_var("RUST_LOG", "actix_web=debug");
env_logger::init();
//load from file...
opt.load(env!("CARGO_PKG_NAME"));
opt.validate().unwrap();
let client = lib_db::get_conn(&opt);
let url = format!("{}:{}", opt.http_ip.unwrap(), opt.http_port.unwrap());
println!("starting server at {}...", url);
let state = web::Data::new(State::default());
HttpServer::new(move ||
App::new()
.wrap(
middleware::Logger::default()
.exclude("/health")
)
.data(client.clone())
.app_data(state.clone())
.service(chart)
.service(data)
.service(health)
.service(fs::Files::new("chart", "./www/static"))
)
.bind(&url)?
.run()
.await
}
use futures::StreamExt;
#[post("/data")]
async fn data(mut body: web::Payload, state: web::Data<State>, db: web::Data<lib_db::Pool>) -> Result<HttpResponse, Error>{
let mut bytes = web::BytesMut::new();
while let Some(item) = body.next().await {
bytes.extend_from_slice(&item?);
}
    let vbytes = bytes.to_vec();
let envelop = lib_comm::get_root_as_message(&vbytes);
{
let seq = state.seq_h.swap(envelop.seq(), Ordering::Relaxed);
if seq >= envelop.seq(){
println!("Envelop: out of order cur:{} >= inc:{}, gap dn:{}", seq, envelop.seq(), seq - envelop.seq());
        } else if (seq + 1) != envelop.seq() {
            println!("Envelop: sequence skipped cur:{} -> inc:{}, gap up:{}", seq, envelop.seq(), envelop.seq() - seq);
}
}
info!("sensor:{}, mseq:{}, uptime:{}, message_type:{}",
envelop.sensor_id().unwrap(),
envelop.seq(),
envelop.uptime(),
envelop.ptype()
);
let payload = envelop.payload().unwrap();
let vpayload:Vec<u8> = payload.into();
//traceroute
if envelop.ptype() == 1{
//this will be offloaded to plugin
let msg = lib_fbuffers::traceroute_generated::get_root_as_message(&vpayload);
{
let seq = state.seq_l.swap(msg.seq(), Ordering::Relaxed);
if seq >= msg.seq(){
println!("Message: out of order cur:{} >= inc:{}, gap dn:{}", seq, msg.seq(), seq - msg.seq());
            } else if (seq + 1) != msg.seq() {
                println!("Message: sequence skipped cur:{} -> inc:{}, gap up:{}", seq, msg.seq(), msg.seq() - seq);
}
}
if let Some(routes) = msg.routes(){
routes.iter().for_each(|r|{
let route = lib_data::AppTraceRoute::new(
r.route_id(),
std::net::Ipv4Addr::from(r.src()),
std::net::Ipv4Addr::from(r.dst())
);
//println!("+++++++++++{}", route);
lib_db::add_route_l(&route);
// if let Ok(mut conn) = db.get_connection(){
// lib_db::add_route(&mut conn, &route);
// }else{
// println!("Is database running? Error connecting to db...");
// }
})
}else if let Some(hops) = msg.hops(){
hops.iter().for_each(|r|{
let hop = lib_data::AppHop::new(
r.route_id(),
std::net::Ipv4Addr::from(r.src()),
std::net::Ipv4Addr::from(r.this()),
r.ttl(),
r.rtt()
);
lib_db::add_hop_l(&hop);
// //println!("==========\t{}", hop);
// if let Ok(mut conn) = db.get_connection(){
// lib_db::add_hop(&mut conn, &hop);
// }else{
// println!("Is database running? Error connecting to db...");
// }
})
}
}else if envelop.ptype() == 2{
let msg = lib_fbuffers::allipv4_generated::get_root_as_message(&vpayload);
if let Some(packets) = msg.packets(){
packets.iter().for_each(|p|{
println!("[{}] {} {}->{} [{}] {} {:?}",
p.proto().unwrap(),
p.id(),
std::net::Ipv4Addr::from(p.src()),
std::net::Ipv4Addr::from(p.dst()),
p.len(),
bitflags_to_string(p.flags()),
p.opts().unwrap(),
);
})
}
}
Ok(HttpResponse::Ok().finish())
}
#[get("/health")]
async fn health() -> impl Responder {
"UP"
.with_header("Content-Type", "text/plain; charset=utf-8")
.with_status(actix_web::http::StatusCode::OK)
}
#[get("/")]
async fn chart() -> Result<HttpResponse, Error> {
// response
Ok(HttpResponse::build(actix_web::http::StatusCode::OK)
.content_type("text/html; charset=utf-8")
.body(include_str!("../www/static/chart.html")))
}
use pnet::packet::tcp::TcpFlags;
bitflags! {
struct Flags: u16 {
const FIN = TcpFlags::FIN; //1
const SYN = TcpFlags::SYN; //2
const RST = TcpFlags::RST; //4
const PSH = TcpFlags::PSH; //8
const ACK = TcpFlags::ACK; //16
const URG = TcpFlags::URG; //32
const CWR = TcpFlags::CWR; //
const ECE = TcpFlags::ECE; //
}
}
fn bitflags_to_string(flags:u16) -> String{
if let Some(s) = Flags::from_bits(flags) {
return format!("{:?}", s);
}
String::new()
}
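// Example (illustrative): flags = 18 is SYN|ACK, so bitflags_to_string(18)
// returns "SYN | ACK"; values containing unknown bits yield an empty string.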
| |
createBucket.py
|
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
import boto3
import os, logging, json
from crhelper import CfnResource
from botocore.exceptions import ClientError
# declare helper and logging
helper = CfnResource()
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# get env variables
DrRegion = os.environ['DrRegion']
S3BucketName = os.environ['S3BucketName']
OriginalLambdaRoleName = os.environ['OriginalLambdaRoleName']
OriginalPolicyName = os.environ['OriginalPolicyName']
def
|
(event, context):
helper(event, context)
@helper.create
@helper.update
def create_resources(event, context):
s3 = boto3.client('s3', region_name = DrRegion)
account_id = event['ResourceProperties']['AccountID']
#format bucket name
    dr_bucket_name = f'disasterrecovery-{S3BucketName}'
#Create S3 bucket for DR
response = create_bucket(s3, dr_bucket_name, DrRegion)
    logger.info(f'response from bucket creation: {response}')
#enable bucket versioning
response = enable_bucket_versioning(s3, dr_bucket_name)
logger.info(f'response to setting bucket versioning: {response}')
    bucket_arn = f'arn:aws:s3:::{dr_bucket_name}'
helper.Data['bucket_arn'] = bucket_arn
def create_bucket(s3, dr_bucket_name, DrRegion):
try:
response = s3.create_bucket(
Bucket = dr_bucket_name,
CreateBucketConfiguration = {
'LocationConstraint': DrRegion
}
)
return response
except ClientError as e:
logger.info(e.response)
def enable_bucket_versioning(s3, dr_bucket_name):
try:
response = s3.put_bucket_versioning(
Bucket = dr_bucket_name,
VersioningConfiguration={
'Status': 'Enabled'
}
)
return response
except ClientError as e:
logger.info(e.response)
@helper.delete
def delete_bucket(event,context):
try:
        bucket_name = f'disasterrecovery-{S3BucketName}'
s3_resource = boto3.resource('s3', region_name = DrRegion)
s3 = boto3.client('s3', region_name = DrRegion)
bucket = s3_resource.Bucket(bucket_name)
#Delete bucket objects 1st
logger.info(f'Deleting bucket objects from bucket:{bucket_name}')
bucket.objects.all().delete()
logger.info(f'objects deleted successfully from bucket:{bucket_name}')
#Delete bucket 2nd
logger.info(f'Deleting s3 bucket {bucket_name}')
response = s3.delete_bucket(
Bucket = bucket_name)
logger.info(response)
logger.info(f'deleted s3 bucket {bucket_name}')
return response
except ClientError as e:
logger.info(e.response)
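# Illustrative flow (not part of this handler): CloudFormation invokes the
# Lambda with a RequestType of Create, Update or Delete; crhelper dispatches
# to the decorated functions above and posts the result (including
# helper.Data such as bucket_arn) back to the stack's pre-signed response URL.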
|
lambda_handler
|
test_mech_driver.py
|
# Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import mock
from oslo_config import cfg
from oslo_utils import uuidutils
from neutron.common.ovn import constants as ovn_const
from neutron.common.ovn import utils
from neutron.common import utils as n_utils
from neutron.db import ovn_revision_numbers_db as db_rev
from neutron.tests.functional import base
class TestPortBinding(base.TestOVNFunctionalBase):
def setUp(self):
super(TestPortBinding, self).setUp()
self.ovs_host = 'ovs-host'
self.dpdk_host = 'dpdk-host'
self.invalid_dpdk_host = 'invalid-host'
self.vhu_mode = 'server'
self.add_fake_chassis(self.ovs_host)
self.add_fake_chassis(
self.dpdk_host,
external_ids={'datapath-type': 'netdev',
'iface-types': 'dummy,dummy-internal,dpdkvhostuser'})
self.add_fake_chassis(
self.invalid_dpdk_host,
external_ids={'datapath-type': 'netdev',
'iface-types': 'dummy,dummy-internal,geneve,vxlan'})
self.n1 = self._make_network(self.fmt, 'n1', True)
res = self._create_subnet(self.fmt, self.n1['network']['id'],
'10.0.0.0/24')
self.deserialize(self.fmt, res)
def _create_or_update_port(self, port_id=None, hostname=None):
if port_id is None:
port_data = {
'port': {'network_id': self.n1['network']['id'],
'tenant_id': self._tenant_id}}
if hostname:
port_data['port']['device_id'] = uuidutils.generate_uuid()
port_data['port']['device_owner'] = 'compute:None'
port_data['port']['binding:host_id'] = hostname
port_req = self.new_create_request('ports', port_data, self.fmt)
port_res = port_req.get_response(self.api)
p = self.deserialize(self.fmt, port_res)
port_id = p['port']['id']
else:
port_data = {
'port': {'device_id': uuidutils.generate_uuid(),
'device_owner': 'compute:None',
'binding:host_id': hostname}}
port_req = self.new_update_request('ports', port_data, port_id,
self.fmt)
port_res = port_req.get_response(self.api)
self.deserialize(self.fmt, port_res)
return port_id
def _verify_vif_details(self, port_id, expected_host_name,
expected_vif_type, expected_vif_details):
port_req = self.new_show_request('ports', port_id)
port_res = port_req.get_response(self.api)
p = self.deserialize(self.fmt, port_res)
self.assertEqual(expected_host_name, p['port']['binding:host_id'])
self.assertEqual(expected_vif_type, p['port']['binding:vif_type'])
self.assertEqual(expected_vif_details,
p['port']['binding:vif_details'])
def test_port_binding_create_port(self):
port_id = self._create_or_update_port(hostname=self.ovs_host)
self._verify_vif_details(port_id, self.ovs_host, 'ovs',
{'port_filter': True})
port_id = self._create_or_update_port(hostname=self.dpdk_host)
expected_vif_details = {'port_filter': False,
'vhostuser_mode': self.vhu_mode,
'vhostuser_ovs_plug': True}
expected_vif_details['vhostuser_socket'] = (
utils.ovn_vhu_sockpath(cfg.CONF.ovn.vhost_sock_dir, port_id))
self._verify_vif_details(port_id, self.dpdk_host, 'vhostuser',
expected_vif_details)
port_id = self._create_or_update_port(hostname=self.invalid_dpdk_host)
self._verify_vif_details(port_id, self.invalid_dpdk_host, 'ovs',
{'port_filter': True})
def test_port_binding_update_port(self):
port_id = self._create_or_update_port()
self._verify_vif_details(port_id, '', 'unbound', {})
port_id = self._create_or_update_port(port_id=port_id,
hostname=self.ovs_host)
self._verify_vif_details(port_id, self.ovs_host, 'ovs',
{'port_filter': True})
port_id = self._create_or_update_port(port_id=port_id,
hostname=self.dpdk_host)
expected_vif_details = {'port_filter': False,
'vhostuser_mode': self.vhu_mode,
'vhostuser_ovs_plug': True}
expected_vif_details['vhostuser_socket'] = (
utils.ovn_vhu_sockpath(cfg.CONF.ovn.vhost_sock_dir, port_id))
self._verify_vif_details(port_id, self.dpdk_host, 'vhostuser',
expected_vif_details)
port_id = self._create_or_update_port(port_id=port_id,
hostname=self.invalid_dpdk_host)
self._verify_vif_details(port_id, self.invalid_dpdk_host, 'ovs',
{'port_filter': True})
class TestPortBindingOverTcp(TestPortBinding):
def get_ovsdb_server_protocol(self):
return 'tcp'
# TODO(mjozefcz): This test class hangs during execution.
class TestPortBindingOverSsl(TestPortBinding):
def get_ovsdb_server_protocol(self):
return 'ssl'
class TestNetworkMTUUpdate(base.TestOVNFunctionalBase):
def setUp(self):
super(TestNetworkMTUUpdate, self).setUp()
self._ovn_client = self.mech_driver._ovn_client
self.n1 = self._make_network(self.fmt, 'n1', True)
res = self._create_subnet(self.fmt, self.n1['network']['id'],
'10.0.0.0/24')
self.sub = self.deserialize(self.fmt, res)
def test_update_network_mtu(self):
mtu_value = self.n1['network']['mtu'] - 100
dhcp_options = (
self.mech_driver._ovn_client._nb_idl.get_subnet_dhcp_options(
self.sub['subnet']['id'])
)
self.assertNotEqual(
int(dhcp_options['subnet']['options']['mtu']),
mtu_value)
data = {'network': {'mtu': mtu_value}}
req = self.new_update_request(
'networks', data, self.n1['network']['id'], self.fmt)
req.get_response(self.api)
dhcp_options = (
self.mech_driver._ovn_client._nb_idl.get_subnet_dhcp_options(
self.sub['subnet']['id'])
)
self.assertEqual(
int(dhcp_options['subnet']['options']['mtu']),
mtu_value)
def test_no_update_network_mtu(self):
mtu_value = self.n1['network']['mtu']
base_revision = db_rev.get_revision_row(
self.context,
self.sub['subnet']['id'])
data = {'network': {'mtu': mtu_value}}
req = self.new_update_request(
'networks', data, self.n1['network']['id'], self.fmt)
req.get_response(self.api)
second_revision = db_rev.get_revision_row(
self.context,
self.sub['subnet']['id'])
self.assertEqual(
base_revision.updated_at,
second_revision.updated_at)
@mock.patch('neutron.plugins.ml2.drivers.ovn.mech_driver.'
'ovsdb.ovn_client.OVNClient._is_virtual_port_supported',
lambda *args: True)
class TestVirtualPorts(base.TestOVNFunctionalBase):
def setUp(self):
super(TestVirtualPorts, self).setUp()
self._ovn_client = self.mech_driver._ovn_client
self.n1 = self._make_network(self.fmt, 'n1', True)
res = self._create_subnet(self.fmt, self.n1['network']['id'],
'10.0.0.0/24')
self.sub = self.deserialize(self.fmt, res)
def _create_port(self, fixed_ip=None, allowed_address=None):
port_data = {
'port': {'network_id': self.n1['network']['id'],
'tenant_id': self._tenant_id}}
if fixed_ip:
port_data['port']['fixed_ips'] = [{'ip_address': fixed_ip}]
if allowed_address:
port_data['port']['allowed_address_pairs'] = [
{'ip_address': allowed_address}]
port_req = self.new_create_request('ports', port_data, self.fmt)
port_res = port_req.get_response(self.api)
self.assertEqual(201, port_res.status_int)
return self.deserialize(self.fmt, port_res)['port']
def _update_allowed_address_pair(self, port_id, data):
port_data = {
'port': {'allowed_address_pairs': data}}
port_req = self.new_update_request('ports', port_data, port_id,
self.fmt)
port_res = port_req.get_response(self.api)
self.assertEqual(200, port_res.status_int)
return self.deserialize(self.fmt, port_res)['port']
def _set_allowed_address_pair(self, port_id, ip):
return self._update_allowed_address_pair(port_id, [{'ip_address': ip}])
def _unset_allowed_address_pair(self, port_id):
return self._update_allowed_address_pair(port_id, [])
def _find_port_row(self, port_id):
cmd = self.nb_api.db_find_rows(
'Logical_Switch_Port', ('name', '=', port_id))
rows = cmd.execute(check_error=True)
return rows[0] if rows else None
def _is_ovn_port_type(self, port_id, port_type):
ovn_vport = self._find_port_row(port_id)
return port_type == ovn_vport.type
def _check_port_type(self, port_id, type):
check = functools.partial(self._is_ovn_port_type, port_id, type)
n_utils.wait_until_true(check, timeout=10)
def test_virtual_port_created_before(self):
virt_port = self._create_port()
virt_ip = virt_port['fixed_ips'][0]['ip_address']
# Create the master port with the VIP address already set in
# the allowed_address_pairs field
master = self._create_port(allowed_address=virt_ip)
# Assert the virt port has the type virtual and master is set
# as parent
self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
ovn_vport = self._find_port_row(virt_port['id'])
self.assertEqual(
virt_ip,
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
self.assertEqual(
master['id'],
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        # Create the backup parent port
backup = self._create_port(allowed_address=virt_ip)
# Assert the virt port now also includes the backup port as a parent
self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
ovn_vport = self._find_port_row(virt_port['id'])
self.assertEqual(
virt_ip,
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
self.assertIn(
master['id'],
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
self.assertIn(
backup['id'],
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
def test_virtual_port_update_address_pairs(self):
master = self._create_port()
backup = self._create_port()
virt_port = self._create_port()
virt_ip = virt_port['fixed_ips'][0]['ip_address']
# Assert the virt port does not yet have the type virtual (no
# address pairs were set yet)
        self._check_port_type(virt_port['id'], '')
ovn_vport = self._find_port_row(virt_port['id'])
self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
ovn_vport.options)
self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
ovn_vport.options)
# Set the virt IP to the allowed address pairs of the master port
self._set_allowed_address_pair(master['id'], virt_ip)
# Assert the virt port is now updated
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
ovn_vport = self._find_port_row(virt_port['id'])
self.assertEqual(
virt_ip,
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
self.assertEqual(
master['id'],
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
# Set the virt IP to the allowed address pairs of the backup port
self._set_allowed_address_pair(backup['id'], virt_ip)
# Assert the virt port now includes the backup port as a parent
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
ovn_vport = self._find_port_row(virt_port['id'])
self.assertEqual(
virt_ip,
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
self.assertIn(
master['id'],
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
self.assertIn(
backup['id'],
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
# Remove the address pairs from the master port
self._unset_allowed_address_pair(master['id'])
# Assert the virt port now only has the backup port as a parent
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
ovn_vport = self._find_port_row(virt_port['id'])
self.assertEqual(
virt_ip,
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
self.assertEqual(
backup['id'],
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
# Remove the address pairs from the backup port
self._unset_allowed_address_pair(backup['id'])
|
# port options are cleared
        self._check_port_type(virt_port['id'], '')
ovn_vport = self._find_port_row(virt_port['id'])
self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
ovn_vport.options)
self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
ovn_vport.options)
def test_virtual_port_created_after(self):
master = self._create_port(fixed_ip='10.0.0.11')
backup = self._create_port(fixed_ip='10.0.0.12')
virt_ip = '10.0.0.55'
# Set the virt IP to the master and backup ports *before* creating
# the virtual port
self._set_allowed_address_pair(master['id'], virt_ip)
self._set_allowed_address_pair(backup['id'], virt_ip)
virt_port = self._create_port(fixed_ip=virt_ip)
# Assert the virtual port has been created with the
# right type and parents
ovn_vport = self._find_port_row(virt_port['id'])
self.assertEqual(ovn_const.LSP_TYPE_VIRTUAL, ovn_vport.type)
self.assertEqual(
virt_ip,
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
self.assertIn(
master['id'],
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
self.assertIn(
backup['id'],
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
def test_virtual_port_delete_parents(self):
master = self._create_port()
backup = self._create_port()
virt_port = self._create_port()
virt_ip = virt_port['fixed_ips'][0]['ip_address']
# Assert the virt port does not yet have the type virtual (no
# address pairs were set yet)
ovn_vport = self._find_port_row(virt_port['id'])
self.assertEqual("", ovn_vport.type)
self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
ovn_vport.options)
self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
ovn_vport.options)
        # Set allowed address pairs on the master and backup ports
self._set_allowed_address_pair(master['id'], virt_ip)
self._set_allowed_address_pair(backup['id'], virt_ip)
# Assert the virtual port is correct
ovn_vport = self._find_port_row(virt_port['id'])
self.assertEqual(ovn_const.LSP_TYPE_VIRTUAL, ovn_vport.type)
self.assertEqual(
virt_ip,
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
self.assertIn(
master['id'],
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
self.assertIn(
backup['id'],
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
# Delete the backup port
self._delete('ports', backup['id'])
# Assert the virt port now only has the master port as a parent
ovn_vport = self._find_port_row(virt_port['id'])
self.assertEqual(ovn_const.LSP_TYPE_VIRTUAL, ovn_vport.type)
self.assertEqual(
virt_ip,
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
self.assertEqual(
master['id'],
ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
# Delete the master port
self._delete('ports', master['id'])
# Assert the virt port is not type virtual anymore and the virtual
# port options are cleared
ovn_vport = self._find_port_row(virt_port['id'])
self.assertEqual("", ovn_vport.type)
self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
ovn_vport.options)
self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
ovn_vport.options)
|
# Assert the virt port is not type virtual anymore and the virtual
|
task.go
|
package task
type Artifact interface {
Path() string
}
type artifact struct {
path string
}
func
|
(path string) Artifact {
return &artifact{path}
}
func (a *artifact) Path() string {
return a.path
}
type Task interface {
Name() string
Run(Artifact) (Artifact, error)
}
|
NewArtifact
|
prefix.rs
|
//! RDF-related languages (e.g. Turtle, SPARQL) often use prefixes to shorten IRIs.
//! This crate provides generic traits to handle prefix maps.
use crate::term::{SimpleIri, TTerm, TermKind};
use mownstr::MownStr;
use std::borrow::Borrow;
use std::collections::HashMap;
use std::hash::Hash;
/// A prefix map associates prefixes (`&'a str`) to namespaces.
pub trait PrefixMap<'a> {
|
/// Return the IRI associated to this prefix, if any.
///
/// It must be guaranteed that the returned term is indeed an IRI.
fn get_namespace(&self, prefix: &str) -> Option<&Self::Term>;
/// Return a prefix-suffix pair describing the given IRI, if any.
///
/// If `iri` is another kind of term, implementations MUST return None.
fn get_prefixed_pair<'s, T: TTerm>(&'s self, iri: &'s T) -> Option<(&'s str, MownStr<'s>)>;
}
impl<'a, PF> PrefixMap<'a> for HashMap<PF, SimpleIri<'a>>
where
PF: Borrow<str> + Eq + Hash + 'a,
{
type Term = SimpleIri<'a>;
fn get_namespace(&self, prefix: &str) -> Option<&Self::Term> {
self.get(prefix)
}
fn get_prefixed_pair<'s, T: TTerm>(&'s self, iri: &'s T) -> Option<(&'s str, MownStr<'s>)> {
match iri.kind() {
TermKind::Iri => {
let raw_value = iri.value_raw();
let len = raw_value.len();
self.iter()
.filter_map(|(prefix, ns)| {
let ns = ns.value_raw();
if raw_value.starts_with(ns.bytes()) {
Some((prefix.borrow(), raw_value.slice(ns.len()..)))
} else {
None
}
})
.max_by_key(|(_, suffix)| len - suffix.len())
}
_ => None,
}
}
}
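// Usage sketch (illustrative): with "s" mapped to <http://schema.org/>,
// calling get_prefixed_pair on <http://schema.org/Person> yields
// Some(("s", "Person")); when several namespaces match, max_by_key above
// keeps the longest one, i.e. the shortest suffix.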
#[cfg(test)]
mod test {
use super::*;
use test_case::test_case;
#[test_case("http://something.else.com/", None, None; "something else")]
#[test_case("http://schema.org/Person", None, Some(("s", "Person")); "s:Person")]
#[test_case("http://example.org/", None, Some(("", "")); "single colon")]
#[test_case("http://example.org/a/c", None, Some(("a", "c")); "a:c")]
#[test_case("http://example.org/a/b#c", None, Some(("ab", "c")); "b:c")]
#[test_case("http://example.org/a#c", None, Some(("", "a#c")); ":a#c")]
fn get_prefixed_pair(ns: &str, sf: Option<&str>, expected: Option<(&str, &str)>) {
let mut map = HashMap::new();
map.insert("s", SimpleIri::new_unchecked("http://schema.org/", None));
map.insert(
"ab",
SimpleIri::new_unchecked("http://example.org/", Some("a/b#")),
);
map.insert(
"a",
SimpleIri::new_unchecked("http://example.org/", Some("a/")),
);
map.insert("", SimpleIri::new_unchecked("http://example.org/", None));
let expected = expected.map(|(pf, sf)| (pf, MownStr::from(sf)));
let iri = SimpleIri::new_unchecked(ns, sf);
assert_eq!(map.get_prefixed_pair(&iri), expected);
}
}
|
/// The type of term returned by this prefix map.
type Term: TTerm + 'a;
|
container_internal_unsupported.go
|
// +build !linux
package libpod
import (
"context"
spec "github.com/opencontainers/runtime-spec/specs-go"
|
return ErrNotImplemented
}
func (c *Container) unmountSHM(mount string) error {
return ErrNotImplemented
}
func (c *Container) prepare() (err error) {
return ErrNotImplemented
}
func (c *Container) cleanupNetwork() error {
return ErrNotImplemented
}
func (c *Container) generateSpec(ctx context.Context) (*spec.Spec, error) {
return nil, ErrNotImplemented
}
func (c *Container) checkpoint(ctx context.Context, options ContainerCheckpointOptions) error {
return ErrNotImplemented
}
func (c *Container) restore(ctx context.Context, options ContainerCheckpointOptions) error {
return ErrNotImplemented
}
func (c *Container) copyOwnerAndPerms(source, dest string) error {
return nil
}
|
)
func (c *Container) mountSHM(shmOptions string) error {
|
clone.ts
|
import { type } from '../../helpers'
export function clone<A>(obj: A): A {
|
function _clone(value: any, refFrom: Array<any>, refTo: Array<any>, deep: boolean): any {
function copy(copiedValue: any) {
    const length = refFrom.length
    let i = 0
    for (; i < length; ++i) {
      // Cycle guard: reuse the copy already made for this reference.
      if (value === refFrom[i])
        return refTo[i]
    }
    // i === length here: record the pair in the next free slots so later
    // lookups can find this reference.
    refFrom[length] = value
    refTo[length] = copiedValue
    for (const key in value) {
      if (!value.hasOwnProperty(key))
        continue
      copiedValue[key] = deep
        ? _clone(value[key] as any, refFrom, refTo, true)
        : value[key]
    }
    return copiedValue
}
switch (type(value)) {
case 'Object': return copy({})
case 'Array': return copy([])
case 'Date': return new Date(value.valueOf())
case 'RegExp': return cloneRegexp(value)
default: return value
}
}
function cloneRegexp(pattern: RegExp): RegExp {
return new RegExp(
pattern.source,
(pattern.global ? 'g' : '') +
(pattern.ignoreCase ? 'i' : '') +
(pattern.multiline ? 'm' : '') +
(pattern.sticky ? 'y' : '') +
(pattern.unicode ? 'u' : ''),
)
}
|
return _clone(obj, [], [], true)
}
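// Usage sketch (illustrative):
//   const a = { when: new Date(0), xs: [1, 2] }
//   const b = clone(a)
//   b.xs.push(3) // deep copy: a.xs is still [1, 2]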
|
resource_api_proxy_deployment.go
|
package apigee
import (
"fmt"
"log"
"strconv"
"strings"
"github.com/gofrs/uuid"
"github.com/hashicorp/terraform/helper/schema"
"github.com/zambien/go-apigee-edge"
)
func resourceApiProxyDeployment() *schema.Resource {
return &schema.Resource{
Create: resourceApiProxyDeploymentCreate,
Read: resourceApiProxyDeploymentRead,
Update: resourceApiProxyDeploymentUpdate,
Delete: resourceApiProxyDeploymentDelete,
Importer: &schema.ResourceImporter{
State: resourceApiProxyDeploymentImport,
},
Schema: map[string]*schema.Schema{
"proxy_name": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"org": {
Type: schema.TypeString,
Optional: true,
Deprecated: "org is not required, the value from the provider is used.",
},
"env": {
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"revision": {
Type: schema.TypeString,
Required: true,
},
"delay": {
Type: schema.TypeInt,
Optional: true,
Default: 0,
},
"override": {
Type: schema.TypeBool,
Optional: true,
Default: false,
},
},
}
}
func resourceApiProxyDeploymentImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
log.Print("[DEBUG] resourceApiProxyDeploymentImport START")
client := meta.(*apigee.EdgeClient)
splits := strings.Split(d.Id(), "_")
if len(splits) < 2 {
return []*schema.ResourceData{}, fmt.Errorf("[ERR] Wrong format of resource: %s. Please follow '{name}_{env}_deployment'", d.Id())
}
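	// The ID has the shape "{name}_{env}_deployment"; the name itself may
	// contain underscores, so parse from the right: the last segment is the
	// literal "deployment" and the segment before it is the environment.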
nameOffset := len(splits[len(splits)-1]) + len(splits[len(splits)-2])
envOffset := len(splits[len(splits)-1])
name := d.Id()[:(len(d.Id())-nameOffset)-2]
IDEnv := d.Id()[len(name)+1 : (len(d.Id())-envOffset)-1]
deployment, _, err := client.Proxies.GetDeployments(name)
if err != nil {
log.Printf("[DEBUG] resourceApiProxyDeploymentImport. Error getting deployment api: %v", err)
		return nil, err
}
d.Set("org", deployment.Organization)
d.Set("proxy_name", deployment.Name)
d.Set("env", IDEnv)
return []*schema.ResourceData{d}, nil
}
func resourceApiProxyDeploymentRead(d *schema.ResourceData, meta interface{}) (e error)
|
func resourceApiProxyDeploymentCreate(d *schema.ResourceData, meta interface{}) error {
log.Print("[DEBUG] resourceApiProxyDeploymentCreate START")
client := meta.(*apigee.EdgeClient)
proxy_name := d.Get("proxy_name").(string)
env := d.Get("env").(string)
rev_int, _ := strconv.Atoi(d.Get("revision").(string))
rev := apigee.Revision(rev_int)
delay := int(d.Get("delay").(int))
override := bool(d.Get("override").(bool))
if d.Get("revision").(string) == "latest" {
// deploy latest
rev_int, err := getLatestRevision(client, proxy_name)
rev = apigee.Revision(rev_int)
if err != nil {
return fmt.Errorf("[ERROR] resourceApiProxyDeploymentUpdate error getting latest revision: %v", err)
}
}
	proxyDep, _, err := client.Proxies.Deploy(proxy_name, env, rev, delay, override)
	if err != nil {
		if strings.Contains(err.Error(), "conflicts with existing deployment path") {
			// Create failed because a deployment already exists: fall back to update.
			log.Printf("[ERROR] resourceApiProxyDeploymentCreate error deploying: %s", err.Error())
			log.Print("[DEBUG] resourceApiProxyDeploymentCreate something got out of sync... maybe someone messing around in apigee directly. Terraform OVERRIDE!!!")
			if err := resourceApiProxyDeploymentUpdate(d, meta); err != nil {
				return err
			}
			// proxyDep is not valid on this path, so set the ID and finish here.
			id, _ := uuid.NewV4()
			d.SetId(id.String())
			return resourceApiProxyDeploymentRead(d, meta)
		}
		log.Printf("[ERROR] resourceApiProxyDeploymentCreate error deploying: %s", err.Error())
		return fmt.Errorf("[ERROR] resourceApiProxyDeploymentCreate error deploying: %s", err.Error())
	}
	id, _ := uuid.NewV4()
	d.SetId(id.String())
	d.Set("revision", proxyDep.Revision.String())
	log.Printf("[DEBUG] resourceApiProxyDeploymentCreate Deployed revision %d of %s", rev, proxy_name)
	return resourceApiProxyDeploymentRead(d, meta)
}
func resourceApiProxyDeploymentUpdate(d *schema.ResourceData, meta interface{}) error {
log.Print("[DEBUG] resourceApiProxyDeploymentUpdate START")
client := meta.(*apigee.EdgeClient)
proxy_name := d.Get("proxy_name").(string)
env := d.Get("env").(string)
delay := int(d.Get("delay").(int))
override := bool(d.Get("override").(bool))
//We must set delay and override here if not set.
if delay == 0 {
delay = 15 //seconds
}
if override == false {
override = true
}
rev_int, _ := strconv.Atoi(d.Get("revision").(string))
rev := apigee.Revision(rev_int)
if d.Get("revision").(string) == "latest" {
// deploy latest
rev_int, err := getLatestRevision(client, proxy_name)
rev = apigee.Revision(rev_int)
if err != nil {
return fmt.Errorf("[ERROR] resourceApiProxyDeploymentUpdate error getting latest revision: %v", err)
}
}
_, _, err := client.Proxies.ReDeploy(proxy_name, env, rev, delay, override)
if err != nil {
log.Printf("[ERROR] resourceApiProxyDeploymentUpdate error redeploying: %s", err.Error())
if strings.Contains(err.Error(), " is already deployed into environment ") {
return resourceApiProxyDeploymentRead(d, meta)
}
return fmt.Errorf("[ERROR] resourceApiProxyDeploymentUpdate error redeploying: %s", err.Error())
}
log.Printf("[DEBUG] resourceApiProxyDeploymentUpdate Deployed revision %d of %s", rev, proxy_name)
return resourceApiProxyDeploymentRead(d, meta)
}
func resourceApiProxyDeploymentDelete(d *schema.ResourceData, meta interface{}) error {
log.Print("[DEBUG] resourceApiProxyDeploymentDelete START")
client := meta.(*apigee.EdgeClient)
proxy_name := d.Get("proxy_name").(string)
env := d.Get("env").(string)
rev_int, _ := strconv.Atoi(d.Get("revision").(string))
rev := apigee.Revision(rev_int)
if d.Get("revision").(string) == "latest" {
// deploy latest
rev_int, err := getLatestRevision(client, proxy_name)
rev = apigee.Revision(rev_int)
if err != nil {
return fmt.Errorf("[ERROR] resourceApiProxyDeploymentDelete error getting latest revision: %v", err)
}
}
_, _, err := client.Proxies.Undeploy(proxy_name, env, rev)
if err != nil {
log.Printf("[ERROR] resourceApiProxyDeploymentDelete error undeploying: %s", err.Error())
return fmt.Errorf("[ERROR] resourceApiProxyDeploymentDelete error undeploying: %s", err.Error())
}
log.Printf("[DEBUG] resourceApiProxyDeploymentDelete Deleted revision %d of %s", rev, proxy_name)
return resourceApiProxyDeploymentRead(d, meta)
}
func getLatestRevision(client *apigee.EdgeClient, proxyName string) (int, error) {
proxy, _, err := client.Proxies.Get(proxyName)
if err != nil {
return -1, fmt.Errorf("[ERROR] resourceApiProxyRead error reading proxies: %s", err.Error())
}
latestRev, err := strconv.Atoi(proxy.Revisions[len(proxy.Revisions)-1].String())
if err != nil {
return -1, fmt.Errorf("[ERROR] resourceApiProxyRead error reading proxies: %s", err.Error())
}
return latestRev, nil
}
|
{
log.Print("[DEBUG] resourceApiProxyDeploymentRead START")
log.Printf("[DEBUG] resourceApiProxyDeploymentRead proxy_name: %#v", d.Get("proxy_name").(string))
client := meta.(*apigee.EdgeClient)
found := false
matchedRevision := "0"
if deployments, _, err := client.Proxies.GetDeployments(d.Get("proxy_name").(string)); err != nil {
log.Printf("[ERROR] resourceApiProxyDeploymentRead error getting deployments: %s", err.Error())
if strings.Contains(err.Error(), "404 ") {
log.Printf("[DEBUG] resourceApiProxyDeploymentRead 404 encountered. Removing state for deployment proxy_name: %#v", d.Get("proxy_name").(string))
d.SetId("")
return nil
} else {
return fmt.Errorf("[ERROR] resourceApiProxyDeploymentRead error reading deployments: %s", err.Error())
}
} else {
log.Printf("[DEBUG] resourceApiProxyDeploymentRead deployments call fired for proxy_name: %#v", d.Get("proxy_name").(string))
for _, environment := range deployments.Environments {
log.Printf("[DEBUG] resourceApiProxyDeploymentRead checking revisions in deployed environment: %#v for expected environment: %#v\n", environment.Name, d.Get("env").(string))
if environment.Name == d.Get("env").(string) {
//We don't break. Always get the last one if there are multiple deployments.
for _, revision := range environment.Revision {
found = true
log.Printf("[DEBUG] resourceApiProxyDeploymentRead checking deployed revision: %#v for expected revision: %#v\n", revision.Number.String(), d.Get("revision").(string))
if d.Get("revision").(string) != "latest" && d.Get("revision").(string) == revision.Number.String() {
matchedRevision = revision.Number.String()
break
} else {
matchedRevision = revision.Number.String()
}
}
}
}
}
if found {
if d.Get("revision").(string) == "latest" {
d.SetId(matchedRevision)
} else {
d.Set("revision", matchedRevision)
}
log.Printf("[DEBUG] resourceApiProxyDeploymentRead - deployment found. Revision is: %#v", d.Get("revision").(string))
} else {
log.Print("[DEBUG] resourceApiProxyDeploymentRead - no deployment found")
d.SetId("")
}
return nil
}
|
fields_request_builder.go
|
package fields
import (
ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9 "github.com/microsoft/kiota/abstractions/go"
i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55 "github.com/microsoft/kiota/abstractions/go/serialization"
i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87 "github.com/microsoftgraph/msgraph-sdk-go/models/microsoft/graph"
)
// FieldsRequestBuilder builds and executes requests for operations under \drives\{drive-id}\list\items\{listItem-id}\fields
type FieldsRequestBuilder struct {
// Path parameters for the request
pathParameters map[string]string;
// The request adapter to use to execute the requests.
requestAdapter ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestAdapter;
// Url template to use to build the URL for the current request builder
urlTemplate string;
}
// FieldsRequestBuilderDeleteOptions options for Delete
type FieldsRequestBuilderDeleteOptions struct {
// Request headers
H map[string]string;
// Request options
O []ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestOption;
// Response handler to use in place of the default response handling provided by the core service
ResponseHandler ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ResponseHandler;
}
// FieldsRequestBuilderGetOptions options for Get
type FieldsRequestBuilderGetOptions struct {
// Request headers
H map[string]string;
// Request options
O []ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestOption;
// Request query parameters
Q *FieldsRequestBuilderGetQueryParameters;
// Response handler to use in place of the default response handling provided by the core service
ResponseHandler ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ResponseHandler;
}
// FieldsRequestBuilderGetQueryParameters the values of the columns set on this list item.
type FieldsRequestBuilderGetQueryParameters struct {
// Expand related entities
Expand []string;
// Select properties to be returned
Select_escaped []string;
}
// FieldsRequestBuilderPatchOptions options for Patch
type FieldsRequestBuilderPatchOptions struct {
//
Body *i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.FieldValueSet;
// Request headers
H map[string]string;
// Request options
O []ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestOption;
// Response handler to use in place of the default response handling provided by the core service
ResponseHandler ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ResponseHandler;
}
// NewFieldsRequestBuilderInternal instantiates a new FieldsRequestBuilder and sets the default values.
func NewFieldsRequestBuilderInternal(pathParameters map[string]string, requestAdapter ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestAdapter)(*FieldsRequestBuilder)
|
// NewFieldsRequestBuilder instantiates a new FieldsRequestBuilder and sets the default values.
func NewFieldsRequestBuilder(rawUrl string, requestAdapter ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestAdapter)(*FieldsRequestBuilder) {
urlParams := make(map[string]string)
urlParams["request-raw-url"] = rawUrl
return NewFieldsRequestBuilderInternal(urlParams, requestAdapter)
}
// CreateDeleteRequestInformation the values of the columns set on this list item.
func (m *FieldsRequestBuilder) CreateDeleteRequestInformation(options *FieldsRequestBuilderDeleteOptions)(*ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestInformation, error) {
requestInfo := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.NewRequestInformation()
requestInfo.UrlTemplate = m.urlTemplate
requestInfo.PathParameters = m.pathParameters
requestInfo.Method = ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.DELETE
if options != nil && options.H != nil {
requestInfo.Headers = options.H
}
if options != nil && len(options.O) != 0 {
err := requestInfo.AddRequestOptions(options.O...)
if err != nil {
return nil, err
}
}
return requestInfo, nil
}
// CreateGetRequestInformation the values of the columns set on this list item.
func (m *FieldsRequestBuilder) CreateGetRequestInformation(options *FieldsRequestBuilderGetOptions)(*ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestInformation, error) {
requestInfo := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.NewRequestInformation()
requestInfo.UrlTemplate = m.urlTemplate
requestInfo.PathParameters = m.pathParameters
requestInfo.Method = ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.GET
if options != nil && options.Q != nil {
requestInfo.AddQueryParameters(*(options.Q))
}
if options != nil && options.H != nil {
requestInfo.Headers = options.H
}
if options != nil && len(options.O) != 0 {
err := requestInfo.AddRequestOptions(options.O...)
if err != nil {
return nil, err
}
}
return requestInfo, nil
}
// CreatePatchRequestInformation the values of the columns set on this list item.
func (m *FieldsRequestBuilder) CreatePatchRequestInformation(options *FieldsRequestBuilderPatchOptions)(*ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestInformation, error) {
requestInfo := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.NewRequestInformation()
requestInfo.UrlTemplate = m.urlTemplate
requestInfo.PathParameters = m.pathParameters
requestInfo.Method = ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.PATCH
requestInfo.SetContentFromParsable(m.requestAdapter, "application/json", options.Body)
if options != nil && options.H != nil {
requestInfo.Headers = options.H
}
if options != nil && len(options.O) != 0 {
err := requestInfo.AddRequestOptions(options.O...)
if err != nil {
return nil, err
}
}
return requestInfo, nil
}
// Delete the values of the columns set on this list item.
func (m *FieldsRequestBuilder) Delete(options *FieldsRequestBuilderDeleteOptions)(error) {
requestInfo, err := m.CreateDeleteRequestInformation(options);
if err != nil {
return err
}
err = m.requestAdapter.SendNoContentAsync(*requestInfo, nil)
if err != nil {
return err
}
return nil
}
// Get the values of the columns set on this list item.
func (m *FieldsRequestBuilder) Get(options *FieldsRequestBuilderGetOptions)(*i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.FieldValueSet, error) {
requestInfo, err := m.CreateGetRequestInformation(options);
if err != nil {
return nil, err
}
res, err := m.requestAdapter.SendAsync(*requestInfo, func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.NewFieldValueSet() }, nil)
if err != nil {
return nil, err
}
return res.(*i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.FieldValueSet), nil
}
// Patch the values of the columns set on this list item.
func (m *FieldsRequestBuilder) Patch(options *FieldsRequestBuilderPatchOptions)(error) {
requestInfo, err := m.CreatePatchRequestInformation(options);
if err != nil {
return err
}
err = m.requestAdapter.SendNoContentAsync(*requestInfo, nil)
if err != nil {
return err
}
return nil
}
|
{
m := &FieldsRequestBuilder{
}
m.urlTemplate = "{+baseurl}/drives/{drive_id}/list/items/{listItem_id}/fields{?select,expand}";
urlTplParams := make(map[string]string)
for idx, item := range pathParameters {
urlTplParams[idx] = item
}
m.pathParameters = pathParameters;
m.requestAdapter = requestAdapter;
return m
}
|
data_utils.py
|
import os.path
from typing import Union, Optional
import torch
_TEST_DIR_PATH = os.path.realpath(os.path.join(os.path.dirname(__file__), ".."))
def get_asset_path(*paths):
"""Return full path of a test asset"""
return os.path.join(_TEST_DIR_PATH, "assets", *paths)
def convert_tensor_encoding(
    tensor: torch.Tensor,
dtype: torch.dtype,
):
"""Convert input tensor with values between -1 and 1 to integer encoding
Args:
tensor: input tensor, assumed between -1 and 1
dtype: desired output tensor dtype
Returns:
        Tensor: the input tensor converted to the requested dtype (same shape)
"""
if dtype == torch.int32:
tensor *= (tensor > 0) * 2147483647 + (tensor < 0) * 2147483648
if dtype == torch.int16:
tensor *= (tensor > 0) * 32767 + (tensor < 0) * 32768
if dtype == torch.uint8:
tensor *= (tensor > 0) * 127 + (tensor < 0) * 128
tensor += 128
tensor = tensor.to(dtype)
return tensor
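# Example (illustrative): with dtype=torch.int16, a float tensor holding
# [-1.0, 0.5] is scaled to [-32768.0, 16383.5] and then truncated by the
# cast to [-32768, 16383].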
def get_whitenoise(
*,
sample_rate: int = 16000,
duration: float = 1, # seconds
n_channels: int = 1,
seed: int = 0,
dtype: Union[str, torch.dtype] = "float32",
device: Union[str, torch.device] = "cpu",
channels_first=True,
scale_factor: float = 1,
):
|
def get_sinusoid(
*,
frequency: float = 300,
sample_rate: int = 16000,
duration: float = 1, # seconds
n_channels: int = 1,
dtype: Union[str, torch.dtype] = "float32",
device: Union[str, torch.device] = "cpu",
channels_first: bool = True,
):
"""Generate pseudo audio data with sine wave.
Args:
frequency: Frequency of sine wave
sample_rate: Sampling rate
duration: Length of the resulting Tensor in seconds.
n_channels: Number of channels
dtype: Torch dtype
device: device
Returns:
Tensor: shape of (n_channels, sample_rate * duration)
"""
if isinstance(dtype, str):
dtype = getattr(torch, dtype)
pie2 = 2 * 3.141592653589793
end = pie2 * frequency * duration
num_frames = int(sample_rate * duration)
# Randomize the initial phase. (except the first channel)
theta0 = pie2 * torch.randn(n_channels, 1, dtype=torch.float32, device=device)
theta0[0, :] = 0
theta = torch.linspace(0, end, num_frames, dtype=torch.float32, device=device)
theta = theta0 + theta
tensor = torch.sin(theta, out=None)
if not channels_first:
tensor = tensor.t()
return convert_tensor_encoding(tensor, dtype)
def get_spectrogram(
waveform,
*,
n_fft: int = 2048,
hop_length: Optional[int] = None,
win_length: Optional[int] = None,
window: Optional[torch.Tensor] = None,
center: bool = True,
pad_mode: str = "reflect",
power: Optional[float] = None,
):
"""Generate a spectrogram of the given Tensor
Args:
n_fft: The number of FFT bins.
hop_length: Stride for sliding window. default: ``n_fft // 4``.
win_length: The size of window frame and STFT filter. default: ``n_fft``.
        window: Window function. default: Hann window
center: Pad the input sequence if True. See ``torch.stft`` for the detail.
pad_mode: Padding method used when center is True. Default: "reflect".
power: If ``None``, raw spectrogram with complex values are returned,
otherwise the norm of the spectrogram is returned.
"""
hop_length = hop_length or n_fft // 4
win_length = win_length or n_fft
window = torch.hann_window(win_length, device=waveform.device) if window is None else window
spec = torch.stft(
waveform,
n_fft=n_fft,
hop_length=hop_length,
win_length=win_length,
center=center,
window=window,
pad_mode=pad_mode,
return_complex=True,
)
if power is not None:
spec = spec.abs() ** power
return spec
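# Example usage (illustrative):
#   waveform = get_sinusoid(frequency=440.0, sample_rate=16000)
#   spec = get_spectrogram(waveform, n_fft=400, power=2.0)  # power spectrogram
#   # spec.shape == (n_channels, n_fft // 2 + 1, n_frames)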
|
"""Generate pseudo audio data with whitenoise
Args:
sample_rate: Sampling rate
duration: Length of the resulting Tensor in seconds.
n_channels: Number of channels
seed: Seed value used for random number generation.
Note that this function does not modify global random generator state.
dtype: Torch dtype
device: device
channels_first: whether first dimension is n_channels
scale_factor: scale the Tensor before clamping and quantization
Returns:
Tensor: shape of (n_channels, sample_rate * duration)
"""
if isinstance(dtype, str):
dtype = getattr(torch, dtype)
if dtype not in [torch.float64, torch.float32, torch.int32, torch.int16, torch.uint8]:
raise NotImplementedError(f"dtype {dtype} is not supported.")
    # According to the docs, forking the RNG on all CUDA devices is slow when there are many of them,
    # so we fork on CPU only, generate the values there, and then move the data to the given device.
with torch.random.fork_rng([]):
torch.random.manual_seed(seed)
tensor = torch.randn([n_channels, int(sample_rate * duration)], dtype=torch.float32, device="cpu")
tensor /= 2.0
tensor *= scale_factor
tensor.clamp_(-1.0, 1.0)
if not channels_first:
tensor = tensor.t()
tensor = tensor.to(device)
return convert_tensor_encoding(tensor, dtype)
|
slurp_scan.py
|
"""Provides a scanner that will group files together under a common prefix"""
import copy
from .abstract_scanner import AbstractScanner
class SlurpScanner(AbstractScanner):
"""SlurpScanner groups files together by a common prefix.
    This works by taking everything before the last slash (or, if there is no slash,
    before the first dot) in each file path, with slashes replaced by underscores,
    and using that as the acquisition label.
"""
def __init__(self, config):
"""Class that handles generic acquisition slurping"""
super(SlurpScanner, self).__init__(config)
def discover(self, walker, context, container_factory, path_prefix=None, audit_log=None):
# Discover files first
        files = sorted(walker.files(subdir=path_prefix))
prefix_len = len(path_prefix or '')
current_prefix = None
current_files = []
for path in files:
path = path.lstrip('/')
prefix = SlurpScanner._get_prefix(path[prefix_len:])
if prefix == current_prefix:
current_files.append(path)
else:
self._add_acquisition(container_factory, context, current_prefix, current_files)
current_prefix = prefix
current_files = [path]
self._add_acquisition(container_factory, context, current_prefix, current_files)
@staticmethod
def _get_prefix(path):
"""Get the appropriate prefix for the given file"""
try:
idx = path.rindex('/')
except ValueError:
try:
idx = path.index('.')
except ValueError:
idx = len(path)
return path[:idx].strip('/').replace('/', '_')
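    # Examples of the prefix rule (illustrative):
    #   'a/b/c.dcm' -> 'a_b'   (cut at the last slash; slashes become underscores)
    #   'scan.zip'  -> 'scan'  (no slash: cut at the first dot)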
def
|
(self, container_factory, context, label, files):
if not label or not files:
return
acquisition_context = copy.deepcopy(context)
acquisition_context.setdefault('acquisition', {})['label'] = label
container = container_factory.resolve(acquisition_context)
container.files.extend(files)
|
_add_acquisition
|
test_get_descendants.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from edward.models import Bernoulli, Normal
from edward.util import get_descendants
class test_get_descendants_class(tf.test.TestCase):
def test_v_structure(self):
"""a -> b -> e <- d <- c"""
with self.test_session():
a = Normal(0.0, 1.0)
b = Normal(a, 1.0)
c = Normal(0.0, 1.0)
d = Normal(c, 1.0)
e = Normal(b * d, 1.0)
self.assertEqual(set(get_descendants(a)), set([b, e]))
self.assertEqual(get_descendants(b), [e])
self.assertEqual(set(get_descendants(c)), set([d, e]))
self.assertEqual(get_descendants(d), [e])
self.assertEqual(get_descendants(e), [])
def test_a_structure(self):
|
def test_chain_structure(self):
"""a -> b -> c -> d -> e"""
with self.test_session():
a = Normal(0.0, 1.0)
b = Normal(a, 1.0)
c = Normal(b, 1.0)
d = Normal(c, 1.0)
e = Normal(d, 1.0)
self.assertEqual(set(get_descendants(a)), set([b, c, d, e]))
self.assertEqual(set(get_descendants(b)), set([c, d, e]))
self.assertEqual(set(get_descendants(c)), set([d, e]))
self.assertEqual(get_descendants(d), [e])
self.assertEqual(get_descendants(e), [])
def test_tensor(self):
with self.test_session():
a = Normal(0.0, 1.0)
b = tf.constant(2.0)
c = a + b
d = Normal(c, 1.0)
self.assertEqual(get_descendants(a), [d])
self.assertEqual(get_descendants(b), [d])
self.assertEqual(get_descendants(c), [d])
self.assertEqual(get_descendants(d), [])
def test_control_flow(self):
with self.test_session():
a = Bernoulli(0.5)
b = Normal(0.0, 1.0)
c = tf.constant(0.0)
d = tf.cond(tf.cast(a, tf.bool), lambda: b, lambda: c)
e = Normal(d, 1.0)
self.assertEqual(get_descendants(a), [e])
self.assertEqual(get_descendants(b), [e])
self.assertEqual(get_descendants(c), [e])
self.assertEqual(get_descendants(d), [e])
self.assertEqual(get_descendants(e), [])
def test_scan(self):
"""copied from test_chain_structure"""
def cumsum(x):
return tf.scan(lambda a, x: a + x, x)
with self.test_session():
a = Normal(tf.ones([3]), tf.ones([3]))
b = Normal(cumsum(a), tf.ones([3]))
c = Normal(cumsum(b), tf.ones([3]))
d = Normal(cumsum(c), tf.ones([3]))
e = Normal(cumsum(d), tf.ones([3]))
self.assertEqual(set(get_descendants(a)), set([b, c, d, e]))
self.assertEqual(set(get_descendants(b)), set([c, d, e]))
self.assertEqual(set(get_descendants(c)), set([d, e]))
self.assertEqual(get_descendants(d), [e])
self.assertEqual(get_descendants(e), [])
if __name__ == '__main__':
tf.test.main()
|
"""e <- d <- a -> b -> c"""
with self.test_session():
a = Normal(0.0, 1.0)
b = Normal(a, 1.0)
c = Normal(b, 1.0)
d = Normal(a, 1.0)
e = Normal(d, 1.0)
self.assertEqual(set(get_descendants(a)), set([b, c, d, e]))
self.assertEqual(get_descendants(b), [c])
self.assertEqual(get_descendants(c), [])
self.assertEqual(get_descendants(d), [e])
self.assertEqual(get_descendants(e), [])
|
quota_linux_common.go
|
// +build linux
/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package common
import (
"regexp"
)
// QuotaID is generic quota identifier.
// Data type based on quotactl(2).
type QuotaID int32
const (
// UnknownQuotaID -- cannot determine whether a quota is in force
UnknownQuotaID QuotaID = -1
// BadQuotaID -- Invalid quota
BadQuotaID QuotaID = 0
)
const (
acct = iota
enforcing = iota
)
// QuotaType -- type of quota to be applied
type QuotaType int
const (
// FSQuotaAccounting for quotas for accounting only
FSQuotaAccounting QuotaType = 1 << iota
// FSQuotaEnforcing for quotas for enforcement
FSQuotaEnforcing QuotaType = 1 << iota
)
// FirstQuota is the quota ID we start with.
// XXXXXXX Need a better way of doing this...
var FirstQuota QuotaID = 1048577
// MountsFile is the location of the system mount data
var MountsFile = "/proc/self/mounts"
// MountParseRegexp parses out /proc/self/mounts
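// For a line such as "/dev/sda1 / ext4 rw,relatime 0 0", the three capture
// groups are the device, the mount point, and the filesystem type.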
var MountParseRegexp = regexp.MustCompilePOSIX("^([^ ]*)[ \t]*([^ ]*)[ \t]*([^ ]*)") // Ignore options etc.
// LinuxVolumeQuotaProvider returns an appropriate quota applier
// object if we can support quotas on this device
type LinuxVolumeQuotaProvider interface {
// GetQuotaApplier retrieves an object that can apply
// quotas (or nil if this provider cannot support quotas
// on the device)
GetQuotaApplier(mountpoint string, backingDev string) LinuxVolumeQuotaApplier
}
// LinuxVolumeQuotaApplier is a generic interface to any quota
// mechanism supported by Linux
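//
// A hedged usage sketch from a consuming package (paths and sizes are
// illustrative):
//
//	applier := provider.GetQuotaApplier("/var/lib/vols", "/dev/sdb1")
//	if applier != nil {
//		// Apply a 1 GiB quota to a volume subdirectory.
//		err := applier.SetQuotaOnDir("/var/lib/vols/vol1", common.FirstQuota, 1<<30)
//		_ = err // a real caller would handle the error
//	}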
type LinuxVolumeQuotaApplier interface {
// GetQuotaOnDir gets the quota ID (if any) that applies to
// this directory
GetQuotaOnDir(path string) (QuotaID, error)
// SetQuotaOnDir applies the specified quota ID to a directory.
// Negative value for bytes means that a non-enforcing quota
// should be applied (perhaps by setting a quota too large to
// be hit)
SetQuotaOnDir(path string, id QuotaID, bytes int64) error
// QuotaIDIsInUse determines whether the quota ID is in use.
// Implementations should not check /etc/project or /etc/projid,
// only whether their underlying mechanism already has the ID in
// use.
// Return value of false with no error means that the ID is not
// in use; true means that it is already in use. An error
// return means that any quota ID will fail.
QuotaIDIsInUse(id QuotaID) (bool, error)
// GetConsumption returns the consumption (in bytes) of the
// directory, determined by the implementation's quota-based
// mechanism. If it is unable to do so using that mechanism,
// it should return an error and allow higher layers to
// enumerate the directory.
GetConsumption(path string, id QuotaID) (int64, error)
// GetInodes returns the number of inodes used by the
// directory, determined by the implementation's quota-based
// mechanism. If it is unable to do so using that mechanism,
// it should return an error and allow higher layers to
// enumerate the directory.
GetInodes(path string, id QuotaID) (int64, error)
}
|
mod.rs
|
pub mod whitespace_text_steganography;
pub mod lsb_text_png_steganography;
|
series_frame_iterator.go
|
// Copyright (c) 2020 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package tile
import (
"errors"
"fmt"
"time"
"github.com/m3db/m3/src/dbnode/persist/fs"
xtime "github.com/m3db/m3/src/x/time"
)
type seriesFrameIter struct {
err error
exhausted bool
started bool
curr SeriesBlockFrame
iter fs.CrossBlockIterator
frameStep time.Duration
frameStart xtime.UnixNano
}
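// newSeriesFrameIterator builds a SeriesFrameIterator that must be Reset
// with a start time, frame step, and cross-block iterator before use; the
// initial "unset" error guards against iterating before Reset is called.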
func newSeriesFrameIterator(recorder *recorder) SeriesFrameIterator {
return &seriesFrameIter{
err: errors.New("unset"),
curr: newSeriesBlockFrame(recorder),
}
}
func (b *seriesFrameIter) Reset(
start xtime.UnixNano,
frameStep time.Duration,
iter fs.CrossBlockIterator,
) error {
if frameStep <= 0 {
b.err = fmt.Errorf("frame step must be > 0, is %d", frameStep)
return b.err
}
b.err = nil
b.iter = iter
b.exhausted = false
b.started = false
b.frameStart = start
b.frameStep = frameStep
b.curr.reset(start, start+xtime.UnixNano(frameStep))
return nil
}
func (b *seriesFrameIter) Err() error {
return b.err
}
func (b *seriesFrameIter) Close() error {
if b.iter != nil {
b.iter = nil
}
return nil
}
func (b *seriesFrameIter) Next() bool {
if b.err != nil || b.exhausted {
return false
}
if !b.started {
b.started = true
			// NB: advance the iterator to its first valid value for frameStart.
if !b.iter.Next() {
return false
}
} else {
b.curr.reset(b.frameStart, b.frameStart+xtime.UnixNano(b.frameStep))
}
cutover := b.frameStart + xtime.UnixNano(b.frameStep)
b.curr.FrameStartInclusive = b.frameStart
b.curr.FrameEndExclusive = cutover
b.frameStart = cutover
firstPoint, firstUnit, firstAnnotation := b.iter.Current()
if firstPoint.TimestampNanos >= cutover {
// NB: empty block.
return true
}
var hasAny, hasMore bool
b.curr.record(firstPoint, firstUnit, firstAnnotation)
for b.iter.Next() {
hasAny = true
dp, unit, annotation := b.iter.Current()
if dp.TimestampNanos >= cutover {
hasMore = true
break
}
b.curr.record(dp, unit, annotation)
}
if !hasAny {
b.exhausted = true
return true
}
	if err := b.iter.Err(); err != nil {
		b.err = err
		return false
	}
if !hasMore {
b.exhausted = true
}
return true
}
func (b *seriesFrameIter) Current() SeriesBlockFrame {
return b.curr
}
|
context.rs
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! ExecutionContext contains methods for registering data sources and executing queries
use crate::{
catalog::{
catalog::{CatalogList, MemoryCatalogList},
information_schema::CatalogWithInformationSchema,
},
logical_plan::{PlanType, ToStringifiedPlan},
optimizer::eliminate_limit::EliminateLimit,
physical_optimizer::{
aggregate_statistics::AggregateStatistics,
hash_build_probe_order::HashBuildProbeOrder, optimizer::PhysicalOptimizerRule,
},
};
use log::debug;
use std::fs;
use std::path::Path;
use std::string::String;
use std::sync::Arc;
use std::{
collections::{HashMap, HashSet},
sync::Mutex,
};
use futures::{StreamExt, TryStreamExt};
use tokio::task::{self, JoinHandle};
use arrow::csv;
use crate::catalog::{
catalog::{CatalogProvider, MemoryCatalogProvider},
schema::{MemorySchemaProvider, SchemaProvider},
ResolvedTableReference, TableReference,
};
use crate::datasource::csv::CsvFile;
use crate::datasource::object_store::{ObjectStore, ObjectStoreRegistry};
use crate::datasource::parquet::ParquetTable;
use crate::datasource::TableProvider;
use crate::error::{DataFusionError, Result};
use crate::execution::dataframe_impl::DataFrameImpl;
use crate::logical_plan::{
FunctionRegistry, LogicalPlan, LogicalPlanBuilder, UNNAMED_TABLE,
};
use crate::optimizer::common_subexpr_eliminate::CommonSubexprEliminate;
use crate::optimizer::constant_folding::ConstantFolding;
use crate::optimizer::filter_push_down::FilterPushDown;
use crate::optimizer::limit_push_down::LimitPushDown;
use crate::optimizer::optimizer::OptimizerRule;
use crate::optimizer::projection_push_down::ProjectionPushDown;
use crate::optimizer::simplify_expressions::SimplifyExpressions;
use crate::physical_optimizer::coalesce_batches::CoalesceBatches;
use crate::physical_optimizer::merge_exec::AddCoalescePartitionsExec;
use crate::physical_optimizer::repartition::Repartition;
use crate::datasource::avro::AvroFile;
use crate::physical_plan::avro::AvroReadOptions;
use crate::physical_plan::csv::CsvReadOptions;
use crate::physical_plan::planner::DefaultPhysicalPlanner;
use crate::physical_plan::udf::ScalarUDF;
use crate::physical_plan::ExecutionPlan;
use crate::physical_plan::PhysicalPlanner;
use crate::sql::{
parser::{DFParser, FileType},
planner::{ContextProvider, SqlToRel},
};
use crate::variable::{VarProvider, VarType};
use crate::{dataframe::DataFrame, physical_plan::udaf::AggregateUDF};
use chrono::{DateTime, Utc};
use parquet::arrow::ArrowWriter;
use parquet::file::properties::WriterProperties;
/// ExecutionContext is the main interface for executing queries with DataFusion. The context
/// provides the following functionality:
///
/// * Create a DataFrame from a CSV or Parquet data source.
/// * Register a CSV or Parquet data source as a table that can be referenced from a SQL query.
/// * Register a custom data source that can be referenced from a SQL query.
/// * Execute a SQL query
///
/// The following example demonstrates how to use the context to execute a query against a CSV
/// data source using the DataFrame API:
///
/// ```
/// use datafusion::prelude::*;
/// # use datafusion::error::Result;
/// # fn main() -> Result<()> {
/// let mut ctx = ExecutionContext::new();
/// let df = ctx.read_csv("tests/example.csv", CsvReadOptions::new())?;
/// let df = df.filter(col("a").lt_eq(col("b")))?
/// .aggregate(vec![col("a")], vec![min(col("b"))])?
/// .limit(100)?;
/// let results = df.collect();
/// # Ok(())
/// # }
/// ```
///
/// The following example demonstrates how to execute the same query using SQL:
///
/// ```
/// use datafusion::prelude::*;
///
/// # use datafusion::error::Result;
/// # fn main() -> Result<()> {
/// let mut ctx = ExecutionContext::new();
/// ctx.register_csv("example", "tests/example.csv", CsvReadOptions::new())?;
/// let results = ctx.sql("SELECT a, MIN(b) FROM example GROUP BY a LIMIT 100")?;
/// # Ok(())
/// # }
/// ```
#[derive(Clone)]
pub struct ExecutionContext {
/// Internal state for the context
pub state: Arc<Mutex<ExecutionContextState>>,
}
impl ExecutionContext {
/// Creates a new execution context using a default configuration.
pub fn new() -> Self {
Self::with_config(ExecutionConfig::new())
}
/// Creates a new execution context using the provided configuration.
pub fn with_config(config: ExecutionConfig) -> Self {
let catalog_list = Arc::new(MemoryCatalogList::new()) as Arc<dyn CatalogList>;
if config.create_default_catalog_and_schema {
let default_catalog = MemoryCatalogProvider::new();
default_catalog.register_schema(
config.default_schema.clone(),
Arc::new(MemorySchemaProvider::new()),
);
let default_catalog: Arc<dyn CatalogProvider> = if config.information_schema {
Arc::new(CatalogWithInformationSchema::new(
Arc::downgrade(&catalog_list),
Arc::new(default_catalog),
))
} else {
Arc::new(default_catalog)
};
catalog_list
.register_catalog(config.default_catalog.clone(), default_catalog);
}
Self {
state: Arc::new(Mutex::new(ExecutionContextState {
catalog_list,
scalar_functions: HashMap::new(),
var_provider: HashMap::new(),
aggregate_functions: HashMap::new(),
config,
execution_props: ExecutionProps::new(),
object_store_registry: Arc::new(ObjectStoreRegistry::new()),
})),
}
}
/// Creates a dataframe that will execute a SQL query.
pub fn sql(&mut self, sql: &str) -> Result<Arc<dyn DataFrame>> {
let plan = self.create_logical_plan(sql)?;
match plan {
LogicalPlan::CreateExternalTable {
ref schema,
ref name,
ref location,
ref file_type,
ref has_header,
} => match file_type {
FileType::CSV => {
self.register_csv(
name,
location,
CsvReadOptions::new()
.schema(&schema.as_ref().to_owned().into())
.has_header(*has_header),
)?;
let plan = LogicalPlanBuilder::empty(false).build()?;
Ok(Arc::new(DataFrameImpl::new(self.state.clone(), &plan)))
}
FileType::Parquet => {
self.register_parquet(name, location)?;
let plan = LogicalPlanBuilder::empty(false).build()?;
Ok(Arc::new(DataFrameImpl::new(self.state.clone(), &plan)))
}
FileType::Avro => {
self.register_avro(name, location, AvroReadOptions::default())?;
let plan = LogicalPlanBuilder::empty(false).build()?;
Ok(Arc::new(DataFrameImpl::new(self.state.clone(), &plan)))
}
_ => Err(DataFusionError::NotImplemented(format!(
"Unsupported file type {:?}.",
file_type
))),
},
plan => Ok(Arc::new(DataFrameImpl::new(
self.state.clone(),
&self.optimize(&plan)?,
))),
}
}
/// Creates a logical plan.
///
/// This function is intended for internal use and should not be called directly.
pub fn create_logical_plan(&self, sql: &str) -> Result<LogicalPlan> {
let statements = DFParser::parse_sql(sql)?;
if statements.len() != 1 {
return Err(DataFusionError::NotImplemented(
"The context currently only supports a single SQL statement".to_string(),
));
}
// create a query planner
let state = self.state.lock().unwrap().clone();
let query_planner = SqlToRel::new(&state);
query_planner.statement_to_plan(&statements[0])
}
/// Registers a variable provider within this context.
pub fn register_variable(
&mut self,
variable_type: VarType,
provider: Arc<dyn VarProvider + Send + Sync>,
) {
self.state
.lock()
.unwrap()
.var_provider
.insert(variable_type, provider);
}
/// Registers a scalar UDF within this context.
///
/// Note in SQL queries, function names are looked up using
/// lowercase unless the query uses quotes. For example,
///
/// `SELECT MY_FUNC(x)...` will look for a function named `"my_func"`
/// `SELECT "my_FUNC"(x)` will look for a function named `"my_FUNC"`
pub fn register_udf(&mut self, f: ScalarUDF) {
self.state
.lock()
.unwrap()
.scalar_functions
.insert(f.name.clone(), Arc::new(f));
}
/// Registers an aggregate UDF within this context.
///
/// Note in SQL queries, aggregate names are looked up using
/// lowercase unless the query uses quotes. For example,
///
/// `SELECT MY_UDAF(x)...` will look for an aggregate named `"my_udaf"`
/// `SELECT "my_UDAF"(x)` will look for an aggregate named `"my_UDAF"`
pub fn register_udaf(&mut self, f: AggregateUDF) {
self.state
.lock()
.unwrap()
.aggregate_functions
.insert(f.name.clone(), Arc::new(f));
}
/// Creates a DataFrame for reading an Avro data source.
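    ///
    /// A minimal sketch (the file path is illustrative):
    ///
    /// ```no_run
    /// # use datafusion::prelude::*;
    /// # use datafusion::physical_plan::avro::AvroReadOptions;
    /// # use datafusion::error::Result;
    /// # fn main() -> Result<()> {
    /// let mut ctx = ExecutionContext::new();
    /// let df = ctx.read_avro("data.avro", AvroReadOptions::default())?;
    /// # Ok(())
    /// # }
    /// ```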
pub fn read_avro(
&mut self,
filename: impl Into<String>,
options: AvroReadOptions,
) -> Result<Arc<dyn DataFrame>> {
Ok(Arc::new(DataFrameImpl::new(
self.state.clone(),
&LogicalPlanBuilder::scan_avro(filename, options, None)?.build()?,
)))
}
/// Creates a DataFrame for reading a CSV data source.
pub fn read_csv(
&mut self,
filename: impl Into<String>,
options: CsvReadOptions,
) -> Result<Arc<dyn DataFrame>> {
Ok(Arc::new(DataFrameImpl::new(
self.state.clone(),
&LogicalPlanBuilder::scan_csv(filename, options, None)?.build()?,
)))
}
/// Creates a DataFrame for reading a Parquet data source.
pub fn read_parquet(
&mut self,
filename: impl Into<String>,
) -> Result<Arc<dyn DataFrame>> {
Ok(Arc::new(DataFrameImpl::new(
self.state.clone(),
&LogicalPlanBuilder::scan_parquet(
filename,
None,
self.state.lock().unwrap().config.target_partitions,
)?
.build()?,
)))
}
/// Creates a DataFrame for reading a custom TableProvider.
pub fn read_table(
&mut self,
provider: Arc<dyn TableProvider>,
) -> Result<Arc<dyn DataFrame>> {
Ok(Arc::new(DataFrameImpl::new(
self.state.clone(),
&LogicalPlanBuilder::scan(UNNAMED_TABLE, provider, None)?.build()?,
)))
}
/// Registers a CSV data source so that it can be referenced from SQL statements
/// executed against this context.
pub fn register_csv(
&mut self,
name: &str,
filename: &str,
options: CsvReadOptions,
) -> Result<()> {
self.register_table(name, Arc::new(CsvFile::try_new(filename, options)?))?;
Ok(())
}
/// Registers a Parquet data source so that it can be referenced from SQL statements
/// executed against this context.
pub fn register_parquet(&mut self, name: &str, filename: &str) -> Result<()> {
let table = {
let m = self.state.lock().unwrap();
ParquetTable::try_new(filename, m.config.target_partitions)?
.with_enable_pruning(m.config.parquet_pruning)
};
self.register_table(name, Arc::new(table))?;
Ok(())
}
/// Registers an Avro data source so that it can be referenced from SQL statements
/// executed against this context.
pub fn register_avro(
&mut self,
name: &str,
filename: &str,
options: AvroReadOptions,
) -> Result<()> {
self.register_table(name, Arc::new(AvroFile::try_new(filename, options)?))?;
Ok(())
}
/// Registers a named catalog using a custom `CatalogProvider` so that
/// it can be referenced from SQL statements executed against this
/// context.
///
/// Returns the `CatalogProvider` previously registered for this
/// name, if any
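    ///
    /// A hedged sketch (the catalog name is illustrative):
    ///
    /// ```no_run
    /// # use std::sync::Arc;
    /// # use datafusion::prelude::*;
    /// # use datafusion::catalog::catalog::MemoryCatalogProvider;
    /// let ctx = ExecutionContext::new();
    /// let previous = ctx.register_catalog("my_catalog", Arc::new(MemoryCatalogProvider::new()));
    /// assert!(previous.is_none());
    /// ```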
pub fn register_catalog(
&self,
name: impl Into<String>,
catalog: Arc<dyn CatalogProvider>,
) -> Option<Arc<dyn CatalogProvider>> {
let name = name.into();
let state = self.state.lock().unwrap();
let catalog = if state.config.information_schema {
Arc::new(CatalogWithInformationSchema::new(
Arc::downgrade(&state.catalog_list),
catalog,
))
} else {
catalog
};
state.catalog_list.register_catalog(name, catalog)
}
/// Retrieves a `CatalogProvider` instance by name
pub fn catalog(&self, name: &str) -> Option<Arc<dyn CatalogProvider>> {
self.state.lock().unwrap().catalog_list.catalog(name)
}
    /// Registers an object store for a scheme using a custom `ObjectStore` so that
    /// an external file system or object storage system can be used with this context.
///
/// Returns the `ObjectStore` previously registered for this scheme, if any
pub fn register_object_store(
&self,
scheme: impl Into<String>,
object_store: Arc<dyn ObjectStore>,
) -> Option<Arc<dyn ObjectStore>> {
let scheme = scheme.into();
self.state
.lock()
.unwrap()
.object_store_registry
.register_store(scheme, object_store)
}
    /// Retrieves an `ObjectStore` instance by scheme
pub fn object_store(&self, scheme: &str) -> Option<Arc<dyn ObjectStore>> {
self.state.lock().unwrap().object_store_registry.get(scheme)
}
/// Registers a table using a custom `TableProvider` so that
/// it can be referenced from SQL statements executed against this
/// context.
///
/// Returns the `TableProvider` previously registered for this
/// reference, if any
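    ///
    /// A hedged sketch using an in-memory table (the schema and table name are
    /// illustrative):
    ///
    /// ```no_run
    /// # use std::sync::Arc;
    /// # use datafusion::arrow::datatypes::{DataType, Field, Schema};
    /// # use datafusion::datasource::MemTable;
    /// # use datafusion::prelude::*;
    /// # use datafusion::error::Result;
    /// # fn main() -> Result<()> {
    /// let schema = Arc::new(Schema::new(vec![Field::new("a", DataType::Int32, false)]));
    /// let table = MemTable::try_new(schema, vec![vec![]])?;
    /// let mut ctx = ExecutionContext::new();
    /// ctx.register_table("t", Arc::new(table))?;
    /// # Ok(())
    /// # }
    /// ```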
pub fn register_table<'a>(
&'a mut self,
table_ref: impl Into<TableReference<'a>>,
provider: Arc<dyn TableProvider>,
) -> Result<Option<Arc<dyn TableProvider>>> {
let table_ref = table_ref.into();
self.state
.lock()
.unwrap()
.schema_for_ref(table_ref)?
.register_table(table_ref.table().to_owned(), provider)
}
/// Deregisters the given table.
///
/// Returns the registered provider, if any
pub fn deregister_table<'a>(
&'a mut self,
table_ref: impl Into<TableReference<'a>>,
) -> Result<Option<Arc<dyn TableProvider>>> {
let table_ref = table_ref.into();
self.state
.lock()
.unwrap()
.schema_for_ref(table_ref)?
.deregister_table(table_ref.table())
}
/// Retrieves a DataFrame representing a table previously registered by calling the
/// register_table function.
///
/// Returns an error if no table has been registered with the provided reference.
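    ///
    /// A minimal sketch (assumes the bundled "tests/example.csv" fixture):
    ///
    /// ```no_run
    /// # use datafusion::prelude::*;
    /// # use datafusion::error::Result;
    /// # fn main() -> Result<()> {
    /// let mut ctx = ExecutionContext::new();
    /// ctx.register_csv("example", "tests/example.csv", CsvReadOptions::new())?;
    /// let df = ctx.table("example")?;
    /// # Ok(())
    /// # }
    /// ```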
pub fn table<'a>(
&self,
table_ref: impl Into<TableReference<'a>>,
) -> Result<Arc<dyn DataFrame>> {
let table_ref = table_ref.into();
let schema = self.state.lock().unwrap().schema_for_ref(table_ref)?;
match schema.table(table_ref.table()) {
Some(ref provider) => {
let plan = LogicalPlanBuilder::scan(
table_ref.table(),
Arc::clone(provider),
None,
)?
.build()?;
Ok(Arc::new(DataFrameImpl::new(self.state.clone(), &plan)))
}
_ => Err(DataFusionError::Plan(format!(
"No table named '{}'",
table_ref.table()
))),
}
}
/// Returns the set of available tables in the default catalog and schema.
///
/// Use [`table`] to get a specific table.
///
/// [`table`]: ExecutionContext::table
#[deprecated(
note = "Please use the catalog provider interface (`ExecutionContext::catalog`) to examine available catalogs, schemas, and tables"
)]
pub fn tables(&self) -> Result<HashSet<String>> {
Ok(self
.state
.lock()
.unwrap()
// a bare reference will always resolve to the default catalog and schema
.schema_for_ref(TableReference::Bare { table: "" })?
.table_names()
.iter()
.cloned()
.collect())
}
/// Optimizes the logical plan by applying optimizer rules.
pub fn optimize(&self, plan: &LogicalPlan) -> Result<LogicalPlan> {
if let LogicalPlan::Explain {
verbose,
plan,
stringified_plans,
schema,
} = plan
{
let mut stringified_plans = stringified_plans.clone();
// optimize the child plan, capturing the output of each optimizer
let plan = self.optimize_internal(plan, |optimized_plan, optimizer| {
let optimizer_name = optimizer.name().to_string();
let plan_type = PlanType::OptimizedLogicalPlan { optimizer_name };
stringified_plans.push(optimized_plan.to_stringified(plan_type));
})?;
Ok(LogicalPlan::Explain {
verbose: *verbose,
plan: Arc::new(plan),
stringified_plans,
schema: schema.clone(),
})
} else {
self.optimize_internal(plan, |_, _| {})
}
}
/// Creates a physical plan from a logical plan.
pub fn create_physical_plan(
&self,
logical_plan: &LogicalPlan,
) -> Result<Arc<dyn ExecutionPlan>> {
let mut state = self.state.lock().unwrap();
state.execution_props.start_execution();
state
.config
.query_planner
.create_physical_plan(logical_plan, &state)
}
/// Executes a query and writes the results to a partitioned CSV file.
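    ///
    /// A hedged sketch (the output directory is illustrative; one CSV file is
    /// written per partition of the plan):
    ///
    /// ```no_run
    /// # use datafusion::prelude::*;
    /// # use datafusion::error::Result;
    /// # async fn example() -> Result<()> {
    /// let mut ctx = ExecutionContext::new();
    /// let df = ctx.read_csv("tests/example.csv", CsvReadOptions::new())?;
    /// let logical_plan = ctx.optimize(&df.to_logical_plan())?;
    /// let physical_plan = ctx.create_physical_plan(&logical_plan)?;
    /// ctx.write_csv(physical_plan, "out_dir").await?;
    /// # Ok(())
    /// # }
    /// ```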
pub async fn write_csv(
&self,
plan: Arc<dyn ExecutionPlan>,
path: impl AsRef<str>,
) -> Result<()> {
let path = path.as_ref();
// create directory to contain the CSV files (one per partition)
let fs_path = Path::new(path);
match fs::create_dir(fs_path) {
Ok(()) => {
let mut tasks = vec![];
for i in 0..plan.output_partitioning().partition_count() {
let plan = plan.clone();
let filename = format!("part-{}.csv", i);
let path = fs_path.join(&filename);
let file = fs::File::create(path)?;
let mut writer = csv::Writer::new(file);
let stream = plan.execute(i).await?;
let handle: JoinHandle<Result<()>> = task::spawn(async move {
stream
.map(|batch| writer.write(&batch?))
.try_collect()
.await
.map_err(DataFusionError::from)
});
tasks.push(handle);
}
futures::future::join_all(tasks).await;
Ok(())
}
Err(e) => Err(DataFusionError::Execution(format!(
"Could not create directory {}: {:?}",
path, e
))),
}
}
/// Executes a query and writes the results to a partitioned Parquet file.
pub async fn write_parquet(
&self,
plan: Arc<dyn ExecutionPlan>,
path: impl AsRef<str>,
writer_properties: Option<WriterProperties>,
) -> Result<()> {
let path = path.as_ref();
// create directory to contain the Parquet files (one per partition)
let fs_path = Path::new(path);
match fs::create_dir(fs_path) {
Ok(()) => {
let mut tasks = vec![];
for i in 0..plan.output_partitioning().partition_count() {
let plan = plan.clone();
let filename = format!("part-{}.parquet", i);
let path = fs_path.join(&filename);
let file = fs::File::create(path)?;
let mut writer = ArrowWriter::try_new(
file.try_clone().unwrap(),
plan.schema(),
writer_properties.clone(),
)?;
let stream = plan.execute(i).await?;
let handle: JoinHandle<Result<()>> = task::spawn(async move {
stream
.map(|batch| writer.write(&batch?))
.try_collect()
.await
.map_err(DataFusionError::from)?;
writer.close().map_err(DataFusionError::from).map(|_| ())
});
tasks.push(handle);
}
futures::future::join_all(tasks).await;
Ok(())
}
Err(e) => Err(DataFusionError::Execution(format!(
"Could not create directory {}: {:?}",
path, e
))),
}
}
/// Optimizes the logical plan by applying optimizer rules, and
/// invoking observer function after each call
fn optimize_internal<F>(
&self,
plan: &LogicalPlan,
mut observer: F,
) -> Result<LogicalPlan>
where
F: FnMut(&LogicalPlan, &dyn OptimizerRule),
{
let state = &mut self.state.lock().unwrap();
let execution_props = &mut state.execution_props.clone();
let optimizers = &state.config.optimizers;
let execution_props = execution_props.start_execution();
let mut new_plan = plan.clone();
debug!("Logical plan:\n {:?}", plan);
for optimizer in optimizers {
new_plan = optimizer.optimize(&new_plan, execution_props)?;
observer(&new_plan, optimizer.as_ref());
}
debug!("Optimized logical plan:\n {:?}", new_plan);
Ok(new_plan)
}
}
impl From<Arc<Mutex<ExecutionContextState>>> for ExecutionContext {
fn from(state: Arc<Mutex<ExecutionContextState>>) -> Self {
ExecutionContext { state }
}
}
impl FunctionRegistry for ExecutionContext {
fn udfs(&self) -> HashSet<String> {
self.state.lock().unwrap().udfs()
}
fn udf(&self, name: &str) -> Result<Arc<ScalarUDF>> {
self.state.lock().unwrap().udf(name)
}
fn udaf(&self, name: &str) -> Result<Arc<AggregateUDF>> {
self.state.lock().unwrap().udaf(name)
}
}
/// A planner used to add extensions to DataFusion logical and physical plans.
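///
/// A hedged sketch of a pass-through implementation (type paths assumed from
/// this module; a real planner would add custom extension handling):
///
/// ```ignore
/// struct PassThroughPlanner;
///
/// impl QueryPlanner for PassThroughPlanner {
///     fn create_physical_plan(
///         &self,
///         logical_plan: &LogicalPlan,
///         ctx_state: &ExecutionContextState,
///     ) -> Result<Arc<dyn ExecutionPlan>> {
///         // Delegate to the default planner.
///         DefaultPhysicalPlanner::default().create_physical_plan(logical_plan, ctx_state)
///     }
/// }
/// ```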
pub trait QueryPlanner {
/// Given a `LogicalPlan`, create an `ExecutionPlan` suitable for execution
fn create_physical_plan(
&self,
logical_plan: &LogicalPlan,
ctx_state: &ExecutionContextState,
) -> Result<Arc<dyn ExecutionPlan>>;
}
/// The query planner used if no user defined planner is provided
struct DefaultQueryPlanner {}
impl QueryPlanner for DefaultQueryPlanner {
/// Given a `LogicalPlan`, create an `ExecutionPlan` suitable for execution
fn create_physical_plan(
&self,
logical_plan: &LogicalPlan,
ctx_state: &ExecutionContextState,
) -> Result<Arc<dyn ExecutionPlan>> {
let planner = DefaultPhysicalPlanner::default();
planner.create_physical_plan(logical_plan, ctx_state)
}
}
/// Configuration options for execution context
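///
/// A hedged usage sketch composing several of the builder methods below:
///
/// ```
/// use datafusion::prelude::*;
///
/// let config = ExecutionConfig::new()
///     .with_target_partitions(8)
///     .with_batch_size(4096)
///     .with_information_schema(true);
/// let ctx = ExecutionContext::with_config(config);
/// ```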
#[derive(Clone)]
pub struct ExecutionConfig {
/// Number of partitions for query execution. Increasing partitions can increase concurrency.
pub target_partitions: usize,
/// Default batch size when reading data sources
pub batch_size: usize,
/// Responsible for optimizing a logical plan
optimizers: Vec<Arc<dyn OptimizerRule + Send + Sync>>,
/// Responsible for optimizing a physical execution plan
pub physical_optimizers: Vec<Arc<dyn PhysicalOptimizerRule + Send + Sync>>,
    /// Responsible for planning `LogicalPlan`s and creating `ExecutionPlan`s
query_planner: Arc<dyn QueryPlanner + Send + Sync>,
/// Default catalog name for table resolution
default_catalog: String,
/// Default schema name for table resolution
default_schema: String,
/// Whether the default catalog and schema should be created automatically
create_default_catalog_and_schema: bool,
/// Should DataFusion provide access to `information_schema`
/// virtual tables for displaying schema information
information_schema: bool,
/// Should DataFusion repartition data using the join keys to execute joins in parallel
/// using the provided `target_partitions` level
pub repartition_joins: bool,
/// Should DataFusion repartition data using the aggregate keys to execute aggregates in parallel
/// using the provided `target_partitions` level
pub repartition_aggregations: bool,
/// Should DataFusion repartition data using the partition keys to execute window functions in
/// parallel using the provided `target_partitions` level
pub repartition_windows: bool,
    /// Should the DataFusion Parquet reader use predicates to prune row groups
parquet_pruning: bool,
}
impl Default for ExecutionConfig {
fn default() -> Self {
Self {
target_partitions: num_cpus::get(),
batch_size: 8192,
optimizers: vec![
Arc::new(ConstantFolding::new()),
Arc::new(CommonSubexprEliminate::new()),
Arc::new(EliminateLimit::new()),
Arc::new(ProjectionPushDown::new()),
Arc::new(FilterPushDown::new()),
Arc::new(SimplifyExpressions::new()),
Arc::new(LimitPushDown::new()),
],
physical_optimizers: vec![
Arc::new(AggregateStatistics::new()),
Arc::new(HashBuildProbeOrder::new()),
Arc::new(CoalesceBatches::new()),
Arc::new(Repartition::new()),
Arc::new(AddCoalescePartitionsExec::new()),
],
query_planner: Arc::new(DefaultQueryPlanner {}),
default_catalog: "datafusion".to_owned(),
default_schema: "public".to_owned(),
create_default_catalog_and_schema: true,
information_schema: false,
repartition_joins: true,
repartition_aggregations: true,
repartition_windows: true,
parquet_pruning: true,
}
}
}
impl ExecutionConfig {
    /// Create an execution config with default settings
pub fn new() -> Self {
Default::default()
}
/// Deprecated. Use with_target_partitions instead.
#[deprecated(
since = "5.1.0",
note = "This method is deprecated in favor of `with_target_partitions`."
)]
pub fn with_concurrency(self, n: usize) -> Self {
self.with_target_partitions(n)
}
/// Customize target_partitions
pub fn with_target_partitions(mut self, n: usize) -> Self {
// partition count must be greater than zero
assert!(n > 0);
self.target_partitions = n;
self
}
/// Customize batch size
pub fn with_batch_size(mut self, n: usize) -> Self {
// batch size must be greater than zero
assert!(n > 0);
self.batch_size = n;
self
}
/// Replace the default query planner
pub fn with_query_planner(
mut self,
query_planner: Arc<dyn QueryPlanner + Send + Sync>,
) -> Self {
self.query_planner = query_planner;
self
}
/// Replace the physical optimizer rules
pub fn with_physical_optimizer_rules(
mut self,
physical_optimizers: Vec<Arc<dyn PhysicalOptimizerRule + Send + Sync>>,
) -> Self {
self.physical_optimizers = physical_optimizers;
self
}
/// Adds a new [`OptimizerRule`]
pub fn add_optimizer_rule(
mut self,
optimizer_rule: Arc<dyn OptimizerRule + Send + Sync>,
) -> Self {
self.optimizers.push(optimizer_rule);
self
}
/// Adds a new [`PhysicalOptimizerRule`]
pub fn add_physical_optimizer_rule(
mut self,
optimizer_rule: Arc<dyn PhysicalOptimizerRule + Send + Sync>,
) -> Self {
self.physical_optimizers.push(optimizer_rule);
self
}
/// Selects a name for the default catalog and schema
pub fn with_default_catalog_and_schema(
mut self,
catalog: impl Into<String>,
schema: impl Into<String>,
) -> Self {
self.default_catalog = catalog.into();
self.default_schema = schema.into();
self
}
/// Controls whether the default catalog and schema will be automatically created
pub fn create_default_catalog_and_schema(mut self, create: bool) -> Self {
self.create_default_catalog_and_schema = create;
self
}
/// Enables or disables the inclusion of `information_schema` virtual tables
pub fn with_information_schema(mut self, enabled: bool) -> Self {
self.information_schema = enabled;
self
}
/// Enables or disables the use of repartitioning for joins to improve parallelism
pub fn with_repartition_joins(mut self, enabled: bool) -> Self {
self.repartition_joins = enabled;
self
}
/// Enables or disables the use of repartitioning for aggregations to improve parallelism
pub fn with_repartition_aggregations(mut self, enabled: bool) -> Self {
self.repartition_aggregations = enabled;
self
}
/// Enables or disables the use of repartitioning for window functions to improve parallelism
pub fn with_repartition_windows(mut self, enabled: bool) -> Self {
self.repartition_windows = enabled;
self
}
/// Enables or disables the use of pruning predicate for parquet readers to skip row groups
pub fn with_parquet_pruning(mut self, enabled: bool) -> Self {
self.parquet_pruning = enabled;
self
}
}
/// Holds per-execution properties and data (such as starting timestamps, etc).
/// An instance of this struct is created each time a [`LogicalPlan`] is prepared for
/// execution (optimized). If the same plan is optimized multiple times, a new
/// `ExecutionProps` is created each time.
#[derive(Clone)]
pub struct ExecutionProps {
pub(crate) query_execution_start_time: DateTime<Utc>,
}
/// Execution context for registering data sources and executing queries
#[derive(Clone)]
pub struct ExecutionContextState {
/// Collection of catalogs containing schemas and ultimately TableProviders
pub catalog_list: Arc<dyn CatalogList>,
/// Scalar functions that are registered with the context
pub scalar_functions: HashMap<String, Arc<ScalarUDF>>,
/// Variable provider that are registered with the context
pub var_provider: HashMap<VarType, Arc<dyn VarProvider + Send + Sync>>,
/// Aggregate functions registered in the context
pub aggregate_functions: HashMap<String, Arc<AggregateUDF>>,
/// Context configuration
pub config: ExecutionConfig,
/// Execution properties
pub execution_props: ExecutionProps,
/// Object Store that are registered with the context
pub object_store_registry: Arc<ObjectStoreRegistry>,
}
impl ExecutionProps {
/// Creates a new execution props
    pub fn new() -> Self {
        ExecutionProps {
            query_execution_start_time: chrono::Utc::now(),
        }
    }
    /// Records the timestamp at which query execution started
pub fn start_execution(&mut self) -> &Self {
self.query_execution_start_time = chrono::Utc::now();
&*self
}
}
impl ExecutionContextState {
/// Returns new ExecutionContextState
pub fn new() -> Self {
ExecutionContextState {
catalog_list: Arc::new(MemoryCatalogList::new()),
scalar_functions: HashMap::new(),
var_provider: HashMap::new(),
aggregate_functions: HashMap::new(),
config: ExecutionConfig::new(),
execution_props: ExecutionProps::new(),
object_store_registry: Arc::new(ObjectStoreRegistry::new()),
}
}
fn resolve_table_ref<'a>(
&'a self,
table_ref: impl Into<TableReference<'a>>,
) -> ResolvedTableReference<'a> {
table_ref
.into()
.resolve(&self.config.default_catalog, &self.config.default_schema)
}
fn schema_for_ref<'a>(
&'a self,
table_ref: impl Into<TableReference<'a>>,
) -> Result<Arc<dyn SchemaProvider>> {
let resolved_ref = self.resolve_table_ref(table_ref.into());
self.catalog_list
.catalog(resolved_ref.catalog)
.ok_or_else(|| {
DataFusionError::Plan(format!(
"failed to resolve catalog: {}",
resolved_ref.catalog
))
})?
.schema(resolved_ref.schema)
.ok_or_else(|| {
DataFusionError::Plan(format!(
"failed to resolve schema: {}",
resolved_ref.schema
))
})
}
}
impl ContextProvider for ExecutionContextState {
fn get_table_provider(&self, name: TableReference) -> Option<Arc<dyn TableProvider>> {
let resolved_ref = self.resolve_table_ref(name);
let schema = self.schema_for_ref(resolved_ref).ok()?;
schema.table(resolved_ref.table)
}
fn get_function_meta(&self, name: &str) -> Option<Arc<ScalarUDF>> {
self.scalar_functions.get(name).cloned()
}
fn get_aggregate_meta(&self, name: &str) -> Option<Arc<AggregateUDF>> {
self.aggregate_functions.get(name).cloned()
}
}
impl FunctionRegistry for ExecutionContextState {
fn udfs(&self) -> HashSet<String> {
self.scalar_functions.keys().cloned().collect()
}
fn udf(&self, name: &str) -> Result<Arc<ScalarUDF>> {
let result = self.scalar_functions.get(name);
result.cloned().ok_or_else(|| {
DataFusionError::Plan(format!(
"There is no UDF named \"{}\" in the registry",
name
))
})
}
fn udaf(&self, name: &str) -> Result<Arc<AggregateUDF>> {
let result = self.aggregate_functions.get(name);
result.cloned().ok_or_else(|| {
DataFusionError::Plan(format!(
"There is no UDAF named \"{}\" in the registry",
name
))
})
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::logical_plan::{binary_expr, lit, Operator};
use crate::physical_plan::functions::make_scalar_function;
use crate::physical_plan::{collect, collect_partitioned};
use crate::test;
use crate::variable::VarType;
use crate::{
assert_batches_eq, assert_batches_sorted_eq,
logical_plan::{col, create_udf, sum, Expr},
};
use crate::{
datasource::{empty::EmptyTable, MemTable, TableType},
logical_plan::create_udaf,
physical_plan::expressions::AvgAccumulator,
};
use arrow::array::{
Array, ArrayRef, BinaryArray, DictionaryArray, Float32Array, Float64Array,
Int16Array, Int32Array, Int64Array, Int8Array, LargeBinaryArray,
LargeStringArray, StringArray, TimestampNanosecondArray, UInt16Array,
UInt32Array, UInt64Array, UInt8Array,
};
use arrow::compute::add;
use arrow::datatypes::*;
use arrow::record_batch::RecordBatch;
use std::fs::File;
use std::sync::Weak;
use std::thread::{self, JoinHandle};
use std::{io::prelude::*, sync::Mutex};
use tempfile::TempDir;
use test::*;
#[test]
fn optimize_explain() {
let schema = Schema::new(vec![Field::new("id", DataType::Int32, false)]);
let plan = LogicalPlanBuilder::scan_empty(Some("employee"), &schema, None)
.unwrap()
.explain(true, false)
.unwrap()
.build()
.unwrap();
if let LogicalPlan::Explain {
stringified_plans, ..
} = &plan
{
assert_eq!(stringified_plans.len(), 1);
} else {
panic!("plan was not an explain: {:?}", plan);
}
// now optimize the plan and expect to see more plans
let optimized_plan = ExecutionContext::new().optimize(&plan).unwrap();
if let LogicalPlan::Explain {
stringified_plans, ..
} = &optimized_plan
{
// should have more than one plan
assert!(
stringified_plans.len() > 1,
"plans: {:#?}",
stringified_plans
);
// should have at least one optimized plan
let opt = stringified_plans
.iter()
.any(|p| matches!(p.plan_type, PlanType::OptimizedLogicalPlan { .. }));
assert!(opt, "plans: {:#?}", stringified_plans);
} else {
panic!("plan was not an explain: {:?}", plan);
}
}
#[tokio::test]
async fn parallel_projection() -> Result<()> {
let partition_count = 4;
let results = execute("SELECT c1, c2 FROM test", partition_count).await?;
let expected = vec![
"+----+----+",
"| c1 | c2 |",
"+----+----+",
"| 3 | 1 |",
"| 3 | 2 |",
"| 3 | 3 |",
"| 3 | 4 |",
"| 3 | 5 |",
"| 3 | 6 |",
"| 3 | 7 |",
"| 3 | 8 |",
"| 3 | 9 |",
"| 3 | 10 |",
"| 2 | 1 |",
"| 2 | 2 |",
"| 2 | 3 |",
"| 2 | 4 |",
"| 2 | 5 |",
"| 2 | 6 |",
"| 2 | 7 |",
"| 2 | 8 |",
"| 2 | 9 |",
"| 2 | 10 |",
"| 1 | 1 |",
"| 1 | 2 |",
"| 1 | 3 |",
"| 1 | 4 |",
"| 1 | 5 |",
"| 1 | 6 |",
"| 1 | 7 |",
"| 1 | 8 |",
"| 1 | 9 |",
"| 1 | 10 |",
"| 0 | 1 |",
"| 0 | 2 |",
"| 0 | 3 |",
"| 0 | 4 |",
"| 0 | 5 |",
"| 0 | 6 |",
"| 0 | 7 |",
"| 0 | 8 |",
"| 0 | 9 |",
"| 0 | 10 |",
"+----+----+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn create_variable_expr() -> Result<()> {
let tmp_dir = TempDir::new()?;
let partition_count = 4;
let mut ctx = create_ctx(&tmp_dir, partition_count)?;
let variable_provider = test::variable::SystemVar::new();
ctx.register_variable(VarType::System, Arc::new(variable_provider));
let variable_provider = test::variable::UserDefinedVar::new();
ctx.register_variable(VarType::UserDefined, Arc::new(variable_provider));
let provider = test::create_table_dual();
ctx.register_table("dual", provider)?;
let results =
plan_and_collect(&mut ctx, "SELECT @@version, @name FROM dual").await?;
let expected = vec![
"+----------------------+------------------------+",
"| @@version | @name |",
"+----------------------+------------------------+",
"| system-var-@@version | user-defined-var-@name |",
"+----------------------+------------------------+",
];
assert_batches_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn register_deregister() -> Result<()> {
let tmp_dir = TempDir::new()?;
let partition_count = 4;
let mut ctx = create_ctx(&tmp_dir, partition_count)?;
let provider = test::create_table_dual();
ctx.register_table("dual", provider)?;
assert!(ctx.deregister_table("dual")?.is_some());
assert!(ctx.deregister_table("dual")?.is_none());
Ok(())
}
#[tokio::test]
async fn parallel_query_with_filter() -> Result<()> {
let tmp_dir = TempDir::new()?;
let partition_count = 4;
let ctx = create_ctx(&tmp_dir, partition_count)?;
let logical_plan =
ctx.create_logical_plan("SELECT c1, c2 FROM test WHERE c1 > 0 AND c1 < 3")?;
let logical_plan = ctx.optimize(&logical_plan)?;
let physical_plan = ctx.create_physical_plan(&logical_plan)?;
let results = collect_partitioned(physical_plan).await?;
// note that the order of partitions is not deterministic
let mut num_rows = 0;
for partition in &results {
for batch in partition {
num_rows += batch.num_rows();
}
}
assert_eq!(20, num_rows);
let results: Vec<RecordBatch> = results.into_iter().flatten().collect();
let expected = vec![
"+----+----+",
"| c1 | c2 |",
"+----+----+",
"| 1 | 1 |",
"| 1 | 10 |",
"| 1 | 2 |",
"| 1 | 3 |",
"| 1 | 4 |",
"| 1 | 5 |",
"| 1 | 6 |",
"| 1 | 7 |",
"| 1 | 8 |",
"| 1 | 9 |",
"| 2 | 1 |",
"| 2 | 10 |",
"| 2 | 2 |",
"| 2 | 3 |",
"| 2 | 4 |",
"| 2 | 5 |",
"| 2 | 6 |",
"| 2 | 7 |",
"| 2 | 8 |",
"| 2 | 9 |",
"+----+----+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn projection_on_table_scan() -> Result<()> {
let tmp_dir = TempDir::new()?;
let partition_count = 4;
let ctx = create_ctx(&tmp_dir, partition_count)?;
let table = ctx.table("test")?;
let logical_plan = LogicalPlanBuilder::from(table.to_logical_plan())
.project(vec![col("c2")])?
.build()?;
let optimized_plan = ctx.optimize(&logical_plan)?;
match &optimized_plan {
LogicalPlan::Projection { input, .. } => match &**input {
LogicalPlan::TableScan {
source,
projected_schema,
..
} => {
assert_eq!(source.schema().fields().len(), 3);
assert_eq!(projected_schema.fields().len(), 1);
}
_ => panic!("input to projection should be TableScan"),
},
_ => panic!("expect optimized_plan to be projection"),
}
let expected = "Projection: #test.c2\
\n TableScan: test projection=Some([1])";
assert_eq!(format!("{:?}", optimized_plan), expected);
let physical_plan = ctx.create_physical_plan(&optimized_plan)?;
assert_eq!(1, physical_plan.schema().fields().len());
assert_eq!("c2", physical_plan.schema().field(0).name().as_str());
let batches = collect(physical_plan).await?;
assert_eq!(40, batches.iter().map(|x| x.num_rows()).sum::<usize>());
Ok(())
}
#[test]
fn preserve_nullability_on_projection() -> Result<()> {
let tmp_dir = TempDir::new()?;
let ctx = create_ctx(&tmp_dir, 1)?;
let schema: Schema = ctx.table("test").unwrap().schema().clone().into();
assert!(!schema.field_with_name("c1")?.is_nullable());
let plan = LogicalPlanBuilder::scan_empty(None, &schema, None)?
.project(vec![col("c1")])?
.build()?;
let plan = ctx.optimize(&plan)?;
let physical_plan = ctx.create_physical_plan(&Arc::new(plan))?;
assert!(!physical_plan.schema().field_with_name("c1")?.is_nullable());
Ok(())
}
#[tokio::test]
async fn projection_on_memory_scan() -> Result<()> {
let schema = Schema::new(vec![
Field::new("a", DataType::Int32, false),
Field::new("b", DataType::Int32, false),
Field::new("c", DataType::Int32, false),
]);
let schema = SchemaRef::new(schema);
let partitions = vec![vec![RecordBatch::try_new(
schema.clone(),
vec![
Arc::new(Int32Array::from(vec![1, 10, 10, 100])),
Arc::new(Int32Array::from(vec![2, 12, 12, 120])),
Arc::new(Int32Array::from(vec![3, 12, 12, 120])),
],
)?]];
let plan = LogicalPlanBuilder::scan_memory(partitions, schema, None)?
.project(vec![col("b")])?
.build()?;
assert_fields_eq(&plan, vec!["b"]);
let ctx = ExecutionContext::new();
let optimized_plan = ctx.optimize(&plan)?;
match &optimized_plan {
LogicalPlan::Projection { input, .. } => match &**input {
LogicalPlan::TableScan {
source,
projected_schema,
..
} => {
assert_eq!(source.schema().fields().len(), 3);
assert_eq!(projected_schema.fields().len(), 1);
}
_ => panic!("input to projection should be InMemoryScan"),
},
_ => panic!("expect optimized_plan to be projection"),
}
let expected = format!(
"Projection: #{}.b\
\n TableScan: {} projection=Some([1])",
UNNAMED_TABLE, UNNAMED_TABLE
);
assert_eq!(format!("{:?}", optimized_plan), expected);
let physical_plan = ctx.create_physical_plan(&optimized_plan)?;
assert_eq!(1, physical_plan.schema().fields().len());
assert_eq!("b", physical_plan.schema().field(0).name().as_str());
let batches = collect(physical_plan).await?;
assert_eq!(1, batches.len());
assert_eq!(1, batches[0].num_columns());
assert_eq!(4, batches[0].num_rows());
Ok(())
}
#[tokio::test]
async fn sort() -> Result<()> {
let results =
execute("SELECT c1, c2 FROM test ORDER BY c1 DESC, c2 ASC", 4).await?;
assert_eq!(results.len(), 1);
let expected: Vec<&str> = vec![
"+----+----+",
"| c1 | c2 |",
"+----+----+",
"| 3 | 1 |",
"| 3 | 2 |",
"| 3 | 3 |",
"| 3 | 4 |",
"| 3 | 5 |",
"| 3 | 6 |",
"| 3 | 7 |",
"| 3 | 8 |",
"| 3 | 9 |",
"| 3 | 10 |",
"| 2 | 1 |",
"| 2 | 2 |",
"| 2 | 3 |",
"| 2 | 4 |",
"| 2 | 5 |",
"| 2 | 6 |",
"| 2 | 7 |",
"| 2 | 8 |",
"| 2 | 9 |",
"| 2 | 10 |",
"| 1 | 1 |",
"| 1 | 2 |",
"| 1 | 3 |",
"| 1 | 4 |",
"| 1 | 5 |",
"| 1 | 6 |",
"| 1 | 7 |",
"| 1 | 8 |",
"| 1 | 9 |",
"| 1 | 10 |",
"| 0 | 1 |",
"| 0 | 2 |",
"| 0 | 3 |",
"| 0 | 4 |",
"| 0 | 5 |",
"| 0 | 6 |",
"| 0 | 7 |",
"| 0 | 8 |",
"| 0 | 9 |",
"| 0 | 10 |",
"+----+----+",
];
// Note it is important to NOT use assert_batches_sorted_eq
// here as we are testing the sortedness of the output
assert_batches_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn sort_empty() -> Result<()> {
// The predicate on this query purposely generates no results
let results = execute(
"SELECT c1, c2 FROM test WHERE c1 > 100000 ORDER BY c1 DESC, c2 ASC",
4,
)
.await
.unwrap();
assert_eq!(results.len(), 0);
Ok(())
}
#[tokio::test]
async fn left_join_using() -> Result<()> {
let results = execute(
"SELECT t1.c1, t2.c2 FROM test t1 JOIN test t2 USING (c2) ORDER BY t2.c2",
1,
)
.await?;
assert_eq!(results.len(), 1);
let expected = vec![
"+----+----+",
"| c1 | c2 |",
"+----+----+",
"| 0 | 1 |",
"| 0 | 2 |",
"| 0 | 3 |",
"| 0 | 4 |",
"| 0 | 5 |",
"| 0 | 6 |",
"| 0 | 7 |",
"| 0 | 8 |",
"| 0 | 9 |",
"| 0 | 10 |",
"+----+----+",
];
assert_batches_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn left_join_using_join_key_projection() -> Result<()> {
let results = execute(
"SELECT t1.c1, t1.c2, t2.c2 FROM test t1 JOIN test t2 USING (c2) ORDER BY t2.c2",
1,
)
.await?;
assert_eq!(results.len(), 1);
let expected = vec![
"+----+----+----+",
"| c1 | c2 | c2 |",
"+----+----+----+",
"| 0 | 1 | 1 |",
"| 0 | 2 | 2 |",
"| 0 | 3 | 3 |",
"| 0 | 4 | 4 |",
"| 0 | 5 | 5 |",
"| 0 | 6 | 6 |",
"| 0 | 7 | 7 |",
"| 0 | 8 | 8 |",
"| 0 | 9 | 9 |",
"| 0 | 10 | 10 |",
"+----+----+----+",
];
assert_batches_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn left_join() -> Result<()> {
let results = execute(
"SELECT t1.c1, t1.c2, t2.c2 FROM test t1 JOIN test t2 ON t1.c2 = t2.c2 ORDER BY t1.c2",
1,
)
.await?;
assert_eq!(results.len(), 1);
let expected = vec![
"+----+----+----+",
"| c1 | c2 | c2 |",
"+----+----+----+",
"| 0 | 1 | 1 |",
"| 0 | 2 | 2 |",
"| 0 | 3 | 3 |",
"| 0 | 4 | 4 |",
"| 0 | 5 | 5 |",
"| 0 | 6 | 6 |",
"| 0 | 7 | 7 |",
"| 0 | 8 | 8 |",
"| 0 | 9 | 9 |",
"| 0 | 10 | 10 |",
"+----+----+----+",
];
assert_batches_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn window() -> Result<()> {
let results = execute(
"SELECT \
c1, \
c2, \
SUM(c2) OVER (), \
COUNT(c2) OVER (), \
MAX(c2) OVER (), \
MIN(c2) OVER (), \
AVG(c2) OVER () \
FROM test \
ORDER BY c1, c2 \
LIMIT 5",
4,
)
.await?;
        // results arrive in a single batch; although e.g. two batches would
        // not change result semantics, asserting len == 1 upfront keeps
        // surprises at bay
assert_eq!(results.len(), 1);
let expected = vec![
"+----+----+--------------+----------------+--------------+--------------+--------------+",
"| c1 | c2 | SUM(test.c2) | COUNT(test.c2) | MAX(test.c2) | MIN(test.c2) | AVG(test.c2) |",
"+----+----+--------------+----------------+--------------+--------------+--------------+",
"| 0 | 1 | 220 | 40 | 10 | 1 | 5.5 |",
"| 0 | 2 | 220 | 40 | 10 | 1 | 5.5 |",
"| 0 | 3 | 220 | 40 | 10 | 1 | 5.5 |",
"| 0 | 4 | 220 | 40 | 10 | 1 | 5.5 |",
"| 0 | 5 | 220 | 40 | 10 | 1 | 5.5 |",
"+----+----+--------------+----------------+--------------+--------------+--------------+",
];
// window function shall respect ordering
assert_batches_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn window_order_by() -> Result<()> {
let results = execute(
"SELECT \
c1, \
c2, \
ROW_NUMBER() OVER (ORDER BY c1, c2), \
FIRST_VALUE(c2) OVER (ORDER BY c1, c2), \
LAST_VALUE(c2) OVER (ORDER BY c1, c2), \
NTH_VALUE(c2, 2) OVER (ORDER BY c1, c2), \
SUM(c2) OVER (ORDER BY c1, c2), \
COUNT(c2) OVER (ORDER BY c1, c2), \
MAX(c2) OVER (ORDER BY c1, c2), \
MIN(c2) OVER (ORDER BY c1, c2), \
AVG(c2) OVER (ORDER BY c1, c2) \
FROM test \
ORDER BY c1, c2 \
LIMIT 5",
4,
)
.await?;
        // results arrive in a single batch; although e.g. two batches would
        // not change result semantics, asserting len == 1 upfront keeps
        // surprises at bay
assert_eq!(results.len(), 1);
let expected = vec![
"+----+----+--------------+----------------------+---------------------+-----------------------------+--------------+----------------+--------------+--------------+--------------+",
"| c1 | c2 | ROW_NUMBER() | FIRST_VALUE(test.c2) | LAST_VALUE(test.c2) | NTH_VALUE(test.c2,Int64(2)) | SUM(test.c2) | COUNT(test.c2) | MAX(test.c2) | MIN(test.c2) | AVG(test.c2) |",
"+----+----+--------------+----------------------+---------------------+-----------------------------+--------------+----------------+--------------+--------------+--------------+",
"| 0 | 1 | 1 | 1 | 1 | | 1 | 1 | 1 | 1 | 1 |",
"| 0 | 2 | 2 | 1 | 2 | 2 | 3 | 2 | 2 | 1 | 1.5 |",
"| 0 | 3 | 3 | 1 | 3 | 2 | 6 | 3 | 3 | 1 | 2 |",
"| 0 | 4 | 4 | 1 | 4 | 2 | 10 | 4 | 4 | 1 | 2.5 |",
"| 0 | 5 | 5 | 1 | 5 | 2 | 15 | 5 | 5 | 1 | 3 |",
"+----+----+--------------+----------------------+---------------------+-----------------------------+--------------+----------------+--------------+--------------+--------------+",
];
// window function shall respect ordering
assert_batches_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn window_partition_by() -> Result<()> {
let results = execute(
"SELECT \
c1, \
c2, \
SUM(c2) OVER (PARTITION BY c2), \
COUNT(c2) OVER (PARTITION BY c2), \
MAX(c2) OVER (PARTITION BY c2), \
MIN(c2) OVER (PARTITION BY c2), \
AVG(c2) OVER (PARTITION BY c2) \
FROM test \
ORDER BY c1, c2 \
LIMIT 5",
4,
)
.await?;
let expected = vec![
"+----+----+--------------+----------------+--------------+--------------+--------------+",
"| c1 | c2 | SUM(test.c2) | COUNT(test.c2) | MAX(test.c2) | MIN(test.c2) | AVG(test.c2) |",
"+----+----+--------------+----------------+--------------+--------------+--------------+",
"| 0 | 1 | 4 | 4 | 1 | 1 | 1 |",
"| 0 | 2 | 8 | 4 | 2 | 2 | 2 |",
"| 0 | 3 | 12 | 4 | 3 | 3 | 3 |",
"| 0 | 4 | 16 | 4 | 4 | 4 | 4 |",
"| 0 | 5 | 20 | 4 | 5 | 5 | 5 |",
"+----+----+--------------+----------------+--------------+--------------+--------------+",
];
// window function shall respect ordering
assert_batches_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn window_partition_by_order_by() -> Result<()> {
let results = execute(
"SELECT \
c1, \
c2, \
ROW_NUMBER() OVER (PARTITION BY c2 ORDER BY c1), \
FIRST_VALUE(c2 + c1) OVER (PARTITION BY c2 ORDER BY c1), \
LAST_VALUE(c2 + c1) OVER (PARTITION BY c2 ORDER BY c1), \
NTH_VALUE(c2 + c1, 1) OVER (PARTITION BY c2 ORDER BY c1), \
SUM(c2) OVER (PARTITION BY c2 ORDER BY c1), \
COUNT(c2) OVER (PARTITION BY c2 ORDER BY c1), \
MAX(c2) OVER (PARTITION BY c2 ORDER BY c1), \
MIN(c2) OVER (PARTITION BY c2 ORDER BY c1), \
AVG(c2) OVER (PARTITION BY c2 ORDER BY c1) \
FROM test \
ORDER BY c1, c2 \
LIMIT 5",
4,
)
.await?;
let expected = vec![
"+----+----+--------------+-----------------------------------+----------------------------------+------------------------------------------+--------------+----------------+--------------+--------------+--------------+",
"| c1 | c2 | ROW_NUMBER() | FIRST_VALUE(test.c2 Plus test.c1) | LAST_VALUE(test.c2 Plus test.c1) | NTH_VALUE(test.c2 Plus test.c1,Int64(1)) | SUM(test.c2) | COUNT(test.c2) | MAX(test.c2) | MIN(test.c2) | AVG(test.c2) |",
"+----+----+--------------+-----------------------------------+----------------------------------+------------------------------------------+--------------+----------------+--------------+--------------+--------------+",
"| 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 |",
"| 0 | 2 | 1 | 2 | 2 | 2 | 2 | 1 | 2 | 2 | 2 |",
"| 0 | 3 | 1 | 3 | 3 | 3 | 3 | 1 | 3 | 3 | 3 |",
"| 0 | 4 | 1 | 4 | 4 | 4 | 4 | 1 | 4 | 4 | 4 |",
"| 0 | 5 | 1 | 5 | 5 | 5 | 5 | 1 | 5 | 5 | 5 |",
"+----+----+--------------+-----------------------------------+----------------------------------+------------------------------------------+--------------+----------------+--------------+--------------+--------------+",
];
// window function shall respect ordering
assert_batches_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn aggregate() -> Result<()> {
let results = execute("SELECT SUM(c1), SUM(c2) FROM test", 4).await?;
assert_eq!(results.len(), 1);
let expected = vec![
"+--------------+--------------+",
"| SUM(test.c1) | SUM(test.c2) |",
"+--------------+--------------+",
"| 60 | 220 |",
"+--------------+--------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn aggregate_empty() -> Result<()> {
// The predicate on this query purposely generates no results
let results = execute("SELECT SUM(c1), SUM(c2) FROM test where c1 > 100000", 4)
.await
.unwrap();
assert_eq!(results.len(), 1);
let expected = vec![
"+--------------+--------------+",
"| SUM(test.c1) | SUM(test.c2) |",
"+--------------+--------------+",
"| | |",
"+--------------+--------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn aggregate_avg() -> Result<()> {
let results = execute("SELECT AVG(c1), AVG(c2) FROM test", 4).await?;
assert_eq!(results.len(), 1);
let expected = vec![
"+--------------+--------------+",
"| AVG(test.c1) | AVG(test.c2) |",
"+--------------+--------------+",
"| 1.5 | 5.5 |",
"+--------------+--------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn aggregate_max() -> Result<()> {
let results = execute("SELECT MAX(c1), MAX(c2) FROM test", 4).await?;
assert_eq!(results.len(), 1);
let expected = vec![
"+--------------+--------------+",
"| MAX(test.c1) | MAX(test.c2) |",
"+--------------+--------------+",
"| 3 | 10 |",
"+--------------+--------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn aggregate_min() -> Result<()> {
let results = execute("SELECT MIN(c1), MIN(c2) FROM test", 4).await?;
assert_eq!(results.len(), 1);
let expected = vec![
"+--------------+--------------+",
"| MIN(test.c1) | MIN(test.c2) |",
"+--------------+--------------+",
"| 0 | 1 |",
"+--------------+--------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn aggregate_grouped() -> Result<()> {
let results = execute("SELECT c1, SUM(c2) FROM test GROUP BY c1", 4).await?;
let expected = vec![
"+----+--------------+",
"| c1 | SUM(test.c2) |",
"+----+--------------+",
"| 0 | 55 |",
"| 1 | 55 |",
"| 2 | 55 |",
"| 3 | 55 |",
"+----+--------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn aggregate_grouped_avg() -> Result<()> {
let results = execute("SELECT c1, AVG(c2) FROM test GROUP BY c1", 4).await?;
let expected = vec![
"+----+--------------+",
"| c1 | AVG(test.c2) |",
"+----+--------------+",
"| 0 | 5.5 |",
"| 1 | 5.5 |",
"| 2 | 5.5 |",
"| 3 | 5.5 |",
"+----+--------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn boolean_literal() -> Result<()> {
let results =
execute("SELECT c1, c3 FROM test WHERE c1 > 2 AND c3 = true", 4).await?;
let expected = vec![
"+----+------+",
"| c1 | c3 |",
"+----+------+",
"| 3 | true |",
"| 3 | true |",
"| 3 | true |",
"| 3 | true |",
"| 3 | true |",
"+----+------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn aggregate_grouped_empty() -> Result<()> {
let results =
execute("SELECT c1, AVG(c2) FROM test WHERE c1 = 123 GROUP BY c1", 4).await?;
let expected = vec!["++", "||", "++", "++"];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn aggregate_grouped_max() -> Result<()> {
let results = execute("SELECT c1, MAX(c2) FROM test GROUP BY c1", 4).await?;
let expected = vec![
"+----+--------------+",
"| c1 | MAX(test.c2) |",
"+----+--------------+",
"| 0 | 10 |",
"| 1 | 10 |",
"| 2 | 10 |",
"| 3 | 10 |",
"+----+--------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn aggregate_grouped_min() -> Result<()> {
let results = execute("SELECT c1, MIN(c2) FROM test GROUP BY c1", 4).await?;
let expected = vec![
"+----+--------------+",
"| c1 | MIN(test.c2) |",
"+----+--------------+",
"| 0 | 1 |",
"| 1 | 1 |",
"| 2 | 1 |",
"| 3 | 1 |",
"+----+--------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn aggregate_timestamps_sum() -> Result<()> {
let tmp_dir = TempDir::new()?;
let mut ctx = create_ctx(&tmp_dir, 1)?;
ctx.register_table("t", test::table_with_timestamps())
.unwrap();
let results = plan_and_collect(
&mut ctx,
"SELECT sum(nanos), sum(micros), sum(millis), sum(secs) FROM t",
)
.await
.unwrap_err();
assert_eq!(results.to_string(), "Error during planning: Coercion from [Timestamp(Nanosecond, None)] to the signature Uniform(1, [Int8, Int16, Int32, Int64, UInt8, UInt16, UInt32, UInt64, Float32, Float64]) failed.");
Ok(())
}
#[tokio::test]
async fn aggregate_timestamps_count() -> Result<()> {
let tmp_dir = TempDir::new()?;
let mut ctx = create_ctx(&tmp_dir, 1)?;
ctx.register_table("t", test::table_with_timestamps())
.unwrap();
let results = plan_and_collect(
&mut ctx,
"SELECT count(nanos), count(micros), count(millis), count(secs) FROM t",
)
.await
.unwrap();
let expected = vec![
"+----------------+-----------------+-----------------+---------------+",
"| COUNT(t.nanos) | COUNT(t.micros) | COUNT(t.millis) | COUNT(t.secs) |",
"+----------------+-----------------+-----------------+---------------+",
"| 3 | 3 | 3 | 3 |",
"+----------------+-----------------+-----------------+---------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn aggregate_timestamps_min() -> Result<()> {
let tmp_dir = TempDir::new()?;
let mut ctx = create_ctx(&tmp_dir, 1)?;
ctx.register_table("t", test::table_with_timestamps())
.unwrap();
let results = plan_and_collect(
&mut ctx,
"SELECT min(nanos), min(micros), min(millis), min(secs) FROM t",
)
.await
.unwrap();
let expected = vec![
"+----------------------------+----------------------------+-------------------------+---------------------+",
"| MIN(t.nanos) | MIN(t.micros) | MIN(t.millis) | MIN(t.secs) |",
"+----------------------------+----------------------------+-------------------------+---------------------+",
"| 2011-12-13 11:13:10.123450 | 2011-12-13 11:13:10.123450 | 2011-12-13 11:13:10.123 | 2011-12-13 11:13:10 |",
"+----------------------------+----------------------------+-------------------------+---------------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn aggregate_timestamps_max() -> Result<()> {
let tmp_dir = TempDir::new()?;
let mut ctx = create_ctx(&tmp_dir, 1)?;
ctx.register_table("t", test::table_with_timestamps())
.unwrap();
let results = plan_and_collect(
&mut ctx,
"SELECT max(nanos), max(micros), max(millis), max(secs) FROM t",
)
.await
.unwrap();
let expected = vec![
"+-------------------------+-------------------------+-------------------------+---------------------+",
"| MAX(t.nanos) | MAX(t.micros) | MAX(t.millis) | MAX(t.secs) |",
"+-------------------------+-------------------------+-------------------------+---------------------+",
"| 2021-01-01 05:11:10.432 | 2021-01-01 05:11:10.432 | 2021-01-01 05:11:10.432 | 2021-01-01 05:11:10 |",
"+-------------------------+-------------------------+-------------------------+---------------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn aggregate_timestamps_avg() -> Result<()> {
let tmp_dir = TempDir::new()?;
let mut ctx = create_ctx(&tmp_dir, 1)?;
ctx.register_table("t", test::table_with_timestamps())
.unwrap();
let results = plan_and_collect(
&mut ctx,
"SELECT avg(nanos), avg(micros), avg(millis), avg(secs) FROM t",
)
.await
.unwrap_err();
assert_eq!(results.to_string(), "Error during planning: Coercion from [Timestamp(Nanosecond, None)] to the signature Uniform(1, [Int8, Int16, Int32, Int64, UInt8, UInt16, UInt32, UInt64, Float32, Float64]) failed.");
Ok(())
}
#[tokio::test]
async fn aggregate_avg_add() -> Result<()> {
let results = execute(
"SELECT AVG(c1), AVG(c1) + 1, AVG(c1) + 2, 1 + AVG(c1) FROM test",
4,
)
.await?;
assert_eq!(results.len(), 1);
let expected = vec![
"+--------------+----------------------------+----------------------------+----------------------------+",
"| AVG(test.c1) | AVG(test.c1) Plus Int64(1) | AVG(test.c1) Plus Int64(2) | Int64(1) Plus AVG(test.c1) |",
"+--------------+----------------------------+----------------------------+----------------------------+",
"| 1.5 | 2.5 | 3.5 | 2.5 |",
"+--------------+----------------------------+----------------------------+----------------------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn join_partitioned() -> Result<()> {
// self join on partition id (workaround for duplicate column name)
let results = execute(
"SELECT 1 FROM test JOIN (SELECT c1 AS id1 FROM test) ON c1=id1",
4,
)
.await?;
assert_eq!(
results.iter().map(|b| b.num_rows()).sum::<usize>(),
4 * 10 * 10
);
Ok(())
}
#[tokio::test]
async fn count_basic() -> Result<()> {
let results = execute("SELECT COUNT(c1), COUNT(c2) FROM test", 1).await?;
assert_eq!(results.len(), 1);
let expected = vec![
"+----------------+----------------+",
"| COUNT(test.c1) | COUNT(test.c2) |",
"+----------------+----------------+",
"| 10 | 10 |",
"+----------------+----------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn count_partitioned() -> Result<()> {
let results = execute("SELECT COUNT(c1), COUNT(c2) FROM test", 4).await?;
assert_eq!(results.len(), 1);
let expected = vec![
"+----------------+----------------+",
"| COUNT(test.c1) | COUNT(test.c2) |",
"+----------------+----------------+",
"| 40 | 40 |",
"+----------------+----------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn count_aggregated() -> Result<()> {
let results = execute("SELECT c1, COUNT(c2) FROM test GROUP BY c1", 4).await?;
let expected = vec![
"+----+----------------+",
"| c1 | COUNT(test.c2) |",
"+----+----------------+",
"| 0 | 10 |",
"| 1 | 10 |",
"| 2 | 10 |",
"| 3 | 10 |",
"+----+----------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn group_by_date_trunc() -> Result<()> {
let tmp_dir = TempDir::new()?;
let mut ctx = ExecutionContext::new();
let schema = Arc::new(Schema::new(vec![
Field::new("c2", DataType::UInt64, false),
Field::new(
"t1",
DataType::Timestamp(TimeUnit::Microsecond, None),
false,
),
]));
// generate a partitioned file
for partition in 0..4 {
let filename = format!("partition-{}.{}", partition, "csv");
let file_path = tmp_dir.path().join(&filename);
let mut file = File::create(file_path)?;
// generate some data
for i in 0..10 {
let data = format!("{},2020-12-{}T00:00:00.000\n", i, i + 10);
file.write_all(data.as_bytes())?;
}
}
ctx.register_csv(
"test",
tmp_dir.path().to_str().unwrap(),
CsvReadOptions::new().schema(&schema).has_header(false),
)?;
let results = plan_and_collect(
&mut ctx,
"SELECT date_trunc('week', t1) as week, SUM(c2) FROM test GROUP BY date_trunc('week', t1)",
).await?;
let expected = vec![
"+---------------------+--------------+",
"| week | SUM(test.c2) |",
"+---------------------+--------------+",
"| 2020-12-07 00:00:00 | 24 |",
"| 2020-12-14 00:00:00 | 156 |",
"+---------------------+--------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn group_by_largeutf8() {
{
let mut ctx = ExecutionContext::new();
// input data looks like:
// A, 1
// B, 2
// A, 2
// A, 4
// C, 1
// A, 1
let str_array: LargeStringArray = vec!["A", "B", "A", "A", "C", "A"]
.into_iter()
.map(Some)
.collect();
let str_array = Arc::new(str_array);
let val_array: Int64Array = vec![1, 2, 2, 4, 1, 1].into();
let val_array = Arc::new(val_array);
let schema = Arc::new(Schema::new(vec![
Field::new("str", str_array.data_type().clone(), false),
Field::new("val", val_array.data_type().clone(), false),
]));
let batch =
RecordBatch::try_new(schema.clone(), vec![str_array, val_array]).unwrap();
let provider = MemTable::try_new(schema.clone(), vec![vec![batch]]).unwrap();
ctx.register_table("t", Arc::new(provider)).unwrap();
let results =
plan_and_collect(&mut ctx, "SELECT str, count(val) FROM t GROUP BY str")
.await
.expect("ran plan correctly");
let expected = vec![
"+-----+--------------+",
"| str | COUNT(t.val) |",
"+-----+--------------+",
"| A | 4 |",
"| B | 1 |",
"| C | 1 |",
"+-----+--------------+",
];
assert_batches_sorted_eq!(expected, &results);
}
}
#[tokio::test]
async fn unprojected_filter() {
let mut ctx = ExecutionContext::new();
let df = ctx
.read_table(test::table_with_sequence(1, 3).unwrap())
.unwrap();
let df = df
.select(vec![binary_expr(col("i"), Operator::Plus, col("i"))])
.unwrap()
.filter(col("i").gt(lit(2)))
.unwrap();
let results = df.collect().await.unwrap();
let expected = vec![
"+--------------------------+",
"| ?table?.i Plus ?table?.i |",
"+--------------------------+",
"| 6 |",
"+--------------------------+",
];
assert_batches_sorted_eq!(expected, &results);
}
#[tokio::test]
async fn group_by_dictionary() {
async fn run_test_case<K: ArrowDictionaryKeyType>() {
let mut ctx = ExecutionContext::new();
// input data looks like:
// A, 1
// B, 2
// A, 2
// A, 4
// C, 1
// A, 1
let dict_array: DictionaryArray<K> =
vec!["A", "B", "A", "A", "C", "A"].into_iter().collect();
let dict_array = Arc::new(dict_array);
let val_array: Int64Array = vec![1, 2, 2, 4, 1, 1].into();
let val_array = Arc::new(val_array);
let schema = Arc::new(Schema::new(vec![
Field::new("dict", dict_array.data_type().clone(), false),
Field::new("val", val_array.data_type().clone(), false),
]));
let batch = RecordBatch::try_new(schema.clone(), vec![dict_array, val_array])
.unwrap();
let provider = MemTable::try_new(schema.clone(), vec![vec![batch]]).unwrap();
ctx.register_table("t", Arc::new(provider)).unwrap();
let results = plan_and_collect(
&mut ctx,
"SELECT dict, count(val) FROM t GROUP BY dict",
)
.await
.expect("ran plan correctly");
let expected = vec![
"+------+--------------+",
"| dict | COUNT(t.val) |",
"+------+--------------+",
"| A | 4 |",
"| B | 1 |",
"| C | 1 |",
"+------+--------------+",
];
assert_batches_sorted_eq!(expected, &results);
// Now, use dict as an aggregate
let results =
plan_and_collect(&mut ctx, "SELECT val, count(dict) FROM t GROUP BY val")
.await
.expect("ran plan correctly");
let expected = vec![
"+-----+---------------+",
"| val | COUNT(t.dict) |",
"+-----+---------------+",
"| 1 | 3 |",
"| 2 | 2 |",
"| 4 | 1 |",
"+-----+---------------+",
];
assert_batches_sorted_eq!(expected, &results);
// Now, use dict inside a COUNT(DISTINCT) aggregate
let results = plan_and_collect(
&mut ctx,
"SELECT val, count(distinct dict) FROM t GROUP BY val",
)
.await
.expect("ran plan correctly");
let expected = vec![
"+-----+------------------------+",
"| val | COUNT(DISTINCT t.dict) |",
"+-----+------------------------+",
"| 1 | 2 |",
"| 2 | 2 |",
"| 4 | 1 |",
"+-----+------------------------+",
];
assert_batches_sorted_eq!(expected, &results);
}
run_test_case::<Int8Type>().await;
run_test_case::<Int16Type>().await;
run_test_case::<Int32Type>().await;
run_test_case::<Int64Type>().await;
run_test_case::<UInt8Type>().await;
run_test_case::<UInt16Type>().await;
run_test_case::<UInt32Type>().await;
run_test_case::<UInt64Type>().await;
}
async fn run_count_distinct_integers_aggregated_scenario(
partitions: Vec<Vec<(&str, u64)>>,
) -> Result<Vec<RecordBatch>> {
let tmp_dir = TempDir::new()?;
let mut ctx = ExecutionContext::new();
let schema = Arc::new(Schema::new(vec![
Field::new("c_group", DataType::Utf8, false),
Field::new("c_int8", DataType::Int8, false),
Field::new("c_int16", DataType::Int16, false),
Field::new("c_int32", DataType::Int32, false),
Field::new("c_int64", DataType::Int64, false),
Field::new("c_uint8", DataType::UInt8, false),
Field::new("c_uint16", DataType::UInt16, false),
Field::new("c_uint32", DataType::UInt32, false),
Field::new("c_uint64", DataType::UInt64, false),
]));
for (i, partition) in partitions.iter().enumerate() {
let filename = format!("partition-{}.csv", i);
let file_path = tmp_dir.path().join(&filename);
let mut file = File::create(file_path)?;
for row in partition {
let row_str = format!(
"{},{}\n",
row.0,
// Populate values for each of the integer fields in the
// schema.
(0..8)
.map(|_| { row.1.to_string() })
.collect::<Vec<_>>()
.join(","),
);
file.write_all(row_str.as_bytes())?;
}
}
ctx.register_csv(
"test",
tmp_dir.path().to_str().unwrap(),
CsvReadOptions::new().schema(&schema).has_header(false),
)?;
let results = plan_and_collect(
&mut ctx,
"
SELECT
c_group,
COUNT(c_uint64),
COUNT(DISTINCT c_int8),
COUNT(DISTINCT c_int16),
COUNT(DISTINCT c_int32),
COUNT(DISTINCT c_int64),
COUNT(DISTINCT c_uint8),
COUNT(DISTINCT c_uint16),
COUNT(DISTINCT c_uint32),
COUNT(DISTINCT c_uint64)
FROM test
GROUP BY c_group
",
)
.await?;
Ok(results)
}
#[tokio::test]
async fn count_distinct_integers_aggregated_single_partition() -> Result<()> {
let partitions = vec![
// The first member of each tuple will be the value for the
// `c_group` column, and the second member will be the value for
// each of the int/uint fields.
vec![
("a", 1),
("a", 1),
("a", 2),
("b", 9),
("c", 9),
("c", 10),
("c", 9),
],
];
let results = run_count_distinct_integers_aggregated_scenario(partitions).await?;
let expected = vec![
"+---------+----------------------+-----------------------------+------------------------------+------------------------------+------------------------------+------------------------------+-------------------------------+-------------------------------+-------------------------------+",
"| c_group | COUNT(test.c_uint64) | COUNT(DISTINCT test.c_int8) | COUNT(DISTINCT test.c_int16) | COUNT(DISTINCT test.c_int32) | COUNT(DISTINCT test.c_int64) | COUNT(DISTINCT test.c_uint8) | COUNT(DISTINCT test.c_uint16) | COUNT(DISTINCT test.c_uint32) | COUNT(DISTINCT test.c_uint64) |",
"+---------+----------------------+-----------------------------+------------------------------+------------------------------+------------------------------+------------------------------+-------------------------------+-------------------------------+-------------------------------+",
"| a | 3 | 2 | 2 | 2 | 2 | 2 | 2 | 2 | 2 |",
"| b | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 |",
"| c | 3 | 2 | 2 | 2 | 2 | 2 | 2 | 2 | 2 |",
"+---------+----------------------+-----------------------------+------------------------------+------------------------------+------------------------------+------------------------------+-------------------------------+-------------------------------+-------------------------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[tokio::test]
async fn count_distinct_integers_aggregated_multiple_partitions() -> Result<()> {
let partitions = vec![
// The first member of each tuple will be the value for the
// `c_group` column, and the second member will be the value for
// each of the int/uint fields.
vec![("a", 1), ("a", 1), ("a", 2), ("b", 9), ("c", 9)],
vec![("a", 1), ("a", 3), ("b", 8), ("b", 9), ("b", 10), ("b", 11)],
];
let results = run_count_distinct_integers_aggregated_scenario(partitions).await?;
let expected = vec![
"+---------+----------------------+-----------------------------+------------------------------+------------------------------+------------------------------+------------------------------+-------------------------------+-------------------------------+-------------------------------+",
"| c_group | COUNT(test.c_uint64) | COUNT(DISTINCT test.c_int8) | COUNT(DISTINCT test.c_int16) | COUNT(DISTINCT test.c_int32) | COUNT(DISTINCT test.c_int64) | COUNT(DISTINCT test.c_uint8) | COUNT(DISTINCT test.c_uint16) | COUNT(DISTINCT test.c_uint32) | COUNT(DISTINCT test.c_uint64) |",
"+---------+----------------------+-----------------------------+------------------------------+------------------------------+------------------------------+------------------------------+-------------------------------+-------------------------------+-------------------------------+",
"| a | 5 | 3 | 3 | 3 | 3 | 3 | 3 | 3 | 3 |",
"| b | 5 | 4 | 4 | 4 | 4 | 4 | 4 | 4 | 4 |",
"| c | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 |",
"+---------+----------------------+-----------------------------+------------------------------+------------------------------+------------------------------+------------------------------+-------------------------------+-------------------------------+-------------------------------+",
];
assert_batches_sorted_eq!(expected, &results);
Ok(())
}
#[test]
fn aggregate_with_alias() -> Result<()> {
let tmp_dir = TempDir::new()?;
let ctx = create_ctx(&tmp_dir, 1)?;
let schema = Arc::new(Schema::new(vec![
Field::new("c1", DataType::Utf8, false),
Field::new("c2", DataType::UInt32, false),
]));
let plan = LogicalPlanBuilder::scan_empty(None, schema.as_ref(), None)?
.aggregate(vec![col("c1")], vec![sum(col("c2"))])?
.project(vec![col("c1"), sum(col("c2")).alias("total_salary")])?
.build()?;
let plan = ctx.optimize(&plan)?;
let physical_plan = ctx.create_physical_plan(&Arc::new(plan))?;
assert_eq!("c1", physical_plan.schema().field(0).name().as_str());
assert_eq!(
"total_salary",
physical_plan.schema().field(1).name().as_str()
);
Ok(())
}
#[tokio::test]
async fn limit() -> Result<()> {
let tmp_dir = TempDir::new()?;
let mut ctx = create_ctx(&tmp_dir, 1)?;
ctx.register_table("t", test::table_with_sequence(1, 1000).unwrap())
.unwrap();
let results =
plan_and_collect(&mut ctx, "SELECT i FROM t ORDER BY i DESC limit 3")
.await
.unwrap();
let expected = vec![
"+------+", "| i |", "+------+", "| 1000 |", "| 999 |", "| 998 |",
"+------+",
];
assert_batches_eq!(expected, &results);
let results = plan_and_collect(&mut ctx, "SELECT i FROM t ORDER BY i limit 3")
.await
.unwrap();
let expected = vec![
"+---+", "| i |", "+---+", "| 1 |", "| 2 |", "| 3 |", "+---+",
];
assert_batches_eq!(expected, &results);
let results = plan_and_collect(&mut ctx, "SELECT i FROM t limit 3")
.await
.unwrap();
// the actual rows are not guaranteed, so only check the count (should be 3)
let num_rows: usize = results.into_iter().map(|b| b.num_rows()).sum();
assert_eq!(num_rows, 3);
Ok(())
}
#[tokio::test]
async fn limit_multi_partitions() -> Result<()> {
let tmp_dir = TempDir::new()?;
let mut ctx = create_ctx(&tmp_dir, 1)?;
let partitions = vec![
vec![test::make_partition(0)],
vec![test::make_partition(1)],
vec![test::make_partition(2)],
vec![test::make_partition(3)],
vec![test::make_partition(4)],
vec![test::make_partition(5)],
];
let schema = partitions[0][0].schema();
let provider = Arc::new(MemTable::try_new(schema, partitions).unwrap());
ctx.register_table("t", provider).unwrap();
// select all rows
let results = plan_and_collect(&mut ctx, "SELECT i FROM t").await.unwrap();
let num_rows: usize = results.into_iter().map(|b| b.num_rows()).sum();
assert_eq!(num_rows, 15);
for limit in 1..10 {
let query = format!("SELECT i FROM t limit {}", limit);
let results = plan_and_collect(&mut ctx, &query).await.unwrap();
let num_rows: usize = results.into_iter().map(|b| b.num_rows()).sum();
assert_eq!(num_rows, limit, "mismatch with query {}", query);
}
Ok(())
}
#[tokio::test]
async fn case_sensitive_identifiers_functions() {
let mut ctx = ExecutionContext::new();
ctx.register_table("t", test::table_with_sequence(1, 1).unwrap())
.unwrap();
let expected = vec![
"+-----------+",
"| sqrt(t.i) |",
"+-----------+",
"| 1 |",
"+-----------+",
];
let results = plan_and_collect(&mut ctx, "SELECT sqrt(i) FROM t")
.await
.unwrap();
assert_batches_sorted_eq!(expected, &results);
let results = plan_and_collect(&mut ctx, "SELECT SQRT(i) FROM t")
.await
.unwrap();
assert_batches_sorted_eq!(expected, &results);
// Double quotes preserve the capitalization of the function name, making the lookup case sensitive
let err = plan_and_collect(&mut ctx, "SELECT \"SQRT\"(i) FROM t")
.await
.unwrap_err();
assert_eq!(
err.to_string(),
"Error during planning: Invalid function 'SQRT'"
);
let results = plan_and_collect(&mut ctx, "SELECT \"sqrt\"(i) FROM t")
.await
.unwrap();
assert_batches_sorted_eq!(expected, &results);
}
#[tokio::test]
async fn case_builtin_math_expression() {
let mut ctx = ExecutionContext::new();
let type_values = vec![
(
DataType::Int8,
Arc::new(Int8Array::from(vec![1])) as ArrayRef,
),
(
DataType::Int16,
Arc::new(Int16Array::from(vec![1])) as ArrayRef,
),
(
DataType::Int32,
Arc::new(Int32Array::from(vec![1])) as ArrayRef,
),
(
DataType::Int64,
Arc::new(Int64Array::from(vec![1])) as ArrayRef,
),
(
DataType::UInt8,
Arc::new(UInt8Array::from(vec![1])) as ArrayRef,
),
(
DataType::UInt16,
Arc::new(UInt16Array::from(vec![1])) as ArrayRef,
),
(
DataType::UInt32,
Arc::new(UInt32Array::from(vec![1])) as ArrayRef,
),
(
DataType::UInt64,
Arc::new(UInt64Array::from(vec![1])) as ArrayRef,
),
(
DataType::Float32,
Arc::new(Float32Array::from(vec![1.0_f32])) as ArrayRef,
),
(
DataType::Float64,
Arc::new(Float64Array::from(vec![1.0_f64])) as ArrayRef,
),
];
for (data_type, array) in type_values.iter() {
let schema =
Arc::new(Schema::new(vec![Field::new("v", data_type.clone(), false)]));
let batch =
RecordBatch::try_new(schema.clone(), vec![array.clone()]).unwrap();
let provider = MemTable::try_new(schema, vec![vec![batch]]).unwrap();
ctx.register_table("t", Arc::new(provider)).unwrap();
let expected = vec![
"+-----------+",
"| sqrt(t.v) |",
"+-----------+",
"| 1 |",
"+-----------+",
];
let results = plan_and_collect(&mut ctx, "SELECT sqrt(v) FROM t")
.await
.unwrap();
assert_batches_sorted_eq!(expected, &results);
}
}
#[tokio::test]
async fn case_sensitive_identifiers_user_defined_functions() -> Result<()> {
let mut ctx = ExecutionContext::new();
ctx.register_table("t", test::table_with_sequence(1, 1).unwrap())
.unwrap();
let myfunc = |args: &[ArrayRef]| Ok(Arc::clone(&args[0]));
let myfunc = make_scalar_function(myfunc);
ctx.register_udf(create_udf(
"MY_FUNC",
vec![DataType::Int32],
Arc::new(DataType::Int32),
myfunc,
));
// doesn't work: the unquoted name is normalized to lowercase, but the UDF was registered with an uppercase name
let err = plan_and_collect(&mut ctx, "SELECT MY_FUNC(i) FROM t")
.await
.unwrap_err();
assert_eq!(
err.to_string(),
"Error during planning: Invalid function \'my_func\'"
);
// Can call it if the name is double quoted
let result = plan_and_collect(&mut ctx, "SELECT \"MY_FUNC\"(i) FROM t").await?;
let expected = vec![
"+--------------+",
"| MY_FUNC(t.i) |",
"+--------------+",
"| 1 |",
"+--------------+",
];
assert_batches_eq!(expected, &result);
Ok(())
}
#[tokio::test]
async fn case_sensitive_identifiers_aggregates() {
let mut ctx = ExecutionContext::new();
ctx.register_table("t", test::table_with_sequence(1, 1).unwrap())
.unwrap();
let expected = vec![
"+----------+",
"| MAX(t.i) |",
"+----------+",
"| 1 |",
"+----------+",
];
let results = plan_and_collect(&mut ctx, "SELECT max(i) FROM t")
.await
.unwrap();
assert_batches_sorted_eq!(expected, &results);
let results = plan_and_collect(&mut ctx, "SELECT MAX(i) FROM t")
.await
.unwrap();
assert_batches_sorted_eq!(expected, &results);
// Double quotes preserve the capitalization of the function name, making the lookup case sensitive
let err = plan_and_collect(&mut ctx, "SELECT \"MAX\"(i) FROM t")
.await
.unwrap_err();
assert_eq!(
err.to_string(),
"Error during planning: Invalid function 'MAX'"
);
let results = plan_and_collect(&mut ctx, "SELECT \"max\"(i) FROM t")
.await
.unwrap();
assert_batches_sorted_eq!(expected, &results);
}
#[tokio::test]
async fn case_sensitive_identifiers_user_defined_aggregates() -> Result<()> {
let mut ctx = ExecutionContext::new();
ctx.register_table("t", test::table_with_sequence(1, 1).unwrap())
.unwrap();
// Note the capitalization
let my_avg = create_udaf(
"MY_AVG",
DataType::Float64,
Arc::new(DataType::Float64),
Arc::new(|| Ok(Box::new(AvgAccumulator::try_new(&DataType::Float64)?))),
Arc::new(vec![DataType::UInt64, DataType::Float64]),
);
ctx.register_udaf(my_avg);
// doesn't work: the unquoted name is normalized to lowercase, but the UDAF was registered with an uppercase name
let err = plan_and_collect(&mut ctx, "SELECT MY_AVG(i) FROM t")
.await
.unwrap_err();
assert_eq!(
err.to_string(),
"Error during planning: Invalid function \'my_avg\'"
);
// Can call it if the name is double quoted
let result = plan_and_collect(&mut ctx, "SELECT \"MY_AVG\"(i) FROM t").await?;
let expected = vec![
"+-------------+",
"| MY_AVG(t.i) |",
"+-------------+",
"| 1 |",
"+-------------+",
];
assert_batches_eq!(expected, &result);
Ok(())
}
#[tokio::test]
async fn write_csv_results() -> Result<()> {
// create partitioned input file and context
let tmp_dir = TempDir::new()?;
let mut ctx = create_ctx(&tmp_dir, 4)?;
// execute a simple query and write the results to CSV
let out_dir = tmp_dir.as_ref().to_str().unwrap().to_string() + "/out";
write_csv(&mut ctx, "SELECT c1, c2 FROM test", &out_dir).await?;
// create a new context and verify that the results were saved to partitioned csv files
let mut ctx = ExecutionContext::new();
let schema = Arc::new(Schema::new(vec![
Field::new("c1", DataType::UInt32, false),
Field::new("c2", DataType::UInt64, false),
]));
// register each partition as well as the top level dir
let csv_read_option = CsvReadOptions::new().schema(&schema);
ctx.register_csv("part0", &format!("{}/part-0.csv", out_dir), csv_read_option)?;
ctx.register_csv("allparts", &out_dir, csv_read_option)?;
let part0 = plan_and_collect(&mut ctx, "SELECT c1, c2 FROM part0").await?;
let allparts = plan_and_collect(&mut ctx, "SELECT c1, c2 FROM allparts").await?;
let allparts_count: usize = allparts.iter().map(|batch| batch.num_rows()).sum();
assert_eq!(part0[0].schema(), allparts[0].schema());
assert_eq!(allparts_count, 40);
Ok(())
}
#[tokio::test]
async fn write_parquet_results() -> Result<()> {
// create partitioned input file and context
let tmp_dir = TempDir::new()?;
let mut ctx = create_ctx(&tmp_dir, 4)?;
// execute a simple query and write the results to Parquet
let out_dir = tmp_dir.as_ref().to_str().unwrap().to_string() + "/out";
write_parquet(&mut ctx, "SELECT c1, c2 FROM test", &out_dir, None).await?;
// create a new context and verify that the results were saved to partitioned parquet files
let mut ctx = ExecutionContext::new();
// register each partition as well as the top level dir
ctx.register_parquet("part0", &format!("{}/part-0.parquet", out_dir))?;
ctx.register_parquet("part1", &format!("{}/part-1.parquet", out_dir))?;
ctx.register_parquet("part2", &format!("{}/part-2.parquet", out_dir))?;
ctx.register_parquet("part3", &format!("{}/part-3.parquet", out_dir))?;
ctx.register_parquet("allparts", &out_dir)?;
let part0 = plan_and_collect(&mut ctx, "SELECT c1, c2 FROM part0").await?;
let allparts = plan_and_collect(&mut ctx, "SELECT c1, c2 FROM allparts").await?;
let allparts_count: usize = allparts.iter().map(|batch| batch.num_rows()).sum();
assert_eq!(part0[0].schema(), allparts[0].schema());
assert_eq!(allparts_count, 40);
Ok(())
}
#[tokio::test]
async fn query_csv_with_custom_partition_extension() -> Result<()> {
let tmp_dir = TempDir::new()?;
// The main stipulation of this test: use a file extension that isn't .csv.
let file_extension = ".tst";
let mut ctx = ExecutionContext::new();
let schema = populate_csv_partitions(&tmp_dir, 2, file_extension)?;
ctx.register_csv(
"test",
tmp_dir.path().to_str().unwrap(),
CsvReadOptions::new()
.schema(&schema)
.file_extension(file_extension),
)?;
let results =
plan_and_collect(&mut ctx, "SELECT SUM(c1), SUM(c2), COUNT(*) FROM test")
.await?;
assert_eq!(results.len(), 1);
let expected = vec![
"+--------------+--------------+-----------------+",
"| SUM(test.c1) | SUM(test.c2) | COUNT(UInt8(1)) |",
"+--------------+--------------+-----------------+",
"| 10 | 110 | 20 |",
"+--------------+--------------+-----------------+",
];
assert_batches_eq!(expected, &results);
Ok(())
}
#[test]
fn send_context_to_threads() -> Result<()> {
// ensure ExecutionContexts can be used in a multi-threaded
// environment. The use case is concurrent planning.
let tmp_dir = TempDir::new()?;
let partition_count = 4;
let ctx = Arc::new(Mutex::new(create_ctx(&tmp_dir, partition_count)?));
let threads: Vec<JoinHandle<Result<_>>> = (0..2)
.map(|_| ctx.clone())
.map(|ctx_clone| {
thread::spawn(move || {
let ctx = ctx_clone.lock().expect("Locked context");
// Ensure we can create a logical plan on a separate thread.
ctx.create_logical_plan(
"SELECT c1, c2 FROM test WHERE c1 > 0 AND c1 < 3",
)
})
})
.collect();
for thread in threads {
thread.join().expect("Failed to join thread")?;
}
Ok(())
}
#[test]
fn ctx_sql_should_optimize_plan() -> Result<()> {
let mut ctx = ExecutionContext::new();
let plan1 =
ctx.create_logical_plan("SELECT * FROM (SELECT 1) WHERE TRUE AND TRUE")?;
let opt_plan1 = ctx.optimize(&plan1)?;
let plan2 = ctx.sql("SELECT * FROM (SELECT 1) WHERE TRUE AND TRUE")?;
assert_eq!(
format!("{:?}", opt_plan1),
format!("{:?}", plan2.to_logical_plan())
);
Ok(())
}
#[tokio::test]
async fn scalar_udf() -> Result<()> {
let schema = Schema::new(vec![
Field::new("a", DataType::Int32, false),
Field::new("b", DataType::Int32, false),
]);
let batch = RecordBatch::try_new(
Arc::new(schema.clone()),
vec![
Arc::new(Int32Array::from(vec![1, 10, 10, 100])),
Arc::new(Int32Array::from(vec![2, 12, 12, 120])),
],
)?;
let mut ctx = ExecutionContext::new();
let provider = MemTable::try_new(Arc::new(schema), vec![vec![batch]])?;
ctx.register_table("t", Arc::new(provider))?;
let myfunc = |args: &[ArrayRef]| {
let l = &args[0]
.as_any()
.downcast_ref::<Int32Array>()
.expect("cast failed");
let r = &args[1]
.as_any()
.downcast_ref::<Int32Array>()
.expect("cast failed");
Ok(Arc::new(add(l, r)?) as ArrayRef)
};
let myfunc = make_scalar_function(myfunc);
ctx.register_udf(create_udf(
"my_add",
vec![DataType::Int32, DataType::Int32],
Arc::new(DataType::Int32),
myfunc,
));
// from here on, we may be in a different scope. We would still like to be able
// to call UDFs.
let t = ctx.table("t")?;
let plan = LogicalPlanBuilder::from(t.to_logical_plan())
.project(vec![
col("a"),
col("b"),
ctx.udf("my_add")?.call(vec![col("a"), col("b")]),
])?
.build()?;
assert_eq!(
format!("{:?}", plan),
"Projection: #t.a, #t.b, my_add(#t.a, #t.b)\n TableScan: t projection=None"
);
let plan = ctx.optimize(&plan)?;
let plan = ctx.create_physical_plan(&plan)?;
let result = collect(plan).await?;
let expected = vec![
"+-----+-----+-----------------+",
"| a | b | my_add(t.a,t.b) |",
"+-----+-----+-----------------+",
"| 1 | 2 | 3 |",
"| 10 | 12 | 22 |",
"| 10 | 12 | 22 |",
"| 100 | 120 | 220 |",
"+-----+-----+-----------------+",
];
assert_batches_eq!(expected, &result);
let batch = &result[0];
let a = batch
.column(0)
.as_any()
.downcast_ref::<Int32Array>()
.expect("failed to cast a");
let b = batch
.column(1)
.as_any()
.downcast_ref::<Int32Array>()
.expect("failed to cast b");
let sum = batch
.column(2)
.as_any()
.downcast_ref::<Int32Array>()
.expect("failed to cast sum");
assert_eq!(4, a.len());
assert_eq!(4, b.len());
assert_eq!(4, sum.len());
for i in 0..sum.len() {
assert_eq!(a.value(i) + b.value(i), sum.value(i));
}
ctx.deregister_table("t")?;
Ok(())
}
#[tokio::test]
async fn simple_avg() -> Result<()> {
let schema = Schema::new(vec![Field::new("a", DataType::Int32, false)]);
let batch1 = RecordBatch::try_new(
Arc::new(schema.clone()),
vec![Arc::new(Int32Array::from(vec![1, 2, 3]))],
)?;
let batch2 = RecordBatch::try_new(
Arc::new(schema.clone()),
vec![Arc::new(Int32Array::from(vec![4, 5]))],
)?;
let mut ctx = ExecutionContext::new();
let provider =
MemTable::try_new(Arc::new(schema), vec![vec![batch1], vec![batch2]])?;
ctx.register_table("t", Arc::new(provider))?;
let result = plan_and_collect(&mut ctx, "SELECT AVG(a) FROM t").await?;
let batch = &result[0];
assert_eq!(1, batch.num_columns());
assert_eq!(1, batch.num_rows());
let values = batch
.column(0)
.as_any()
.downcast_ref::<Float64Array>()
.expect("failed to cast version");
assert_eq!(values.len(), 1);
// avg(1,2,3,4,5) = 3.0
assert_eq!(values.value(0), 3.0_f64);
Ok(())
}
/// tests the creation, registration and usage of a UDAF
#[tokio::test]
async fn simple_udaf() -> Result<()> {
let schema = Schema::new(vec![Field::new("a", DataType::Int32, false)]);
let batch1 = RecordBatch::try_new(
Arc::new(schema.clone()),
vec![Arc::new(Int32Array::from(vec![1, 2, 3]))],
)?;
let batch2 = RecordBatch::try_new(
Arc::new(schema.clone()),
vec![Arc::new(Int32Array::from(vec![4, 5]))],
)?;
let mut ctx = ExecutionContext::new();
let provider =
MemTable::try_new(Arc::new(schema), vec![vec![batch1], vec![batch2]])?;
ctx.register_table("t", Arc::new(provider))?;
// define a UDAF, using one of DataFusion's built-in accumulators
let my_avg = create_udaf(
"my_avg",
DataType::Float64,
Arc::new(DataType::Float64),
Arc::new(|| Ok(Box::new(AvgAccumulator::try_new(&DataType::Float64)?))),
Arc::new(vec![DataType::UInt64, DataType::Float64]),
);
ctx.register_udaf(my_avg);
let result = plan_and_collect(&mut ctx, "SELECT MY_AVG(a) FROM t").await?;
let expected = vec![
"+-------------+",
"| my_avg(t.a) |",
"+-------------+",
"| 3 |",
"+-------------+",
];
assert_batches_eq!(expected, &result);
Ok(())
}
#[tokio::test]
async fn custom_query_planner() -> Result<()> {
let mut ctx = ExecutionContext::with_config(
ExecutionConfig::new().with_query_planner(Arc::new(MyQueryPlanner {})),
);
let df = ctx.sql("SELECT 1")?;
df.collect().await.expect_err("query not supported");
Ok(())
}
#[tokio::test]
async fn information_schema_tables_not_exist_by_default() {
let mut ctx = ExecutionContext::new();
let err = plan_and_collect(&mut ctx, "SELECT * from information_schema.tables")
.await
.unwrap_err();
assert_eq!(
err.to_string(),
"Error during planning: Table or CTE with name 'information_schema.tables' not found"
);
}
#[tokio::test]
async fn information_schema_tables_no_tables() {
let mut ctx = ExecutionContext::with_config(
ExecutionConfig::new().with_information_schema(true),
);
let result =
plan_and_collect(&mut ctx, "SELECT * from information_schema.tables")
.await
.unwrap();
let expected = vec![
"+---------------+--------------------+------------+------------+",
"| table_catalog | table_schema | table_name | table_type |",
"+---------------+--------------------+------------+------------+",
"| datafusion | information_schema | columns | VIEW |",
"| datafusion | information_schema | tables | VIEW |",
"+---------------+--------------------+------------+------------+",
];
assert_batches_sorted_eq!(expected, &result);
}
#[tokio::test]
async fn information_schema_tables_tables_default_catalog() {
let mut ctx = ExecutionContext::with_config(
ExecutionConfig::new().with_information_schema(true),
);
// Now, register a table
ctx.register_table("t", test::table_with_sequence(1, 1).unwrap())
.unwrap();
let result =
plan_and_collect(&mut ctx, "SELECT * from information_schema.tables")
.await
.unwrap();
let expected = vec![
"+---------------+--------------------+------------+------------+",
"| table_catalog | table_schema | table_name | table_type |",
"+---------------+--------------------+------------+------------+",
"| datafusion | information_schema | tables | VIEW |",
"| datafusion | information_schema | columns | VIEW |",
"| datafusion | public | t | BASE TABLE |",
"+---------------+--------------------+------------+------------+",
];
assert_batches_sorted_eq!(expected, &result);
// Newly added tables should appear
ctx.register_table("t2", test::table_with_sequence(1, 1).unwrap())
.unwrap();
let result =
plan_and_collect(&mut ctx, "SELECT * from information_schema.tables")
.await
.unwrap();
let expected = vec![
"+---------------+--------------------+------------+------------+",
"| table_catalog | table_schema | table_name | table_type |",
"+---------------+--------------------+------------+------------+",
"| datafusion | information_schema | columns | VIEW |",
"| datafusion | information_schema | tables | VIEW |",
"| datafusion | public | t | BASE TABLE |",
"| datafusion | public | t2 | BASE TABLE |",
"+---------------+--------------------+------------+------------+",
];
assert_batches_sorted_eq!(expected, &result);
}
#[tokio::test]
async fn information_schema_tables_tables_with_multiple_catalogs() {
let mut ctx = ExecutionContext::with_config(
ExecutionConfig::new().with_information_schema(true),
);
let catalog = MemoryCatalogProvider::new();
let schema = MemorySchemaProvider::new();
schema
.register_table("t1".to_owned(), test::table_with_sequence(1, 1).unwrap())
.unwrap();
schema
.register_table("t2".to_owned(), test::table_with_sequence(1, 1).unwrap())
.unwrap();
catalog.register_schema("my_schema", Arc::new(schema));
ctx.register_catalog("my_catalog", Arc::new(catalog));
let catalog = MemoryCatalogProvider::new();
let schema = MemorySchemaProvider::new();
schema
.register_table("t3".to_owned(), test::table_with_sequence(1, 1).unwrap())
.unwrap();
catalog.register_schema("my_other_schema", Arc::new(schema));
ctx.register_catalog("my_other_catalog", Arc::new(catalog));
let result =
plan_and_collect(&mut ctx, "SELECT * from information_schema.tables")
.await
.unwrap();
let expected = vec![
"+------------------+--------------------+------------+------------+",
"| table_catalog | table_schema | table_name | table_type |",
"+------------------+--------------------+------------+------------+",
"| datafusion | information_schema | columns | VIEW |",
"| datafusion | information_schema | tables | VIEW |",
"| my_catalog | information_schema | columns | VIEW |",
"| my_catalog | information_schema | tables | VIEW |",
"| my_catalog | my_schema | t1 | BASE TABLE |",
"| my_catalog | my_schema | t2 | BASE TABLE |",
"| my_other_catalog | information_schema | columns | VIEW |",
"| my_other_catalog | information_schema | tables | VIEW |",
"| my_other_catalog | my_other_schema | t3 | BASE TABLE |",
"+------------------+--------------------+------------+------------+",
];
assert_batches_sorted_eq!(expected, &result);
}
#[tokio::test]
async fn information_schema_tables_table_types() {
struct TestTable(TableType);
impl TableProvider for TestTable {
fn as_any(&self) -> &dyn std::any::Any {
self
}
fn table_type(&self) -> TableType {
self.0
}
fn schema(&self) -> SchemaRef {
unimplemented!()
}
fn scan(
&self,
_: &Option<Vec<usize>>,
_: usize,
_: &[Expr],
_: Option<usize>,
) -> Result<Arc<dyn ExecutionPlan>> {
unimplemented!()
}
}
let mut ctx = ExecutionContext::with_config(
ExecutionConfig::new().with_information_schema(true),
);
ctx.register_table("physical", Arc::new(TestTable(TableType::Base)))
.unwrap();
ctx.register_table("query", Arc::new(TestTable(TableType::View)))
.unwrap();
ctx.register_table("temp", Arc::new(TestTable(TableType::Temporary)))
.unwrap();
let result =
plan_and_collect(&mut ctx, "SELECT * from information_schema.tables")
.await
.unwrap();
let expected = vec![
"+---------------+--------------------+------------+-----------------+",
"| table_catalog | table_schema | table_name | table_type |",
"+---------------+--------------------+------------+-----------------+",
"| datafusion | information_schema | tables | VIEW |",
"| datafusion | information_schema | columns | VIEW |",
"| datafusion | public | physical | BASE TABLE |",
"| datafusion | public | query | VIEW |",
"| datafusion | public | temp | LOCAL TEMPORARY |",
"+---------------+--------------------+------------+-----------------+",
];
assert_batches_sorted_eq!(expected, &result);
}
#[tokio::test]
async fn information_schema_show_tables_no_information_schema() {
let mut ctx = ExecutionContext::with_config(ExecutionConfig::new());
ctx.register_table("t", test::table_with_sequence(1, 1).unwrap())
.unwrap();
// use show tables alias
let err = plan_and_collect(&mut ctx, "SHOW TABLES").await.unwrap_err();
assert_eq!(err.to_string(), "Error during planning: SHOW TABLES is not supported unless information_schema is enabled");
}
#[tokio::test]
async fn information_schema_show_tables() {
let mut ctx = ExecutionContext::with_config(
ExecutionConfig::new().with_information_schema(true),
);
ctx.register_table("t", test::table_with_sequence(1, 1).unwrap())
.unwrap();
// use show tables alias
let result = plan_and_collect(&mut ctx, "SHOW TABLES").await.unwrap();
let expected = vec![
"+---------------+--------------------+------------+------------+",
"| table_catalog | table_schema | table_name | table_type |",
"+---------------+--------------------+------------+------------+",
"| datafusion | information_schema | columns | VIEW |",
"| datafusion | information_schema | tables | VIEW |",
"| datafusion | public | t | BASE TABLE |",
"+---------------+--------------------+------------+------------+",
];
assert_batches_sorted_eq!(expected, &result);
let result = plan_and_collect(&mut ctx, "SHOW tables").await.unwrap();
assert_batches_sorted_eq!(expected, &result);
}
#[tokio::test]
async fn information_schema_show_columns_no_information_schema() {
let mut ctx = ExecutionContext::with_config(ExecutionConfig::new());
ctx.register_table("t", test::table_with_sequence(1, 1).unwrap())
.unwrap();
let err = plan_and_collect(&mut ctx, "SHOW COLUMNS FROM t")
.await
.unwrap_err();
assert_eq!(err.to_string(), "Error during planning: SHOW COLUMNS is not supported unless information_schema is enabled");
}
#[tokio::test]
async fn information_schema_show_columns_like_where() {
let mut ctx = ExecutionContext::with_config(ExecutionConfig::new());
ctx.register_table("t", test::table_with_sequence(1, 1).unwrap())
.unwrap();
let expected =
"Error during planning: SHOW COLUMNS with WHERE or LIKE is not supported";
let err = plan_and_collect(&mut ctx, "SHOW COLUMNS FROM t LIKE 'f'")
.await
.unwrap_err();
assert_eq!(err.to_string(), expected);
let err =
plan_and_collect(&mut ctx, "SHOW COLUMNS FROM t WHERE column_name = 'bar'")
.await
.unwrap_err();
assert_eq!(err.to_string(), expected);
}
#[tokio::test]
async fn information_schema_show_columns() {
let mut ctx = ExecutionContext::with_config(
ExecutionConfig::new().with_information_schema(true),
);
ctx.register_table("t", test::table_with_sequence(1, 1).unwrap())
.unwrap();
let result = plan_and_collect(&mut ctx, "SHOW COLUMNS FROM t")
.await
.unwrap();
let expected = vec![
"+---------------+--------------+------------+-------------+-----------+-------------+",
"| table_catalog | table_schema | table_name | column_name | data_type | is_nullable |",
"+---------------+--------------+------------+-------------+-----------+-------------+",
"| datafusion | public | t | i | Int32 | YES |",
"+---------------+--------------+------------+-------------+-----------+-------------+",
];
assert_batches_sorted_eq!(expected, &result);
let result = plan_and_collect(&mut ctx, "SHOW columns from t")
.await
.unwrap();
assert_batches_sorted_eq!(expected, &result);
// This isn't ideal but it is consistent behavior for `SELECT * from T`
let err = plan_and_collect(&mut ctx, "SHOW columns from T")
.await
.unwrap_err();
assert_eq!(
err.to_string(),
"Error during planning: Unknown relation for SHOW COLUMNS: T"
);
}
// test the SHOW FULL COLUMNS and SHOW EXTENDED COLUMNS variants
#[tokio::test]
async fn information_schema_show_columns_full_extended() {
let mut ctx = ExecutionContext::with_config(
ExecutionConfig::new().with_information_schema(true),
);
ctx.register_table("t", test::table_with_sequence(1, 1).unwrap())
.unwrap();
let result = plan_and_collect(&mut ctx, "SHOW FULL COLUMNS FROM t")
.await
.unwrap();
let expected = vec![
"+---------------+--------------+------------+-------------+------------------+----------------+-------------+-----------+--------------------------+------------------------+-------------------+-------------------------+---------------+--------------------+---------------+",
"| table_catalog | table_schema | table_name | column_name | ordinal_position | column_default | is_nullable | data_type | character_maximum_length | character_octet_length | numeric_precision | numeric_precision_radix | numeric_scale | datetime_precision | interval_type |",
"+---------------+--------------+------------+-------------+------------------+----------------+-------------+-----------+--------------------------+------------------------+-------------------+-------------------------+---------------+--------------------+---------------+",
"| datafusion | public | t | i | 0 | | YES | Int32 | | | 32 | 2 | | | |",
"+---------------+--------------+------------+-------------+------------------+----------------+-------------+-----------+--------------------------+------------------------+-------------------+-------------------------+---------------+--------------------+---------------+",
];
assert_batches_sorted_eq!(expected, &result);
let result = plan_and_collect(&mut ctx, "SHOW EXTENDED COLUMNS FROM t")
.await
.unwrap();
assert_batches_sorted_eq!(expected, &result);
}
#[tokio::test]
async fn information_schema_show_table_table_names() {
let mut ctx = ExecutionContext::with_config(
ExecutionConfig::new().with_information_schema(true),
);
ctx.register_table("t", test::table_with_sequence(1, 1).unwrap())
.unwrap();
let result = plan_and_collect(&mut ctx, "SHOW COLUMNS FROM public.t")
.await
.unwrap();
let expected = vec![
"+---------------+--------------+------------+-------------+-----------+-------------+",
"| table_catalog | table_schema | table_name | column_name | data_type | is_nullable |",
"+---------------+--------------+------------+-------------+-----------+-------------+",
"| datafusion | public | t | i | Int32 | YES |",
"+---------------+--------------+------------+-------------+-----------+-------------+",
];
assert_batches_sorted_eq!(expected, &result);
let result = plan_and_collect(&mut ctx, "SHOW columns from datafusion.public.t")
.await
.unwrap();
assert_batches_sorted_eq!(expected, &result);
let err = plan_and_collect(&mut ctx, "SHOW columns from t2")
.await
.unwrap_err();
assert_eq!(
err.to_string(),
"Error during planning: Unknown relation for SHOW COLUMNS: t2"
);
let err = plan_and_collect(&mut ctx, "SHOW columns from datafusion.public.t2")
.await
.unwrap_err();
assert_eq!(err.to_string(), "Error during planning: Unknown relation for SHOW COLUMNS: datafusion.public.t2");
}
#[tokio::test]
async fn show_unsupported() {
let mut ctx = ExecutionContext::with_config(ExecutionConfig::new());
let err = plan_and_collect(&mut ctx, "SHOW SOMETHING_UNKNOWN")
.await
.unwrap_err();
assert_eq!(err.to_string(), "This feature is not implemented: SHOW SOMETHING_UNKNOWN not implemented. Supported syntax: SHOW <TABLES>");
}
#[tokio::test]
async fn information_schema_columns_not_exist_by_default() {
let mut ctx = ExecutionContext::new();
let err = plan_and_collect(&mut ctx, "SELECT * from information_schema.columns")
.await
.unwrap_err();
assert_eq!(
err.to_string(),
"Error during planning: Table or CTE with name 'information_schema.columns' not found"
);
}
fn table_with_many_types() -> Arc<dyn TableProvider> {
let schema = Schema::new(vec![
Field::new("int32_col", DataType::Int32, false),
Field::new("float64_col", DataType::Float64, true),
Field::new("utf8_col", DataType::Utf8, true),
Field::new("large_utf8_col", DataType::LargeUtf8, false),
Field::new("binary_col", DataType::Binary, false),
Field::new("large_binary_col", DataType::LargeBinary, false),
Field::new(
"timestamp_nanos",
DataType::Timestamp(TimeUnit::Nanosecond, None),
false,
),
]);
let batch = RecordBatch::try_new(
Arc::new(schema.clone()),
vec![
Arc::new(Int32Array::from(vec![1])),
Arc::new(Float64Array::from(vec![1.0])),
Arc::new(StringArray::from(vec![Some("foo")])),
Arc::new(LargeStringArray::from(vec![Some("bar")])),
Arc::new(BinaryArray::from(vec![b"foo" as &[u8]])),
Arc::new(LargeBinaryArray::from(vec![b"foo" as &[u8]])),
Arc::new(TimestampNanosecondArray::from_opt_vec(
vec![Some(123)],
None,
)),
],
)
.unwrap();
let provider = MemTable::try_new(Arc::new(schema), vec![vec![batch]]).unwrap();
Arc::new(provider)
}
#[tokio::test]
async fn information_schema_columns() {
let mut ctx = ExecutionContext::with_config(
ExecutionConfig::new().with_information_schema(true),
);
let catalog = MemoryCatalogProvider::new();
let schema = MemorySchemaProvider::new();
schema
.register_table("t1".to_owned(), test::table_with_sequence(1, 1).unwrap())
.unwrap();
schema
.register_table("t2".to_owned(), table_with_many_types())
.unwrap();
catalog.register_schema("my_schema", Arc::new(schema));
ctx.register_catalog("my_catalog", Arc::new(catalog));
let result =
plan_and_collect(&mut ctx, "SELECT * from information_schema.columns")
.await
.unwrap();
let expected = vec![
"+---------------+--------------+------------+------------------+------------------+----------------+-------------+-----------------------------+--------------------------+------------------------+-------------------+-------------------------+---------------+--------------------+---------------+",
"| table_catalog | table_schema | table_name | column_name | ordinal_position | column_default | is_nullable | data_type | character_maximum_length | character_octet_length | numeric_precision | numeric_precision_radix | numeric_scale | datetime_precision | interval_type |",
"+---------------+--------------+------------+------------------+------------------+----------------+-------------+-----------------------------+--------------------------+------------------------+-------------------+-------------------------+---------------+--------------------+---------------+",
"| my_catalog | my_schema | t1 | i | 0 | | YES | Int32 | | | 32 | 2 | | | |",
"| my_catalog | my_schema | t2 | binary_col | 4 | | NO | Binary | | 2147483647 | | | | | |",
"| my_catalog | my_schema | t2 | float64_col | 1 | | YES | Float64 | | | 24 | 2 | | | |",
"| my_catalog | my_schema | t2 | int32_col | 0 | | NO | Int32 | | | 32 | 2 | | | |",
"| my_catalog | my_schema | t2 | large_binary_col | 5 | | NO | LargeBinary | | 9223372036854775807 | | | | | |",
"| my_catalog | my_schema | t2 | large_utf8_col | 3 | | NO | LargeUtf8 | | 9223372036854775807 | | | | | |",
"| my_catalog | my_schema | t2 | timestamp_nanos | 6 | | NO | Timestamp(Nanosecond, None) | | | | | | | |",
"| my_catalog | my_schema | t2 | utf8_col | 2 | | YES | Utf8 | | 2147483647 | | | | | |",
"+---------------+--------------+------------+------------------+------------------+----------------+-------------+-----------------------------+--------------------------+------------------------+-------------------+-------------------------+---------------+--------------------+---------------+",
];
assert_batches_sorted_eq!(expected, &result);
}
#[tokio::test]
async fn disabled_default_catalog_and_schema() -> Result<()> {
let mut ctx = ExecutionContext::with_config(
ExecutionConfig::new().create_default_catalog_and_schema(false),
);
assert!(matches!(
ctx.register_table("test", test::table_with_sequence(1, 1)?),
Err(DataFusionError::Plan(_))
));
assert!(matches!(
ctx.sql("select * from datafusion.public.test"),
Err(DataFusionError::Plan(_))
));
Ok(())
}
#[tokio::test]
async fn custom_catalog_and_schema() -> Result<()> {
let mut ctx = ExecutionContext::with_config(
ExecutionConfig::new()
.create_default_catalog_and_schema(false)
.with_default_catalog_and_schema("my_catalog", "my_schema"),
);
let catalog = MemoryCatalogProvider::new();
let schema = MemorySchemaProvider::new();
schema.register_table("test".to_owned(), test::table_with_sequence(1, 1)?)?;
catalog.register_schema("my_schema", Arc::new(schema));
ctx.register_catalog("my_catalog", Arc::new(catalog));
for table_ref in &["my_catalog.my_schema.test", "my_schema.test", "test"] {
let result = plan_and_collect(
&mut ctx,
&format!("SELECT COUNT(*) AS count FROM {}", table_ref),
)
.await?;
let expected = vec![
"+-------+",
"| count |",
"+-------+",
"| 1 |",
"+-------+",
];
assert_batches_eq!(expected, &result);
}
Ok(())
}
#[tokio::test]
async fn cross_catalog_access() -> Result<()> {
let mut ctx = ExecutionContext::new();
let catalog_a = MemoryCatalogProvider::new();
let schema_a = MemorySchemaProvider::new();
schema_a
.register_table("table_a".to_owned(), test::table_with_sequence(1, 1)?)?;
catalog_a.register_schema("schema_a", Arc::new(schema_a));
ctx.register_catalog("catalog_a", Arc::new(catalog_a));
let catalog_b = MemoryCatalogProvider::new();
let schema_b = MemorySchemaProvider::new();
schema_b
.register_table("table_b".to_owned(), test::table_with_sequence(1, 2)?)?;
catalog_b.register_schema("schema_b", Arc::new(schema_b));
ctx.register_catalog("catalog_b", Arc::new(catalog_b));
let result = plan_and_collect(
&mut ctx,
"SELECT cat, SUM(i) AS total FROM (
SELECT i, 'a' AS cat FROM catalog_a.schema_a.table_a
UNION ALL
SELECT i, 'b' AS cat FROM catalog_b.schema_b.table_b
)
GROUP BY cat
ORDER BY cat
",
)
.await?;
let expected = vec![
"+-----+-------+",
"| cat | total |",
"+-----+-------+",
"| a | 1 |",
"| b | 3 |",
"+-----+-------+",
];
assert_batches_eq!(expected, &result);
Ok(())
}
#[tokio::test]
async fn create_external_table_with_timestamps() {
let mut ctx = ExecutionContext::new();
let data = "Jorge,2018-12-13T12:12:10.011\n\
Andrew,2018-11-13T17:11:10.011";
let tmp_dir = TempDir::new().unwrap();
let file_path = tmp_dir.path().join("timestamps.csv");
// scope to ensure the file is closed and written
{
File::create(&file_path)
.expect("creating temp file")
.write_all(data.as_bytes())
.expect("writing data");
}
let sql = format!(
"CREATE EXTERNAL TABLE csv_with_timestamps (
name VARCHAR,
ts TIMESTAMP
)
STORED AS CSV
LOCATION '{}'
",
file_path.to_str().expect("path is utf8")
);
plan_and_collect(&mut ctx, &sql)
.await
.expect("Executing CREATE EXTERNAL TABLE");
let sql = "SELECT * from csv_with_timestamps";
let result = plan_and_collect(&mut ctx, sql).await.unwrap();
let expected = vec![
"+--------+-------------------------+",
"| name | ts |",
"+--------+-------------------------+",
"| Andrew | 2018-11-13 17:11:10.011 |",
"| Jorge | 2018-12-13 12:12:10.011 |",
"+--------+-------------------------+",
];
assert_batches_sorted_eq!(expected, &result);
}
#[tokio::test]
async fn query_empty_table() {
let mut ctx = ExecutionContext::new();
let empty_table = Arc::new(EmptyTable::new(Arc::new(Schema::empty())));
ctx.register_table("test_tbl", empty_table).unwrap();
let sql = "SELECT * FROM test_tbl";
let result = plan_and_collect(&mut ctx, sql)
.await
.expect("Query empty table");
let expected = vec!["++", "++"];
assert_batches_sorted_eq!(expected, &result);
}
#[tokio::test]
async fn catalogs_not_leaked() {
// the information schema used to introduce cyclic Arcs
let ctx = ExecutionContext::with_config(
ExecutionConfig::new().with_information_schema(true),
);
// register a single catalog
let catalog = Arc::new(MemoryCatalogProvider::new());
let catalog_weak = Arc::downgrade(&catalog);
ctx.register_catalog("my_catalog", catalog);
let catalog_list_weak = {
let state = ctx.state.lock().unwrap();
Arc::downgrade(&state.catalog_list)
};
drop(ctx);
assert_eq!(Weak::strong_count(&catalog_list_weak), 0);
assert_eq!(Weak::strong_count(&catalog_weak), 0);
}
struct MyPhysicalPlanner {}
impl PhysicalPlanner for MyPhysicalPlanner {
fn create_physical_plan(
&self,
_logical_plan: &LogicalPlan,
_ctx_state: &ExecutionContextState,
) -> Result<Arc<dyn ExecutionPlan>> {
Err(DataFusionError::NotImplemented(
"query not supported".to_string(),
))
}
fn create_physical_expr(
&self,
_expr: &Expr,
_input_dfschema: &crate::logical_plan::DFSchema,
_input_schema: &Schema,
_ctx_state: &ExecutionContextState,
) -> Result<Arc<dyn crate::physical_plan::PhysicalExpr>> {
unimplemented!()
}
}
struct MyQueryPlanner {}
impl QueryPlanner for MyQueryPlanner {
fn create_physical_plan(
&self,
logical_plan: &LogicalPlan,
ctx_state: &ExecutionContextState,
) -> Result<Arc<dyn ExecutionPlan>> {
let physical_planner = MyPhysicalPlanner {};
physical_planner.create_physical_plan(logical_plan, ctx_state)
}
}
/// Execute SQL and return results
async fn plan_and_collect(
ctx: &mut ExecutionContext,
sql: &str,
) -> Result<Vec<RecordBatch>> {
ctx.sql(sql)?.collect().await
}
/// Execute SQL against a freshly created context and return results
async fn execute(sql: &str, partition_count: usize) -> Result<Vec<RecordBatch>> {
let tmp_dir = TempDir::new()?;
let mut ctx = create_ctx(&tmp_dir, partition_count)?;
plan_and_collect(&mut ctx, sql).await
}
/// Execute SQL and write results to partitioned csv files
async fn write_csv(
ctx: &mut ExecutionContext,
sql: &str,
out_dir: &str,
) -> Result<()> {
let logical_plan = ctx.create_logical_plan(sql)?;
let logical_plan = ctx.optimize(&logical_plan)?;
let physical_plan = ctx.create_physical_plan(&logical_plan)?;
ctx.write_csv(physical_plan, out_dir.to_string()).await
}
/// Execute SQL and write results to partitioned parquet files
async fn write_parquet(
ctx: &mut ExecutionContext,
sql: &str,
out_dir: &str,
writer_properties: Option<WriterProperties>,
) -> Result<()> {
let logical_plan = ctx.create_logical_plan(sql)?;
let logical_plan = ctx.optimize(&logical_plan)?;
let physical_plan = ctx.create_physical_plan(&logical_plan)?;
ctx.write_parquet(physical_plan, out_dir.to_string(), writer_properties)
.await
}
/// Generate CSV partitions within the supplied directory
fn populate_csv_partitions(
tmp_dir: &TempDir,
partition_count: usize,
file_extension: &str,
) -> Result<SchemaRef> {
// define schema for data source (csv file)
let schema = Arc::new(Schema::new(vec![
Field::new("c1", DataType::UInt32, false),
Field::new("c2", DataType::UInt64, false),
Field::new("c3", DataType::Boolean, false),
]));
// generate a partitioned file
for partition in 0..partition_count {
let filename = format!("partition-{}.{}", partition, file_extension);
let file_path = tmp_dir.path().join(&filename);
let mut file = File::create(file_path)?;
// generate some data
for i in 0..=10 {
let data = format!("{},{},{}\n", partition, i, i % 2 == 0);
file.write_all(data.as_bytes())?;
}
}
Ok(schema)
}
/// Generate partitioned CSV files and register them with an execution context
fn create_ctx(tmp_dir: &TempDir, partition_count: usize) -> Result<ExecutionContext> {
let mut ctx = ExecutionContext::with_config(
ExecutionConfig::new().with_target_partitions(8),
);
let schema = populate_csv_partitions(tmp_dir, partition_count, ".csv")?;
// register csv file with the execution context
ctx.register_csv(
"test",
tmp_dir.path().to_str().unwrap(),
CsvReadOptions::new().schema(&schema),
)?;
Ok(ctx)
}
}
|
{
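// capture the wall-clock time at which query execution starts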
ExecutionProps {
query_execution_start_time: chrono::Utc::now(),
}
}
|
plot_hue.py
|
# -*- coding: utf-8 -*-
"""
=============================
|
Grouping data by category
=============================
When plotting, it's useful to have a way to color points by some category or
variable. Hypertools does this using the `hue` kwarg, which takes a list
of string category labels or numerical values. If text labels are passed, the
data is restructured according to those labels and plotted in different colors
according to your color palette. If numerical values are passed, the values
are binned (default resolution: 100) and plotted according to your color
palette.
"""
# Code source: Andrew Heusser
# License: MIT
# import
import hypertools as hyp
import numpy as np
# load example data
geo = hyp.load('weights_sample')
data = geo.get_data()
# simulate random groups
hue = []
for i in data:
    # one random integer label per observation
    hue.append([int(np.random.randint(1000)) for _ in i])
# plot
geo.plot(fmt='.', hue=hue)
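# A hypothetical variant (not in the original example): `hue` also accepts
# string category labels, which group points by name rather than by binned
# numeric value.
# labels = [['group_a' if n % 2 == 0 else 'group_b' for n in range(len(i))] for i in data]
# geo.plot(fmt='.', hue=labels)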
| |
p22.py
|
def read_from_file(filename):
|
def string_of_names_to_array(string_of_names):
return string_of_names.replace('"','').split(",")
def get_alpha_value(string):
sum = 0
for char in string:
sum += (ord(char) - 64)
return sum
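# Worked example from the problem statement: once the list is sorted, COLIN is
# the 938th name with alphabetical value 3 + 15 + 12 + 9 + 14 = 53, so it
# contributes 938 * 53 = 49714 to the total.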
def main():
sum = 0
filename = "p022_names.txt"
string_of_names = read_from_file(filename)
array_of_names = string_of_names_to_array(string_of_names)
array_of_names.sort()
for i in range(0, len(array_of_names)):
sum += (i+1) * get_alpha_value(array_of_names[i])
return sum
def test_get_alpha_value():
assert get_alpha_value("COLIN") == 53
assert get_alpha_value("A") == 1
assert get_alpha_value("Z") == 26
def test():
test_get_alpha_value()
if __name__ == '__main__':
test()
    print(main())
|
with open(filename) as txt:
    string_of_names = txt.read()
return string_of_names
|
Address.rs
|
#![allow(unused_imports, non_camel_case_types)]
use crate::models::r4b::Element::Element;
use crate::models::r4b::Extension::Extension;
use crate::models::r4b::Period::Period;
use serde_json::json;
|
use serde_json::value::Value;
use std::borrow::Cow;
/// An address expressed using postal conventions (as opposed to GPS or other location
/// definition formats). This data type may be used to convey addresses for use in
/// delivering mail as well as for visiting locations which might not be valid for
/// mail delivery. There are a variety of postal address formats defined around the
/// world.
#[derive(Debug)]
pub struct Address<'a> {
pub(crate) value: Cow<'a, Value>,
}
impl Address<'_> {
pub fn new(value: &Value) -> Address {
Address {
value: Cow::Borrowed(value),
}
}
pub fn to_json(&self) -> Value {
(*self.value).clone()
}
/// Extensions for city
pub fn _city(&self) -> Option<Element> {
if let Some(val) = self.value.get("_city") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for country
pub fn _country(&self) -> Option<Element> {
if let Some(val) = self.value.get("_country") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for district
pub fn _district(&self) -> Option<Element> {
if let Some(val) = self.value.get("_district") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for line
pub fn _line(&self) -> Option<Vec<Element>> {
if let Some(Value::Array(val)) = self.value.get("_line") {
return Some(
val.into_iter()
.map(|e| Element {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Extensions for postalCode
pub fn _postal_code(&self) -> Option<Element> {
if let Some(val) = self.value.get("_postalCode") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for state
pub fn _state(&self) -> Option<Element> {
if let Some(val) = self.value.get("_state") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for text
pub fn _text(&self) -> Option<Element> {
if let Some(val) = self.value.get("_text") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for type
pub fn _type(&self) -> Option<Element> {
if let Some(val) = self.value.get("_type") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for use
pub fn _use(&self) -> Option<Element> {
if let Some(val) = self.value.get("_use") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// The name of the city, town, suburb, village or other community or delivery center.
pub fn city(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("city") {
return Some(string);
}
return None;
}
/// Country - a nation as commonly understood or generally accepted.
pub fn country(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("country") {
return Some(string);
}
return None;
}
/// The name of the administrative area (county).
pub fn district(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("district") {
return Some(string);
}
return None;
}
/// May be used to represent additional information that is not part of the basic
/// definition of the element. To make the use of extensions safe and manageable,
/// there is a strict set of governance applied to the definition and use of
/// extensions. Though any implementer can define an extension, there is a set of
/// requirements that SHALL be met as part of the definition of the extension.
pub fn extension(&self) -> Option<Vec<Extension>> {
if let Some(Value::Array(val)) = self.value.get("extension") {
return Some(
val.into_iter()
.map(|e| Extension {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Unique id for the element within a resource (for internal references). This may be
/// any string value that does not contain spaces.
pub fn id(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("id") {
return Some(string);
}
return None;
}
/// This component contains the house number, apartment number, street name, street
/// direction, P.O. Box number, delivery hints, and similar address information.
pub fn line(&self) -> Option<Vec<&str>> {
if let Some(Value::Array(val)) = self.value.get("line") {
return Some(
val.into_iter()
.map(|e| e.as_str().unwrap())
.collect::<Vec<_>>(),
);
}
return None;
}
/// Time period when address was/is in use.
pub fn period(&self) -> Option<Period> {
if let Some(val) = self.value.get("period") {
return Some(Period {
value: Cow::Borrowed(val),
});
}
return None;
}
/// A postal code designating a region defined by the postal service.
pub fn postal_code(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("postalCode") {
return Some(string);
}
return None;
}
/// Sub-unit of a country with limited sovereignty in a federally organized country. A
/// code may be used if codes are in common use (e.g. US 2 letter state codes).
pub fn state(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("state") {
return Some(string);
}
return None;
}
/// Specifies the entire address as it should be displayed e.g. on a postal label.
/// This may be provided instead of or as well as the specific parts.
pub fn text(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("text") {
return Some(string);
}
return None;
}
/// Distinguishes between physical addresses (those you can visit) and mailing
/// addresses (e.g. PO Boxes and care-of addresses). Most addresses are both.
pub fn fhir_type(&self) -> Option<AddressType> {
if let Some(Value::String(val)) = self.value.get("type") {
return Some(AddressType::from_string(&val).unwrap());
}
return None;
}
/// The purpose of this address.
pub fn fhir_use(&self) -> Option<AddressUse> {
if let Some(Value::String(val)) = self.value.get("use") {
return Some(AddressUse::from_string(&val).unwrap());
}
return None;
}
pub fn validate(&self) -> bool {
if let Some(_val) = self._city() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._country() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._district() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._line() {
            if !_val.into_iter().map(|e| e.validate()).all(|x| x) {
return false;
}
}
if let Some(_val) = self._postal_code() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._state() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._text() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._type() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._use() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.city() {}
if let Some(_val) = self.country() {}
if let Some(_val) = self.district() {}
if let Some(_val) = self.extension() {
            if !_val.into_iter().map(|e| e.validate()).all(|x| x) {
return false;
}
}
if let Some(_val) = self.id() {}
if let Some(_val) = self.line() {
_val.into_iter().for_each(|_e| {});
}
if let Some(_val) = self.period() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.postal_code() {}
if let Some(_val) = self.state() {}
if let Some(_val) = self.text() {}
if let Some(_val) = self.fhir_type() {}
if let Some(_val) = self.fhir_use() {}
return true;
}
}
#[derive(Debug)]
pub struct AddressBuilder {
pub(crate) value: Value,
}
impl AddressBuilder {
pub fn build(&self) -> Address {
Address {
value: Cow::Owned(self.value.clone()),
}
}
pub fn with(existing: Address) -> AddressBuilder {
AddressBuilder {
value: (*existing.value).clone(),
}
}
pub fn new() -> AddressBuilder {
let mut __value: Value = json!({});
return AddressBuilder { value: __value };
}
pub fn _city<'a>(&'a mut self, val: Element) -> &'a mut AddressBuilder {
self.value["_city"] = json!(val.value);
return self;
}
pub fn _country<'a>(&'a mut self, val: Element) -> &'a mut AddressBuilder {
self.value["_country"] = json!(val.value);
return self;
}
pub fn _district<'a>(&'a mut self, val: Element) -> &'a mut AddressBuilder {
self.value["_district"] = json!(val.value);
return self;
}
pub fn _line<'a>(&'a mut self, val: Vec<Element>) -> &'a mut AddressBuilder {
self.value["_line"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn _postal_code<'a>(&'a mut self, val: Element) -> &'a mut AddressBuilder {
self.value["_postalCode"] = json!(val.value);
return self;
}
pub fn _state<'a>(&'a mut self, val: Element) -> &'a mut AddressBuilder {
self.value["_state"] = json!(val.value);
return self;
}
pub fn _text<'a>(&'a mut self, val: Element) -> &'a mut AddressBuilder {
self.value["_text"] = json!(val.value);
return self;
}
pub fn _type<'a>(&'a mut self, val: Element) -> &'a mut AddressBuilder {
self.value["_type"] = json!(val.value);
return self;
}
pub fn _use<'a>(&'a mut self, val: Element) -> &'a mut AddressBuilder {
self.value["_use"] = json!(val.value);
return self;
}
pub fn city<'a>(&'a mut self, val: &str) -> &'a mut AddressBuilder {
self.value["city"] = json!(val);
return self;
}
pub fn country<'a>(&'a mut self, val: &str) -> &'a mut AddressBuilder {
self.value["country"] = json!(val);
return self;
}
pub fn district<'a>(&'a mut self, val: &str) -> &'a mut AddressBuilder {
self.value["district"] = json!(val);
return self;
}
pub fn extension<'a>(&'a mut self, val: Vec<Extension>) -> &'a mut AddressBuilder {
self.value["extension"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn id<'a>(&'a mut self, val: &str) -> &'a mut AddressBuilder {
self.value["id"] = json!(val);
return self;
}
pub fn line<'a>(&'a mut self, val: Vec<&str>) -> &'a mut AddressBuilder {
self.value["line"] = json!(val);
return self;
}
pub fn period<'a>(&'a mut self, val: Period) -> &'a mut AddressBuilder {
self.value["period"] = json!(val.value);
return self;
}
pub fn postal_code<'a>(&'a mut self, val: &str) -> &'a mut AddressBuilder {
self.value["postalCode"] = json!(val);
return self;
}
pub fn state<'a>(&'a mut self, val: &str) -> &'a mut AddressBuilder {
self.value["state"] = json!(val);
return self;
}
pub fn text<'a>(&'a mut self, val: &str) -> &'a mut AddressBuilder {
self.value["text"] = json!(val);
return self;
}
pub fn fhir_type<'a>(&'a mut self, val: AddressType) -> &'a mut AddressBuilder {
self.value["type"] = json!(val.to_string());
return self;
}
pub fn fhir_use<'a>(&'a mut self, val: AddressUse) -> &'a mut AddressBuilder {
self.value["use"] = json!(val.to_string());
return self;
}
}
#[derive(Debug)]
pub enum AddressType {
Postal,
Physical,
Both,
}
impl AddressType {
pub fn from_string(string: &str) -> Option<AddressType> {
match string {
"postal" => Some(AddressType::Postal),
"physical" => Some(AddressType::Physical),
"both" => Some(AddressType::Both),
_ => None,
}
}
pub fn to_string(&self) -> String {
match self {
AddressType::Postal => "postal".to_string(),
AddressType::Physical => "physical".to_string(),
AddressType::Both => "both".to_string(),
}
}
}
#[derive(Debug)]
pub enum AddressUse {
Home,
Work,
Temp,
Old,
Billing,
}
impl AddressUse {
pub fn from_string(string: &str) -> Option<AddressUse> {
match string {
"home" => Some(AddressUse::Home),
"work" => Some(AddressUse::Work),
"temp" => Some(AddressUse::Temp),
"old" => Some(AddressUse::Old),
"billing" => Some(AddressUse::Billing),
_ => None,
}
}
pub fn to_string(&self) -> String {
match self {
AddressUse::Home => "home".to_string(),
AddressUse::Work => "work".to_string(),
AddressUse::Temp => "temp".to_string(),
AddressUse::Old => "old".to_string(),
AddressUse::Billing => "billing".to_string(),
}
}
}
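// Hypothetical usage sketch (not part of the generated model; values are
// placeholders): assemble an Address with AddressBuilder, then read it back
// through the typed accessors.
//
// let mut builder = AddressBuilder::new();
// let address = builder
//     .city("Springfield")
//     .line(vec!["742 Evergreen Terrace"])
//     .fhir_use(AddressUse::Home)
//     .build();
// assert_eq!(address.city(), Some("Springfield"));
// assert!(address.validate());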
| |
product.seeder.ts
|
import { InjectRepository } from '@nestjs/typeorm';
import { DataFactory, Seeder } from 'nestjs-seeder';
import { Repository } from 'typeorm';
import { Product } from './product.entity';
export class
|
implements Seeder {
constructor(
        @InjectRepository(Product) private readonly productRepository: Repository<Product>,
    ) {}
seed(): Promise<any> {
const products = DataFactory.createForClass(Product).generate(50);
return this.productRepository.insert(products);
// return Promise.resolve(undefined)
}
drop(): Promise<any> {
return this.productRepository.delete({});
}
}
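// Hypothetical wiring sketch (module and entry names are assumptions, not
// from this file): register the seeder so a nestjs-seeder entry point can
// invoke seed()/drop() against the Product table.
//
// import { seeder } from 'nestjs-seeder';
// import { TypeOrmModule } from '@nestjs/typeorm';
//
// seeder({
//   imports: [TypeOrmModule.forRoot(/* connection options */), TypeOrmModule.forFeature([Product])],
// }).run([ProductSeeder]);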
|
ProductSeeder
|
index.3059afc6.js
|
import{A as k,bA as f,bC as B,cg as x,ba as j,r as w,j as D,u as _,S as O,a0 as s,B as a,a1 as F,a6 as o,D as d,ap as T,w as c,H as A,X as K,ae as N,J as y,ad as h,ab as z,K as G,aq as q,cG as L,br as P,c2 as M}from"./vendor.7bee64cc.js";/* empty css *//* empty css */import{a as S,k as R,_ as $,h as I}from"./index.9eb02b52.js";/* empty css *//* empty css *//* empty css *//* empty css *//* empty css */const b=[{key:"1",name:"\u901A\u77E5",list:[{id:"000000001",avatar:"https://gw.alipayobjects.com/zos/rmsportal/ThXAXghbEsBCCSDihZxY.png",title:"\u4F60\u6536\u5230\u4E86 14 \u4EFD\u65B0\u5468\u62A5",description:"",datetime:"2017-08-09",type:"1"},{id:"000000002",avatar:"https://gw.alipayobjects.com/zos/rmsportal/OKJXDXrmkNshAMvwtvhu.png",title:"\u4F60\u63A8\u8350\u7684 \u66F2\u59AE\u59AE \u5DF2\u901A\u8FC7\u7B2C\u4E09\u8F6E\u9762\u8BD5",description:"",datetime:"2017-08-08",type:"1"},{id:"000000003",avatar:"https://gw.alipayobjects.com/zos/rmsportal/kISTdvpyTAhtGxpovNWd.png",title:"\u8FD9\u79CD\u6A21\u677F\u53EF\u4EE5\u533A\u5206\u591A\u79CD\u901A\u77E5\u7C7B\u578B",description:"",datetime:"2017-08-07",type:"1"},{id:"000000004",avatar:"https://gw.alipayobjects.com/zos/rmsportal/GvqBnKhFgObvnSGkDsje.png",title:"\u5DE6\u4FA7\u56FE\u6807\u7528\u4E8E\u533A\u5206\u4E0D\u540C\u7684\u7C7B\u578B",description:"",datetime:"2017-08-07",type:"1"},{id:"000000005",avatar:"https://gw.alipayobjects.com/zos/rmsportal/GvqBnKhFgObvnSGkDsje.png",title:"\u6807\u9898\u53EF\u4EE5\u8BBE\u7F6E\u81EA\u52A8\u663E\u793A\u7701\u7565\u53F7\uFF0C\u672C\u4F8B\u4E2D\u6807\u9898\u884C\u6570\u5DF2\u8BBE\u4E3A1\u884C\uFF0C\u5982\u679C\u5185\u5BB9\u8D85\u8FC71\u884C\u5C06\u81EA\u52A8\u622A\u65AD\u5E76\u652F\u6301tooltip\u663E\u793A\u5B8C\u6574\u6807\u9898\u3002",description:"",datetime:"2017-08-07",type:"1"},{id:"000000006",avatar:"https://gw.alipayobjects.com/zos/rmsportal/GvqBnKhFgObvnSGkDsje.png",title:"\u5DE6\u4FA7\u56FE\u6807\u7528\u4E8E\u533A\u5206\u4E0D\u540C\u7684\u7C7B\u578B",description:"",datetime:"2017-08-07",type:"1"},{id:"000000007",avatar:"https://gw.alipayobjects.com/zos/rmsportal/GvqBnKhFgObvnSGkDsje.png",title:"\u5DE6\u4FA7\u56FE\u6807\u7528\u4E8E\u533A\u5206\u4E0D\u540C\u7684\u7C7B\u578B",description:"",datetime:"2017-08-07",type:"1"},{id:"000000008",avatar:"https://gw.alipayobjects.com/zos/rmsportal/GvqBnKhFgObvnSGkDsje.png",title:"\u5DE6\u4FA7\u56FE\u6807\u7528\u4E8E\u533A\u5206\u4E0D\u540C\u7684\u7C7B\u578B",description:"",datetime:"2017-08-07",type:"1"},{id:"000000009",avatar:"https://gw.alipayobjects.com/zos/rmsportal/GvqBnKhFgObvnSGkDsje.png",title:"\u5DE6\u4FA7\u56FE\u6807\u7528\u4E8E\u533A\u5206\u4E0D\u540C\u7684\u7C7B\u578B",description:"",datetime:"2017-08-07",type:"1"},{id:"000000010",avatar:"https://gw.alipayobjects.com/zos/rmsportal/GvqBnKhFgObvnSGkDsje.png",title:"\u5DE6\u4FA7\u56FE\u6807\u7528\u4E8E\u533A\u5206\u4E0D\u540C\u7684\u7C7B\u578B",description:"",datetime:"2017-08-07",type:"1"}]},{key:"2",name:"\u6D88\u606F",list:[{id:"000000006",avatar:"https://gw.alipayobjects.com/zos/rmsportal/fcHMVNCjPOsbUGdEduuv.jpeg",title:"\u66F2\u4E3D\u4E3D \u8BC4\u8BBA\u4E86\u4F60",description:"\u63CF\u8FF0\u4FE1\u606F\u63CF\u8FF0\u4FE1\u606F\u63CF\u8FF0\u4FE1\u606F",datetime:"2017-08-07",type:"2",clickClose:!0},{id:"000000007",avatar:"https://gw.alipayobjects.com/zos/rmsportal/fcHMVNCjPOsbUGdEduuv.jpeg",title:"\u6731\u504F\u53F3 
\u56DE\u590D\u4E86\u4F60",description:"\u8FD9\u79CD\u6A21\u677F\u7528\u4E8E\u63D0\u9192\u8C01\u4E0E\u4F60\u53D1\u751F\u4E86\u4E92\u52A8",datetime:"2017-08-07",type:"2",clickClose:!0},{id:"000000008",avatar:"https://gw.alipayobjects.com/zos/rmsportal/fcHMVNCjPOsbUGdEduuv.jpeg",title:"\u6807\u9898",description:"\u8BF7\u5C06\u9F20\u6807\u79FB\u52A8\u5230\u6B64\u5904\uFF0C\u4EE5\u4FBF\u6D4B\u8BD5\u8D85\u957F\u7684\u6D88\u606F\u5728\u6B64\u5904\u5C06\u5982\u4F55\u5904\u7406\u3002\u672C\u4F8B\u4E2D\u8BBE\u7F6E\u7684\u63CF\u8FF0\u6700\u5927\u884C\u6570\u4E3A2\uFF0C\u8D85\u8FC72\u884C\u7684\u63CF\u8FF0\u5185\u5BB9\u5C06\u88AB\u7701\u7565\u5E76\u4E14\u53EF\u4EE5\u901A\u8FC7tooltip\u67E5\u770B\u5B8C\u6574\u5185\u5BB9",datetime:"2017-08-07",type:"2",clickClose:!0}]},{key:"3",name:"\u5F85\u529E",list:[{id:"000000009",avatar:"",title:"\u4EFB\u52A1\u540D\u79F0",description:"\u4EFB\u52A1\u9700\u8981\u5728 2017-01-12 20:00 \u524D\u542F\u52A8",datetime:"",extra:"\u672A\u5F00\u59CB",color:"",type:"3"},{id:"000000010",avatar:"",title:"\u7B2C\u4E09\u65B9\u7D27\u6025\u4EE3\u7801\u53D8\u66F4",description:"\u51A0\u9716 \u9700\u5728 2017-01-07 \u524D\u5B8C\u6210\u4EE3\u7801\u53D8\u66F4\u4EFB\u52A1",datetime:"",extra:"\u9A6C\u4E0A\u5230\u671F",color:"red",type:"3"},{id:"000000011",avatar:"",title:"\u4FE1\u606F\u5B89\u5168\u8003\u8BD5",description:"\u6307\u6D3E\u7AF9\u5C14\u4E8E 2017-01-09 \u524D\u5B8C\u6210\u66F4\u65B0\u5E76\u53D1\u5E03",datetime:"",extra:"\u5DF2\u8017\u65F6 8 \u5929",color:"gold",type:"3"},{id:"000000012",avatar:"",title:"ABCD \u7248\u672C\u53D1\u5E03",description:"\u6307\u6D3E\u7AF9\u5C14\u4E8E 2017-01-09 \u524D\u5B8C\u6210\u66F4\u65B0\u5E76\u53D1\u5E03",datetime:"",extra:"\u8FDB\u884C\u4E2D",color:"blue",type:"3"}]}];const V=k({components:{[f.name]:f,[B.name]:B,[B.Item.name]:B.Item,AListItemMeta:B.Item.Meta,ATypographyParagraph:x.Paragraph,[j.name]:j},props:{list:{type:Array,default:()=>[]},pageSize:{type:[Boolean,Number],default:5},currentPage:{type:Number,default:1},titleRows:{type:Number,default:1},descRows:{type:Number,default:2},onTitleClick:{type:Function}},emits:["update:currentPage"],setup(t,{emit:E}){const{prefixCls:m}=S("header-notify-list"),l=w(t.currentPage||1),C=D(()=>{const{pageSize:e,list:r}=t;if(e===!1)return[];let p=R(e)?e:5;return r.slice(p*(_(l)-1),p*_(l))});O(()=>t.currentPage,e=>{l.value=e});const i=D(()=>!!t.onTitleClick),n=D(()=>{const{list:e,pageSize:r}=t;return r>0&&e&&e.length>r?{total:e.length,pageSize:r,current:_(l),onChange(p){l.value=p,E("update:currentPage",p)}}:!1});function g(e){t.onTitleClick&&t.onTitleClick(e)}return{prefixCls:m,getPagination:n,getData:C,handleTitleClick:g,isTitleClickable:i}}}),X={class:"title"},H={key:0,class:"extra"},U={key:1},J={key:0,class:"description"},W={class:"datetime"};function Y(t,E,m,l,C,i){const n=s("a-typography-paragraph"),g=s("a-tag"),e=s("a-avatar"),r=s("a-list-item-meta"),p=s("a-list-item"),v=s("a-list");return a(),F(v,{class:G(t.prefixCls),bordered:"",pagination:t.getPagination},{default:o(()=>[(a(!0),d(z,null,T(t.getData,u=>(a(),F(p,{key:u.id,class:"list-item"},{default:o(()=>[c(r,null,{title:o(()=>[A("div",X,[c(n,{onClick:et=>t.handleTitleClick(u),style:K([{width:"100%","margin-bottom":"0 
!important"},{cursor:t.isTitleClickable?"pointer":""}]),delete:!!u.titleDelete,ellipsis:t.$props.titleRows&&t.$props.titleRows>0?{rows:t.$props.titleRows,tooltip:!!u.title}:!1,content:u.title},null,8,["onClick","style","delete","ellipsis","content"]),u.extra?(a(),d("div",H,[c(g,{class:"tag",color:u.color},{default:o(()=>[N(y(u.extra),1)]),_:2},1032,["color"])])):h("",!0)])]),avatar:o(()=>[u.avatar?(a(),F(e,{key:0,class:"avatar",src:u.avatar},null,8,["src"])):(a(),d("span",U,y(u.avatar),1))]),description:o(()=>[A("div",null,[u.description?(a(),d("div",J,[c(n,{style:{width:"100%","margin-bottom":"0 !important"},ellipsis:t.$props.descRows&&t.$props.descRows>0?{rows:t.$props.descRows,tooltip:!!u.description}:!1,content:u.description},null,8,["ellipsis","content"])])):h("",!0),A("div",W,y(u.datetime),1)])]),_:2},1024)]),_:2},1024))),128))]),_:1},8,["class","pagination"])}var Z=$(V,[["render",Y],["__scopeId","data-v-737c3776"]]);const Q=k({components:{Popover:q,BellOutlined:L,Tabs:P,TabPane:P.TabPane,Badge:M,NoticeList:Z},setup(){const{prefixCls:t}=S("header-notify"),{createMessage:E}=I(),m=w(b),l=D(()=>{let i=0;for(let n=0;n<b.length;n++)i+=b[n].list.length;return i});function C(i){E.success("\u4F60\u70B9\u51FB\u4E86\u901A\u77E5\uFF0CID="+i.id),i.titleDelete=!i.titleDelete}return{prefixCls:t,listData:m,count:l,onNoticeClick:C,numberStyle:{}}}}),tt={key:0};function ut(t,E,m,l,C,i){const n=s("BellOutlined"),g=s("Badge"),e=s("NoticeList"),r=s("TabPane"),p=s("Tabs"),v=s("Popover");return a(),d("div",{class:G(t.prefixCls)},[c(v,{title:"",trigger:"click",overlayClassName:`${t.prefixCls}__overlay`},{content:o(()=>[c(p,null,{default:o(()=>[(a(!0),d(z,null,T(t.listData,u=>(a(),F(r,{key:u.key},{tab:o(()=>[N(y(u.name)+" ",1),u.list.length!==0?(a(),d("span",tt,"("+y(u.list.length)+")",1)):h("",!0)]),default:o(()=>[u.key==="1"?(a(),F(e,{key:0,list:u.list,onTitleClick:t.onNoticeClick},null,8,["list","onTitleClick"])):(a(),F(e,{key:1,list:u.list},null,8,["list"]))]),_:2},1024))),128))]),_:1})]),default:o(()=>[c(g,{count:t.count,dot:"",numberStyle:t.numberStyle},{default:o(()=>[c(n)]),_:1},8,["count","numberStyle"])]),_:1},8,["overlayClassName"])],2)}var dt=$(Q,[["render",ut]]);export{dt as default};
|
||
compress_tiered_imagenet.py
|
# Copyright (c) 2018 Mengye Ren, Eleni Triantafillou, Sachin Ravi, Jake Snell,
# Kevin Swersky, Joshua B. Tenenbaum, Hugo Larochelle, Richars S. Zemel.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# =============================================================================
import cv2
import numpy as np
import six
import sys
import pickle as pkl
from tqdm import tqdm
def
|
(path, output):
with np.load(path, mmap_mode="r") as data:
images = data["images"]
array = []
for ii in tqdm(six.moves.xrange(images.shape[0]), desc='compress'):
im = images[ii]
im_str = cv2.imencode('.png', im)[1]
array.append(im_str)
with open(output, 'wb') as f:
pkl.dump(array, f, protocol=pkl.HIGHEST_PROTOCOL)
def decompress(path, output):
  # Note the argument order: `path` is the destination .npz to write, while
  # `output` is the source .pkl of PNG-encoded images, mirroring how main()
  # wires up compress() and decompress().
  with open(output, 'rb') as f:
    array = pkl.load(f)
images = np.zeros([len(array), 84, 84, 3], dtype=np.uint8)
for ii, item in tqdm(enumerate(array), desc='decompress'):
im = cv2.imdecode(item, 1)
images[ii] = im
np.savez(path, images=images)
def main():
if sys.argv[1] == 'compress':
compress(sys.argv[2], sys.argv[3])
elif sys.argv[1] == 'decompress':
decompress(sys.argv[2], sys.argv[3])
if __name__ == '__main__':
main()
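# Example invocations, derived from the argv handling above (paths are
# placeholders): both modes take the .npz path first and the .pkl path second.
#   python compress_tiered_imagenet.py compress train_images.npz train_images.pkl
#   python compress_tiered_imagenet.py decompress train_images.npz train_images.pkl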
|
compress
|
tablemagic.js
|
/**
* @license TableMagic v0.8.1
* (c) 2011-2014 tweeeety, Ryuichi Murata http://tweeeety.com
* License: MIT
*/
//;(function(global, $) {
var TableMagic = ( function(TableMagic, undefined ) {
// SimpleDomManipulator
var SimpleDomManipulator = function() {
var Constructor = function() {
this.simpleDom = null;
};
Constructor.create = function(elm) {
var _this = new this();
var dom = document.createElement(elm);
_this.simpleDom = dom;
return _this;
}
Constructor.prototype.attr = function(attr, val) {
this.simpleDom.setAttribute(attr,val);
return this;
}
Constructor.prototype.text = function(text) {
var textNode = document.createTextNode(text)
this.simpleDom.appendChild(textNode);
return this;
}
Constructor.prototype.addClass = function(className) {
if( !className ) return this;
this.simpleDom.setAttribute("class",className);
this.simpleDom.setAttribute("className",className);
return this;
}
Constructor.prototype.append = function(content) {
var dom = ( content instanceof SimpleDomManipulator ) ? content.get() : content;
this.simpleDom.appendChild(dom);
return this;
}
Constructor.prototype.get = function() {
return this.simpleDom;
}
return Constructor;
}();
// Renderers
var Renderers = function() {
var Constructor = function() {
var _this = this;
_this._arr = null;
this.baseRenderers = {
// normal orientation
// ok
normal: function(){
var array = _this.ddm.addTitleRow().get();
var tbl = _this.arrayToTable(array);
return tbl;
},
sumUpCol: function(){
var array = _this.ddm.addTitleRow().sumUpCol().get();
var tbl = _this.arrayToTable(array);
return tbl;
},
aveCol: function(){
var array = _this.ddm.addTitleRow().aveCol().get();
var tbl = _this.arrayToTable(array);
return tbl;
},
aveColExceptZero: function(){
var array = _this.ddm.addTitleRow().aveCol(true,true,true).get();
var tbl = _this.arrayToTable(array);
return tbl;
},
// rotate orientation
// ok
rotateNormal: function(){
var array = _this.ddm.addTitleRow().rowColRotate().get();
var tbl = _this.arrayToTable(array);
return tbl;
},
sumUpColRotate: function(){
var array = _this.ddm.addTitleRow().sumUpCol().rowColRotate().get();
var tbl = _this.arrayToTable(array);
return tbl;
},
rotateSumUpCol: function(){
var array = _this.ddm.addTitleRow().rowColRotate().sumUpCol().get();
var tbl = _this.arrayToTable(array);
return tbl;
},
aveColRotate: function(){
var array = _this.ddm.addTitleRow().aveCol().rowColRotate().get();
var tbl = _this.arrayToTable(array);
return tbl;
},
aveColExceptZeroRotate: function(){
var array = _this.ddm.addTitleRow().aveCol(true,true,true).rowColRotate().get();
var tbl = _this.arrayToTable(array);
return tbl;
},
rotateAveCol: function(){
var array = _this.ddm.addTitleRow().rowColRotate().aveCol().get();
var tbl = _this.arrayToTable(array);
return tbl;
},
rotateAveColExceptZero: function(){
var array = _this.ddm.addTitleRow().rowColRotate().aveCol(true,true,true).get();
var tbl = _this.arrayToTable(array);
return tbl;
}
};
};
Constructor.create = function(ddm, opt){
var _this = new this;
_this.ddm = ddm;
if( typeof opt !== 'object' ) opt = {};
_this.tableClassName = opt.tableClassName ? opt.tableClassName : "" ;
_this.addThead = opt.addThead ? opt.addThead : false ;
_this.trOddClassName = opt.trOddClassName ? opt.trOddClassName : "" ;
_this.trEvenClassName = opt.trEvenClassName ? opt.trEvenClassName : "" ;
_this.trHeaderClassName = opt.trHeaderClassName ? opt.trHeaderClassName : "" ;
_this.tdHeaderClassName = opt.tdHeaderClassName ? opt.tdHeaderClassName : "" ;
_this.firstRowTd2Th = opt.firstRowTd2Th ? opt.firstRowTd2Th : "" ;
_this.firstColTd2Th = opt.firstColTd2Th ? opt.firstColTd2Th : "" ;
_this.rendereCallback = opt.rendereCallback ? opt.rendereCallback : undefined ;
      // renderer name, e.g. normal, sumUpCol, aveCol (see baseRenderers above)
_this.renderName = opt.renderName ? opt.renderName : 'normal';
return _this;
};
Constructor.prototype.render = function(){
if( !this.renderName ){
throw "renderName is not defined.";
}
if( this.baseRenderers && !this.baseRenderers[this.renderName] ){
throw "renderName:" + this.renderName + ' is not defined;';
}
this.tbl = this.baseRenderers[this.renderName]();
return this;
};
Constructor.prototype.get = function(){
return this.getTableTag();
};
Constructor.prototype.getTableObj = function(){
      return this.tbl.get();
};
Constructor.prototype.getTableTag = function(){
return this.tbl.get().outerHTML;
};
    Constructor.prototype.getInner = function(){
      return this.tbl.get().innerHTML;
    };
Constructor.prototype.addRenderer = function(name, fn){
this.baseRenderers[name] = fn;
};
Constructor.prototype.addProp = function(name, prop){
this[name] = prop;
};
    // Convert a 2D array into a table DOM object and render it
Constructor.prototype.arrayToTable = function(array){
var _this = this;
var tbodyObj = SimpleDomManipulator.create("tbody");
var tblObj = SimpleDomManipulator.create("table");
for( var i=0, iLen=array.length; i<iLen; i++ ){
var trObj = SimpleDomManipulator.create("tr");
var trClassName = '';
trClassName = (this.trEvenClassName && this.trOddClassName && i%2==0)? this.trEvenClassName : this.trOddClassName;
if( i==0 && this.trHeaderClassName && typeof this.trHeaderClassName == 'string') trClassName += ' ' + this.trHeaderClassName;
trObj.addClass(trClassName);
for( var j=0, jLen=array[i].length; j<jLen; j++ ){
var tdObj;
if( i==0 ){
tdObj = ( this.firstRowTd2Th ) ? SimpleDomManipulator.create("th") : SimpleDomManipulator.create("td");
}else{
tdObj = ( this.firstColTd2Th && j==0 ) ? SimpleDomManipulator.create("th") : SimpleDomManipulator.create("td");
if( j==0 && typeof this.tdHeaderClassName == 'string') tdObj.addClass(_this.tdHeaderClassName);
}
tdObj.text( array[i][j] );
trObj.append(tdObj);
}
// trObj append to thead or tbody
if(this.addThead && i==0){
var theadObj = SimpleDomManipulator.create("thead");
tblObj.append(theadObj.append(trObj));
}else{
tbodyObj.append(trObj);
}
|
tblObj.append(tbodyObj).addClass(this.tableClassName);
return tblObj;
};
return Constructor;
}();
// DbDataManipulator
var DbDataManipulator = function() {
var Constructor = function() {
this._dbData = [];
this._orderArr = [];
this._titleHash = {};
this._baseTableArray = [];
this._newTableArray = null;
};
Constructor.create = function(data, orderArr, titleHash, opt){
var _this = new this;
if( typeof opt == 'undefined' ) opt = {};
if( typeof opt.label == 'undefined' ) opt.label = {
'sum' : 'sum',
'ave' : 'ave'
};
_this.label = opt.label;
if( typeof opt.noTitleRow == 'undefined' ) _this.noTitleRow = false;
_this.noTitleRow = opt.noTitleRow;
if( typeof opt.rotate == 'undefined' ) opt.rotate = false;
_this.rotate = opt.rotate;
_this._dbData = data;
if(!orderArr) orderArr = _this._dbData2OrderArr(data);
_this._orderArr = orderArr;
_this._titleHash = titleHash;
// default
_this.addedTitle = false;
_this.toArray();
//if( opt.title )_this.addTitle();
//if( opt.rotate ) _this.rowColRotate();
return _this;
};
Constructor.prototype._dbData2OrderArr = function(data){
if( !data || data.length == 0 ) return [];
var row = data[0];
var arr = [];
for( var key in row ){
arr.push(key);
}
return arr;
};
Constructor.prototype.toArray = function(){
var array = [];
//if(!this._orderArr) this._orderArr = this._dbData2OrderArr(data);
array = this._dbData2Array(this._dbData, this._orderArr );
//var titleRowArray = this.makeTitleRowArray(orderArr, titleHash);
//array.unshift(titleRowArray);
this.set_baseTableArray(array);
this.set_newTableArray(array);
return this;
};
Constructor.prototype._dbData2Array = function(data, orderArr){
var arr = [];
for ( var n in data ){
var colArray = [];
for ( var i=0, len=orderArr.length; i < len ; i++ )
{
if(data[n][orderArr[i]] == undefined) data[n][orderArr[i]] = 0 ;
colArray.push(data[n][orderArr[i]]);
}
arr.push(colArray);
}
return arr;
};
Constructor.prototype.addTitleRow = function(){
if( this.noTitleRow ) return this;
var arr = this.get_newTableArray();
var titleRowArray = this.makeTitleRowArray();
arr.unshift(titleRowArray);
this.set_newTableArray(arr);
this.addedTitle = true;
return this;
};
Constructor.prototype.makeTitleRowArray = function(){
if( typeof this._titleHash === 'undefined' ) return this._orderArr;
var titleRowArray = [];
for ( var i=0, len = this._orderArr.length; i < len ; i++ ){
titleRowArray.push(this._titleHash[this._orderArr[i]]);
}
return titleRowArray;
};
Constructor.prototype.get = function(){
return this.getArray();
};
Constructor.prototype.getArray = function(){
return this._newTableArray ? this._newTableArray : this._baseTableArray;
};
Constructor.prototype.get_baseTableArray = function(){
return this._baseTableArray;
};
Constructor.prototype.set_baseTableArray = function(array){
this._baseTableArray = array;
};
Constructor.prototype.get_newTableArray = function(){
return this._newTableArray;
};
Constructor.prototype.set_newTableArray = function(array){
this._newTableArray = array;
};
Constructor.prototype.rowColRotate = function(){
var array = this.get_newTableArray();
//tblRowArray
var newArray = [];
for(var i=0,iLen = array.length; i<iLen; i++){
for(var j=0,jLen = array[i].length; j<jLen; j++){
if(newArray[j] == undefined)newArray[j] = [];
newArray[j].push(array[i][j]);
          //newArray[j].unshift(array[i][j]) // using unshift here would flip the row order as well
//newArray[j][iLen-i] = array[i][j];
}
}
this.set_newTableArray(newArray);
return this;
};
Constructor.prototype.sumUpCol = function(exceptRowTitle, exceptColTitle){
if( typeof exceptRowTitle == 'undefined' ) exceptRowTitle = true;
if( typeof exceptColTitle == 'undefined' ) exceptColTitle = true;
var array = this.get_newTableArray();
var sumUpArray = [];
for(var row=0, rowLen = array.length; row<rowLen; row++){
for(var col=0, colLen = array[row].length; col<colLen; col++){
if(exceptRowTitle && row == 0){
continue;
}
if(exceptColTitle && col == 0){
sumUpArray[col] = this.label.sum;
            continue;
}
if(sumUpArray[col] == undefined) sumUpArray[col] = 0;
sumUpArray[col] = sumUpArray[col] + parseInt(array[row][col],10);
}
}
array.push(sumUpArray);
this.set_newTableArray(array);
return this;
};
Constructor.prototype.aveCol = function(exceptRowTitle, exceptColTitle, exceptZero){
if( typeof exceptRowTitle == 'undefined' ) exceptRowTitle = true;
if( typeof exceptColTitle == 'undefined' ) exceptColTitle = true;
if( typeof exceptZero == 'undefined' ) exceptZero = false;
var array = this.get_newTableArray();
var aveArray = [];
var sumUpArray = [];
//var rowLen = array.length;
      var intCntArray = []; // per-column count of values included in the average
for(var row=0, rowLen=array.length; row<rowLen; row++){
for(var col=0, colLen = array[row].length; col<colLen; col++){
if(exceptRowTitle && row == 0){
continue;
}
if(exceptColTitle && col == 0){
sumUpArray[col] = this.label.ave;
continue;
}
if(sumUpArray[col] == undefined) sumUpArray[col] = 0;
sumUpArray[col] = sumUpArray[col] + parseInt(array[row][col],10);
if(intCntArray[col] == undefined) intCntArray[col] = 0;
if(exceptZero && parseInt(array[row][col],10) == 0 ) continue;
intCntArray[col]++;
}
}
for (var idx=0; idx<sumUpArray.length; idx++){
if(idx == 0)
{
aveArray[idx] = sumUpArray[idx];
continue;
}
aveArray[idx] = ( !intCntArray[idx] ) ? 0 : (sumUpArray[idx] / intCntArray[idx]).toFixed(1);
}
array.push(aveArray);
this.set_newTableArray(array);
return this;
};
return Constructor;
}();
// TableMagic
var TableMagic = function(data, opt){
this.data = data;
if( !(opt != null && opt instanceof Object && !(opt instanceof Array)) ) opt = {};
this.orderArr = opt.titleOrderArr || undefined;
this.titleHash = opt.titleHash || undefined;
this.opt = opt;
this.initialize();
};
/*----------------
* initialize
----------------*/
TableMagic.prototype.initialize = function(){
var ddm = DbDataManipulator.create(this.data, this.orderArr, this.titleHash, {label:this.opt.label, noTitleRow: this.opt.noTitleRow||false});
this.r = Renderers.create(ddm, this.opt);
return this;
};
/*----------------
* accessor
----------------*/
TableMagic.prototype.get = function(){
return this.getTableTag();
};
TableMagic.prototype.getTableTag = function(){
if( !this.r ) return false;
return this.r.render().get();
};
TableMagic.prototype.getInner = function(){
if( !this.r ) return false;
return this.r.render().getInner();
};
return TableMagic;
})({});
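// Hypothetical usage sketch (not part of the library source): render an array
// of row objects as an HTML table string with a summed footer row appended.
//
// var rows = [{name: 'a', hits: 3}, {name: 'b', hits: 5}];
// var tm = new TableMagic(rows, {renderName: 'sumUpCol', firstRowTd2Th: true});
// document.getElementById('out').innerHTML = tm.get();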
|
}
// tbody append to table
|
operations.rs
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
#[derive(Clone)]
pub struct Client {
endpoint: String,
credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
scopes: Vec<String>,
pipeline: azure_core::Pipeline,
}
#[derive(Clone)]
pub struct ClientBuilder {
credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
endpoint: Option<String>,
scopes: Option<Vec<String>>,
}
pub const DEFAULT_ENDPOINT: &str = azure_core::resource_manager_endpoint::AZURE_PUBLIC_CLOUD;
impl ClientBuilder {
pub fn new(credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>) -> Self {
Self {
credential,
endpoint: None,
scopes: None,
}
}
pub fn endpoint(mut self, endpoint: impl Into<String>) -> Self {
self.endpoint = Some(endpoint.into());
self
}
pub fn scopes(mut self, scopes: &[&str]) -> Self {
self.scopes = Some(scopes.iter().map(|scope| (*scope).to_owned()).collect());
self
}
pub fn build(self) -> Client {
let endpoint = self.endpoint.unwrap_or_else(|| DEFAULT_ENDPOINT.to_owned());
let scopes = self.scopes.unwrap_or_else(|| vec![format!("{}/", endpoint)]);
Client::new(endpoint, self.credential, scopes)
}
}
impl Client {
pub(crate) fn endpoint(&self) -> &str {
self.endpoint.as_str()
}
pub(crate) fn token_credential(&self) -> &dyn azure_core::auth::TokenCredential {
self.credential.as_ref()
}
pub(crate) fn scopes(&self) -> Vec<&str> {
self.scopes.iter().map(String::as_str).collect()
}
pub(crate) async fn send(&self, request: impl Into<azure_core::Request>) -> Result<azure_core::Response, azure_core::Error> {
let mut context = azure_core::Context::default();
let mut request = request.into();
self.pipeline.send(&mut context, &mut request).await
}
pub fn new(
endpoint: impl Into<String>,
credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
scopes: Vec<String>,
) -> Self {
let endpoint = endpoint.into();
let pipeline = azure_core::Pipeline::new(
option_env!("CARGO_PKG_NAME"),
option_env!("CARGO_PKG_VERSION"),
azure_core::ClientOptions::default(),
Vec::new(),
Vec::new(),
);
Self {
endpoint,
credential,
scopes,
pipeline,
}
}
pub fn actions(&self) -> actions::Client {
actions::Client(self.clone())
}
pub fn alert_rule_templates(&self) -> alert_rule_templates::Client {
alert_rule_templates::Client(self.clone())
}
pub fn alert_rules(&self) -> alert_rules::Client {
alert_rules::Client(self.clone())
}
pub fn automation_rules(&self) -> automation_rules::Client {
automation_rules::Client(self.clone())
}
pub fn bookmark(&self) -> bookmark::Client {
bookmark::Client(self.clone())
}
pub fn bookmark_relations(&self) -> bookmark_relations::Client {
bookmark_relations::Client(self.clone())
}
pub fn bookmarks(&self) -> bookmarks::Client {
bookmarks::Client(self.clone())
}
pub fn case_comments(&self) -> case_comments::Client {
case_comments::Client(self.clone())
}
pub fn case_relations(&self) -> case_relations::Client {
case_relations::Client(self.clone())
}
pub fn cases(&self) -> cases::Client {
cases::Client(self.clone())
}
pub fn cases_aggregations(&self) -> cases_aggregations::Client {
cases_aggregations::Client(self.clone())
}
pub fn comments(&self) -> comments::Client {
comments::Client(self.clone())
}
pub fn data_connectors(&self) -> data_connectors::Client {
data_connectors::Client(self.clone())
}
pub fn data_connectors_check_requirements(&self) -> data_connectors_check_requirements::Client {
data_connectors_check_requirements::Client(self.clone())
}
pub fn domain_whois(&self) -> domain_whois::Client {
domain_whois::Client(self.clone())
}
pub fn entities(&self) -> entities::Client {
entities::Client(self.clone())
}
pub fn entities_get_timeline(&self) -> entities_get_timeline::Client {
entities_get_timeline::Client(self.clone())
}
pub fn entities_relations(&self) -> entities_relations::Client {
entities_relations::Client(self.clone())
}
pub fn entity_queries(&self) -> entity_queries::Client {
entity_queries::Client(self.clone())
}
pub fn entity_relations(&self) -> entity_relations::Client {
entity_relations::Client(self.clone())
}
pub fn incident_comments(&self) -> incident_comments::Client {
incident_comments::Client(self.clone())
}
pub fn incident_relations(&self) -> incident_relations::Client {
incident_relations::Client(self.clone())
}
pub fn incidents(&self) -> incidents::Client {
incidents::Client(self.clone())
}
pub fn ip_geodata(&self) -> ip_geodata::Client {
ip_geodata::Client(self.clone())
}
pub fn office_consents(&self) -> office_consents::Client {
office_consents::Client(self.clone())
}
pub fn operations(&self) -> operations::Client {
operations::Client(self.clone())
}
pub fn product_settings(&self) -> product_settings::Client {
product_settings::Client(self.clone())
}
pub fn threat_intelligence_indicator(&self) -> threat_intelligence_indicator::Client {
threat_intelligence_indicator::Client(self.clone())
}
pub fn threat_intelligence_indicator_metrics(&self) -> threat_intelligence_indicator_metrics::Client {
threat_intelligence_indicator_metrics::Client(self.clone())
}
pub fn threat_intelligence_indicators(&self) -> threat_intelligence_indicators::Client {
threat_intelligence_indicators::Client(self.clone())
}
pub fn watchlist_items(&self) -> watchlist_items::Client {
watchlist_items::Client(self.clone())
}
pub fn watchlists(&self) -> watchlists::Client {
watchlists::Client(self.clone())
}
}
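// Hypothetical usage sketch (not from the generated source; the credential
// type is an assumption borrowed from the `azure_identity` crate):
//
// let credential = std::sync::Arc::new(azure_identity::DefaultAzureCredential::default());
// let client = ClientBuilder::new(credential).build();
// let operations = client.operations().list().into_future().await?;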
#[non_exhaustive]
#[derive(Debug, thiserror::Error)]
#[allow(non_camel_case_types)]
pub enum Error {
#[error(transparent)]
CasesAggregations_Get(#[from] cases_aggregations::get::Error),
#[error(transparent)]
Operations_List(#[from] operations::list::Error),
#[error(transparent)]
AlertRules_List(#[from] alert_rules::list::Error),
#[error(transparent)]
AlertRules_Get(#[from] alert_rules::get::Error),
#[error(transparent)]
AlertRules_CreateOrUpdate(#[from] alert_rules::create_or_update::Error),
#[error(transparent)]
AlertRules_Delete(#[from] alert_rules::delete::Error),
#[error(transparent)]
Actions_ListByAlertRule(#[from] actions::list_by_alert_rule::Error),
#[error(transparent)]
Actions_Get(#[from] actions::get::Error),
#[error(transparent)]
Actions_CreateOrUpdate(#[from] actions::create_or_update::Error),
#[error(transparent)]
Actions_Delete(#[from] actions::delete::Error),
#[error(transparent)]
AlertRuleTemplates_List(#[from] alert_rule_templates::list::Error),
#[error(transparent)]
AlertRuleTemplates_Get(#[from] alert_rule_templates::get::Error),
#[error(transparent)]
DataConnectors_List(#[from] data_connectors::list::Error),
#[error(transparent)]
DataConnectors_Get(#[from] data_connectors::get::Error),
#[error(transparent)]
DataConnectors_CreateOrUpdate(#[from] data_connectors::create_or_update::Error),
#[error(transparent)]
DataConnectors_Delete(#[from] data_connectors::delete::Error),
#[error(transparent)]
ProductSettings_GetAll(#[from] product_settings::get_all::Error),
#[error(transparent)]
ProductSettings_Get(#[from] product_settings::get::Error),
#[error(transparent)]
ProductSettings_Update(#[from] product_settings::update::Error),
#[error(transparent)]
ProductSettings_Delete(#[from] product_settings::delete::Error),
#[error(transparent)]
EntityQueries_List(#[from] entity_queries::list::Error),
#[error(transparent)]
EntityQueries_Get(#[from] entity_queries::get::Error),
#[error(transparent)]
Incidents_List(#[from] incidents::list::Error),
#[error(transparent)]
Incidents_Get(#[from] incidents::get::Error),
#[error(transparent)]
Incidents_CreateOrUpdate(#[from] incidents::create_or_update::Error),
#[error(transparent)]
Incidents_Delete(#[from] incidents::delete::Error),
#[error(transparent)]
Incidents_ListOfAlerts(#[from] incidents::list_of_alerts::Error),
#[error(transparent)]
Incidents_ListOfBookmarks(#[from] incidents::list_of_bookmarks::Error),
#[error(transparent)]
IncidentComments_ListByIncident(#[from] incident_comments::list_by_incident::Error),
#[error(transparent)]
IncidentComments_GetComment(#[from] incident_comments::get_comment::Error),
#[error(transparent)]
IncidentComments_CreateComment(#[from] incident_comments::create_comment::Error),
#[error(transparent)]
IncidentComments_DeleteComment(#[from] incident_comments::delete_comment::Error),
#[error(transparent)]
Incidents_ListOfEntities(#[from] incidents::list_of_entities::Error),
#[error(transparent)]
IncidentRelations_List(#[from] incident_relations::list::Error),
#[error(transparent)]
IncidentRelations_GetRelation(#[from] incident_relations::get_relation::Error),
#[error(transparent)]
IncidentRelations_CreateOrUpdateRelation(#[from] incident_relations::create_or_update_relation::Error),
#[error(transparent)]
IncidentRelations_DeleteRelation(#[from] incident_relations::delete_relation::Error),
#[error(transparent)]
Watchlists_List(#[from] watchlists::list::Error),
#[error(transparent)]
Watchlists_Get(#[from] watchlists::get::Error),
#[error(transparent)]
Watchlists_Create(#[from] watchlists::create::Error),
#[error(transparent)]
Watchlists_Delete(#[from] watchlists::delete::Error),
#[error(transparent)]
WatchlistItems_List(#[from] watchlist_items::list::Error),
#[error(transparent)]
WatchlistItems_Get(#[from] watchlist_items::get::Error),
#[error(transparent)]
WatchlistItems_CreateOrUpdate(#[from] watchlist_items::create_or_update::Error),
#[error(transparent)]
WatchlistItems_Delete(#[from] watchlist_items::delete::Error),
#[error(transparent)]
AutomationRules_List(#[from] automation_rules::list::Error),
#[error(transparent)]
AutomationRules_Get(#[from] automation_rules::get::Error),
#[error(transparent)]
AutomationRules_CreateOrUpdate(#[from] automation_rules::create_or_update::Error),
#[error(transparent)]
AutomationRules_Delete(#[from] automation_rules::delete::Error),
#[error(transparent)]
Bookmarks_List(#[from] bookmarks::list::Error),
#[error(transparent)]
Bookmarks_Get(#[from] bookmarks::get::Error),
#[error(transparent)]
Bookmarks_CreateOrUpdate(#[from] bookmarks::create_or_update::Error),
#[error(transparent)]
Bookmarks_Delete(#[from] bookmarks::delete::Error),
#[error(transparent)]
BookmarkRelations_List(#[from] bookmark_relations::list::Error),
#[error(transparent)]
Bookmark_Expand(#[from] bookmark::expand::Error),
#[error(transparent)]
BookmarkRelations_GetRelation(#[from] bookmark_relations::get_relation::Error),
#[error(transparent)]
BookmarkRelations_CreateOrUpdateRelation(#[from] bookmark_relations::create_or_update_relation::Error),
#[error(transparent)]
BookmarkRelations_DeleteRelation(#[from] bookmark_relations::delete_relation::Error),
#[error(transparent)]
Cases_List(#[from] cases::list::Error),
#[error(transparent)]
Cases_Get(#[from] cases::get::Error),
#[error(transparent)]
Cases_CreateOrUpdate(#[from] cases::create_or_update::Error),
#[error(transparent)]
Cases_Delete(#[from] cases::delete::Error),
#[error(transparent)]
Comments_ListByCase(#[from] comments::list_by_case::Error),
#[error(transparent)]
Cases_GetComment(#[from] cases::get_comment::Error),
#[error(transparent)]
CaseComments_CreateComment(#[from] case_comments::create_comment::Error),
#[error(transparent)]
CaseRelations_List(#[from] case_relations::list::Error),
#[error(transparent)]
CaseRelations_GetRelation(#[from] case_relations::get_relation::Error),
#[error(transparent)]
CaseRelations_CreateOrUpdateRelation(#[from] case_relations::create_or_update_relation::Error),
#[error(transparent)]
CaseRelations_DeleteRelation(#[from] case_relations::delete_relation::Error),
#[error(transparent)]
DataConnectorsCheckRequirements_Post(#[from] data_connectors_check_requirements::post::Error),
#[error(transparent)]
IpGeodata_Get(#[from] ip_geodata::get::Error),
#[error(transparent)]
DomainWhois_Get(#[from] domain_whois::get::Error),
#[error(transparent)]
Entities_List(#[from] entities::list::Error),
#[error(transparent)]
Entities_Get(#[from] entities::get::Error),
#[error(transparent)]
Entities_Expand(#[from] entities::expand::Error),
#[error(transparent)]
EntitiesGetTimeline_List(#[from] entities_get_timeline::list::Error),
#[error(transparent)]
Entities_Queries(#[from] entities::queries::Error),
#[error(transparent)]
Entities_GetInsights(#[from] entities::get_insights::Error),
#[error(transparent)]
EntitiesRelations_List(#[from] entities_relations::list::Error),
#[error(transparent)]
EntityRelations_GetRelation(#[from] entity_relations::get_relation::Error),
#[error(transparent)]
OfficeConsents_List(#[from] office_consents::list::Error),
#[error(transparent)]
OfficeConsents_Get(#[from] office_consents::get::Error),
#[error(transparent)]
OfficeConsents_Delete(#[from] office_consents::delete::Error),
#[error(transparent)]
ThreatIntelligenceIndicator_CreateIndicator(#[from] threat_intelligence_indicator::create_indicator::Error),
#[error(transparent)]
ThreatIntelligenceIndicators_List(#[from] threat_intelligence_indicators::list::Error),
#[error(transparent)]
ThreatIntelligenceIndicator_Get(#[from] threat_intelligence_indicator::get::Error),
#[error(transparent)]
ThreatIntelligenceIndicator_Create(#[from] threat_intelligence_indicator::create::Error),
#[error(transparent)]
ThreatIntelligenceIndicator_Delete(#[from] threat_intelligence_indicator::delete::Error),
#[error(transparent)]
ThreatIntelligenceIndicator_QueryIndicators(#[from] threat_intelligence_indicator::query_indicators::Error),
#[error(transparent)]
ThreatIntelligenceIndicatorMetrics_List(#[from] threat_intelligence_indicator_metrics::list::Error),
#[error(transparent)]
ThreatIntelligenceIndicator_AppendTags(#[from] threat_intelligence_indicator::append_tags::Error),
#[error(transparent)]
ThreatIntelligenceIndicator_ReplaceTags(#[from] threat_intelligence_indicator::replace_tags::Error),
}
pub mod cases_aggregations {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
aggregations_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
aggregations_name: aggregations_name.into(),
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) aggregations_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Aggregations, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/aggregations/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.aggregations_name
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Aggregations =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod operations {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(&self) -> list::Builder {
list::Builder { client: self.0.clone() }
}
}
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::OperationsList, Error>> {
Box::pin(async move {
let url_str = &format!("{}/providers/Microsoft.SecurityInsights/operations", self.client.endpoint(),);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::OperationsList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
}
pub mod alert_rules {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
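        /// Gets all alert rules of the given workspace.
        ///
        /// A minimal usage sketch (illustrative only; assumes an
        /// `alert_rules::Client` obtained from this crate's top-level client,
        /// with placeholder argument values):
        ///
        /// ```ignore
        /// let rules = alert_rules_client
        ///     .list(
        ///         "my-subscription-id",
        ///         "my-resource-group",
        ///         "Microsoft.OperationalInsights",
        ///         "my-workspace",
        ///     )
        ///     .into_future()
        ///     .await?;
        /// ```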
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
}
}
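        /// Gets the alert rule with the given rule ID.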
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
rule_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
rule_id: rule_id.into(),
}
}
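        /// Creates or updates the alert rule with the given rule ID.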
pub fn create_or_update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
rule_id: impl Into<String>,
alert_rule: impl Into<models::AlertRule>,
) -> create_or_update::Builder {
create_or_update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
rule_id: rule_id.into(),
alert_rule: alert_rule.into(),
}
}
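        /// Deletes the alert rule with the given rule ID.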
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
rule_id: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
rule_id: rule_id.into(),
}
}
}
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AlertRulesList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/alertRules",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.operational_insights_resource_provider,
&self.workspace_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AlertRulesList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) rule_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AlertRule, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/alertRules/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.rule_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AlertRule =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::AlertRule),
Created201(models::AlertRule),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) rule_id: String,
pub(crate) alert_rule: models::AlertRule,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/alertRules/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.rule_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.alert_rule).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AlertRule =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AlertRule =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) rule_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/alertRules/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.rule_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod actions {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
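        /// Gets all actions of the alert rule with the given rule ID.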
pub fn list_by_alert_rule(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
rule_id: impl Into<String>,
) -> list_by_alert_rule::Builder {
list_by_alert_rule::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
rule_id: rule_id.into(),
}
}
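        /// Gets the action with the given action ID, under the given alert rule.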
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
rule_id: impl Into<String>,
action_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
rule_id: rule_id.into(),
action_id: action_id.into(),
}
}
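        /// Creates or updates the action with the given action ID, under the given alert rule.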
pub fn create_or_update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
rule_id: impl Into<String>,
action_id: impl Into<String>,
action: impl Into<models::ActionRequest>,
) -> create_or_update::Builder {
create_or_update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
rule_id: rule_id.into(),
action_id: action_id.into(),
action: action.into(),
}
}
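        /// Deletes the action with the given action ID, under the given alert rule.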
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
rule_id: impl Into<String>,
action_id: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
rule_id: rule_id.into(),
action_id: action_id.into(),
}
}
}
pub mod list_by_alert_rule {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) rule_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ActionsList, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/alertRules/{}/actions",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.rule_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ActionsList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) rule_id: String,
pub(crate) action_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ActionResponse, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/alertRules/{}/actions/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.rule_id,
                        &self.action_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ActionResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::ActionResponse),
Created201(models::ActionResponse),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) rule_id: String,
pub(crate) action_id: String,
pub(crate) action: models::ActionRequest,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/alertRules/{}/actions/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.rule_id,
                        &self.action_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.action).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ActionResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ActionResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) rule_id: String,
pub(crate) action_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/alertRules/{}/actions/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.rule_id,
                        &self.action_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod alert_rule_templates {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
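        /// Gets all alert rule templates of the given workspace.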
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
}
}
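        /// Gets the alert rule template with the given template ID.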
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
alert_rule_template_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
alert_rule_template_id: alert_rule_template_id.into(),
}
}
}
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AlertRuleTemplatesList, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/alertRuleTemplates",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AlertRuleTemplatesList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) alert_rule_template_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AlertRuleTemplate, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/alertRuleTemplates/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.alert_rule_template_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AlertRuleTemplate =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod data_connectors {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
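        /// Gets all data connectors of the given workspace.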
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
}
}
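        /// Gets the data connector with the given data connector ID.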
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
data_connector_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
data_connector_id: data_connector_id.into(),
}
}
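        /// Creates or updates the data connector with the given data connector ID.
        ///
        /// A minimal usage sketch (illustrative only; building the
        /// `models::DataConnector` payload depends on the connector kind and is
        /// elided here, and `data_connectors_client` is assumed to come from this
        /// crate's top-level client):
        ///
        /// ```ignore
        /// let response = data_connectors_client
        ///     .create_or_update(subscription, resource_group, provider, workspace, connector_id, connector)
        ///     .into_future()
        ///     .await?;
        /// match response {
        ///     create_or_update::Response::Ok200(_) => { /* updated */ }
        ///     create_or_update::Response::Created201(_) => { /* created */ }
        /// }
        /// ```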
pub fn create_or_update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
data_connector_id: impl Into<String>,
data_connector: impl Into<models::DataConnector>,
) -> create_or_update::Builder {
create_or_update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
data_connector_id: data_connector_id.into(),
data_connector: data_connector.into(),
}
}
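        /// Deletes the data connector with the given data connector ID.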
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
data_connector_id: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
data_connector_id: data_connector_id.into(),
}
}
}
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::DataConnectorList, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/dataConnectors",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::DataConnectorList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) data_connector_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::DataConnector, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/dataConnectors/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.data_connector_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::DataConnector =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::DataConnector),
Created201(models::DataConnector),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) data_connector_id: String,
pub(crate) data_connector: models::DataConnector,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/dataConnectors/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.data_connector_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.data_connector).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::DataConnector =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::DataConnector =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) data_connector_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/dataConnectors/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.data_connector_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod product_settings {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
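        /// Gets all settings of the given workspace.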
pub fn get_all(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
) -> get_all::Builder {
get_all::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
}
}
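        /// Gets the setting with the given settings name.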
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
settings_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
settings_name: settings_name.into(),
}
}
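        /// Updates the setting with the given settings name.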
pub fn update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
settings_name: impl Into<String>,
settings: impl Into<models::Settings>,
) -> update::Builder {
update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
settings_name: settings_name.into(),
settings: settings.into(),
}
}
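        /// Deletes the setting with the given settings name.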
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
settings_name: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
settings_name: settings_name.into(),
}
}
}
pub mod get_all {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SettingList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/settings",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.operational_insights_resource_provider,
&self.workspace_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::SettingList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) settings_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Settings, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/settings/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.operational_insights_resource_provider,
&self.workspace_name,
&self.settings_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Settings =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) settings_name: String,
pub(crate) settings: models::Settings,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Settings, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/settings/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.operational_insights_resource_provider,
&self.workspace_name,
&self.settings_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.settings).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Settings =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) settings_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/settings/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.operational_insights_resource_provider,
&self.workspace_name,
&self.settings_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
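/// Operations on the Microsoft.SecurityInsights entity query resources of a workspace.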
pub mod entity_queries {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
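/// Gets all entity queries defined in the workspace.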
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
}
}
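/// Gets a single entity query by its id.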
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
entity_query_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
entity_query_id: entity_query_id.into(),
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::EntityQueryList, Error>> {
Box::pin(async move {
let url_str = &format!(
    "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/entityQueries",
    self.client.endpoint(),
    &self.subscription_id,
    &self.resource_group_name,
    &self.operational_insights_resource_provider,
    &self.workspace_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::EntityQueryList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) entity_query_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::EntityQuery, Error>> {
Box::pin(async move {
let url_str = &format!(
    "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/entityQueries/{}",
    self.client.endpoint(),
    &self.subscription_id,
    &self.resource_group_name,
    &self.operational_insights_resource_provider,
    &self.workspace_name,
    &self.entity_query_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::EntityQuery =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
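/// Operations on the Microsoft.SecurityInsights incident resources of a workspace.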
pub mod incidents {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
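/// Gets all incidents in the workspace. Optional OData query parameters
/// (`filter`, `orderby`, `top`, `skip_token`) can be set on the returned
/// builder before awaiting it.
///
/// A minimal usage sketch, assuming a configured top-level `Client` that
/// exposes an `incidents()` accessor (the construction of `client` and the
/// argument values are placeholders, not part of this module):
///
/// ```ignore
/// let incidents = client
///     .incidents()
///     .list(subscription_id, resource_group, resource_provider, workspace_name)
///     .filter("properties/status eq 'Active'")
///     .top(10)
///     .into_future()
///     .await?;
/// ```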
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
filter: None,
orderby: None,
top: None,
skip_token: None,
}
}
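/// Gets a single incident by its id.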
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
incident_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
incident_id: incident_id.into(),
}
}
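/// Creates or updates an incident via PUT; the `Response` enum distinguishes `Ok200` (updated) from `Created201` (created).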
pub fn create_or_update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
incident_id: impl Into<String>,
incident: impl Into<models::Incident>,
) -> create_or_update::Builder {
create_or_update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
incident_id: incident_id.into(),
incident: incident.into(),
}
}
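/// Deletes an incident; the service answers with `Ok200` or `NoContent204`.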
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
incident_id: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
incident_id: incident_id.into(),
}
}
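/// Gets all alerts attached to an incident (issued as a POST with an empty body).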
pub fn list_of_alerts(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
incident_id: impl Into<String>,
) -> list_of_alerts::Builder {
list_of_alerts::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
incident_id: incident_id.into(),
}
}
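/// Gets all bookmarks attached to an incident (issued as a POST with an empty body).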
pub fn list_of_bookmarks(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
incident_id: impl Into<String>,
) -> list_of_bookmarks::Builder {
list_of_bookmarks::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
incident_id: incident_id.into(),
}
}
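/// Gets all entities attached to an incident (issued as a POST with an empty body).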
pub fn list_of_entities(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
incident_id: impl Into<String>,
) -> list_of_entities::Builder {
list_of_entities::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
incident_id: incident_id.into(),
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) filter: Option<String>,
pub(crate) orderby: Option<String>,
pub(crate) top: Option<i32>,
pub(crate) skip_token: Option<String>,
}
impl Builder {
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn orderby(mut self, orderby: impl Into<String>) -> Self {
self.orderby = Some(orderby.into());
self
}
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn skip_token(mut self, skip_token: impl Into<String>) -> Self {
self.skip_token = Some(skip_token.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::IncidentList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/incidents",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.operational_insights_resource_provider,
&self.workspace_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(orderby) = &self.orderby {
url.query_pairs_mut().append_pair("$orderby", orderby);
}
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
if let Some(skip_token) = &self.skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::IncidentList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) incident_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Incident, Error>> {
Box::pin(async move {
let url_str = &format!(
    "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}",
    self.client.endpoint(),
    &self.subscription_id,
    &self.resource_group_name,
    &self.operational_insights_resource_provider,
    &self.workspace_name,
    &self.incident_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Incident =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::Incident),
Created201(models::Incident),
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) incident_id: String,
pub(crate) incident: models::Incident,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
    "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}",
    self.client.endpoint(),
    &self.subscription_id,
    &self.resource_group_name,
    &self.operational_insights_resource_provider,
    &self.workspace_name,
    &self.incident_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.incident).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Incident =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Incident =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) incident_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
    "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}",
    self.client.endpoint(),
    &self.subscription_id,
    &self.resource_group_name,
    &self.operational_insights_resource_provider,
    &self.workspace_name,
    &self.incident_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_of_alerts {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) incident_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::IncidentAlertList, Error>> {
Box::pin(async move {
let url_str = &format!(
    "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/alerts",
    self.client.endpoint(),
    &self.subscription_id,
    &self.resource_group_name,
    &self.operational_insights_resource_provider,
    &self.workspace_name,
    &self.incident_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::IncidentAlertList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_of_bookmarks {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) incident_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::IncidentBookmarkList, Error>> {
Box::pin(async move {
let url_str = &format!(
    "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/bookmarks",
    self.client.endpoint(),
    &self.subscription_id,
    &self.resource_group_name,
    &self.operational_insights_resource_provider,
    &self.workspace_name,
    &self.incident_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::IncidentBookmarkList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_of_entities {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) incident_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::IncidentEntitiesResponse, Error>> {
Box::pin(async move {
let url_str = &format!(
    "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/entities",
    self.client.endpoint(),
    &self.subscription_id,
    &self.resource_group_name,
    &self.operational_insights_resource_provider,
    &self.workspace_name,
    &self.incident_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::IncidentEntitiesResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
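/// Operations on the comments attached to a Microsoft.SecurityInsights incident.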
pub mod incident_comments {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
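/// Gets all comments for an incident; the returned builder accepts the same optional OData parameters (`filter`, `orderby`, `top`, `skip_token`) as incident listing.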
pub fn list_by_incident(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
incident_id: impl Into<String>,
) -> list_by_incident::Builder {
list_by_incident::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
incident_id: incident_id.into(),
filter: None,
orderby: None,
top: None,
skip_token: None,
}
}
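/// Gets a single incident comment by its id.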
pub fn get_comment(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
incident_id: impl Into<String>,
incident_comment_id: impl Into<String>,
) -> get_comment::Builder {
get_comment::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
incident_id: incident_id.into(),
incident_comment_id: incident_comment_id.into(),
}
}
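/// Creates an incident comment via PUT; both `Ok200` and `Created201` carry the stored comment.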
pub fn create_comment(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
incident_id: impl Into<String>,
incident_comment_id: impl Into<String>,
incident_comment: impl Into<models::IncidentComment>,
) -> create_comment::Builder {
create_comment::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
incident_id: incident_id.into(),
incident_comment_id: incident_comment_id.into(),
incident_comment: incident_comment.into(),
}
}
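/// Deletes an incident comment; the service answers with `Ok200` or `NoContent204`.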
pub fn delete_comment(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
incident_id: impl Into<String>,
incident_comment_id: impl Into<String>,
) -> delete_comment::Builder {
delete_comment::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
incident_id: incident_id.into(),
incident_comment_id: incident_comment_id.into(),
}
}
}
pub mod list_by_incident {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) incident_id: String,
pub(crate) filter: Option<String>,
pub(crate) orderby: Option<String>,
pub(crate) top: Option<i32>,
pub(crate) skip_token: Option<String>,
}
impl Builder {
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn orderby(mut self, orderby: impl Into<String>) -> Self {
self.orderby = Some(orderby.into());
self
}
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn skip_token(mut self, skip_token: impl Into<String>) -> Self {
self.skip_token = Some(skip_token.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::IncidentCommentList, Error>> {
Box::pin(async move {
let url_str = &format!(
    "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/comments",
    self.client.endpoint(),
    &self.subscription_id,
    &self.resource_group_name,
    &self.operational_insights_resource_provider,
    &self.workspace_name,
    &self.incident_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(orderby) = &self.orderby {
url.query_pairs_mut().append_pair("$orderby", orderby);
}
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
if let Some(skip_token) = &self.skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::IncidentCommentList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get_comment {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) incident_id: String,
pub(crate) incident_comment_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::IncidentComment, Error>> {
Box::pin(async move {
let url_str = &format!(
    "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/comments/{}",
    self.client.endpoint(),
    &self.subscription_id,
    &self.resource_group_name,
    &self.operational_insights_resource_provider,
    &self.workspace_name,
    &self.incident_id,
    &self.incident_comment_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::IncidentComment =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_comment {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::IncidentComment),
Created201(models::IncidentComment),
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) incident_id: String,
pub(crate) incident_comment_id: String,
pub(crate) incident_comment: models::IncidentComment,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
    "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/comments/{}",
    self.client.endpoint(),
    &self.subscription_id,
    &self.resource_group_name,
    &self.operational_insights_resource_provider,
    &self.workspace_name,
    &self.incident_id,
    &self.incident_comment_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.incident_comment).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::IncidentComment =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::IncidentComment =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete_comment {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) incident_id: String,
pub(crate) incident_comment_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/comments/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.incident_id,
                        &self.incident_comment_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod incident_relations {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
incident_id: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
incident_id: incident_id.into(),
filter: None,
orderby: None,
top: None,
skip_token: None,
}
}
pub fn get_relation(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
incident_id: impl Into<String>,
relation_name: impl Into<String>,
) -> get_relation::Builder {
get_relation::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
incident_id: incident_id.into(),
relation_name: relation_name.into(),
}
}
pub fn create_or_update_relation(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
incident_id: impl Into<String>,
relation_name: impl Into<String>,
relation: impl Into<models::Relation>,
) -> create_or_update_relation::Builder {
create_or_update_relation::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
incident_id: incident_id.into(),
relation_name: relation_name.into(),
relation: relation.into(),
}
}
pub fn delete_relation(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
incident_id: impl Into<String>,
relation_name: impl Into<String>,
) -> delete_relation::Builder {
delete_relation::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
incident_id: incident_id.into(),
relation_name: relation_name.into(),
}
}
}
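    /// Lists the relations of an incident. An illustrative sketch only:
    /// `incident_relations` stands for an already-constructed
    /// `incident_relations::Client`, and every argument value (including the
    /// OData filter expression) is a placeholder. The optional `$filter`,
    /// `$orderby`, `$top`, and `$skipToken` query parameters are set by
    /// chaining the corresponding builder methods before awaiting.
    ///
    /// ```ignore
    /// let relations = incident_relations
    ///     .list(
    ///         "subscription-id",
    ///         "resource-group",
    ///         "Microsoft.OperationalInsights",
    ///         "workspace-name",
    ///         "incident-id",
    ///     )
    ///     .filter("properties/relationType eq 'example'")
    ///     .top(50)
    ///     .into_future()
    ///     .await?;
    /// ```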
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) incident_id: String,
pub(crate) filter: Option<String>,
pub(crate) orderby: Option<String>,
pub(crate) top: Option<i32>,
pub(crate) skip_token: Option<String>,
}
impl Builder {
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn orderby(mut self, orderby: impl Into<String>) -> Self {
self.orderby = Some(orderby.into());
self
}
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn skip_token(mut self, skip_token: impl Into<String>) -> Self {
self.skip_token = Some(skip_token.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::RelationList, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/relations",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.incident_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(orderby) = &self.orderby {
url.query_pairs_mut().append_pair("$orderby", orderby);
}
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
if let Some(skip_token) = &self.skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::RelationList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
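    /// Fetches a single relation by name. A minimal sketch, assuming
    /// `incident_relations` is an `incident_relations::Client` and all
    /// arguments are placeholders; on HTTP 200 this resolves to a
    /// `models::Relation`.
    ///
    /// ```ignore
    /// let relation = incident_relations
    ///     .get_relation(
    ///         "subscription-id",
    ///         "resource-group",
    ///         "Microsoft.OperationalInsights",
    ///         "workspace-name",
    ///         "incident-id",
    ///         "relation-name",
    ///     )
    ///     .into_future()
    ///     .await?;
    /// ```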
pub mod get_relation {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) incident_id: String,
pub(crate) relation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Relation, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/relations/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.incident_id,
                        &self.relation_name
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Relation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
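    /// Creates or updates a relation with a PUT whose JSON body is the
    /// supplied `models::Relation`. A sketch assuming `relation` has already
    /// been populated and the identifiers are placeholders; the two
    /// `Response` variants distinguish an update (200) from a creation (201).
    ///
    /// ```ignore
    /// match incident_relations
    ///     .create_or_update_relation(
    ///         "subscription-id",
    ///         "resource-group",
    ///         "Microsoft.OperationalInsights",
    ///         "workspace-name",
    ///         "incident-id",
    ///         "relation-name",
    ///         relation,
    ///     )
    ///     .into_future()
    ///     .await?
    /// {
    ///     create_or_update_relation::Response::Ok200(r) => println!("updated: {:?}", r),
    ///     create_or_update_relation::Response::Created201(r) => println!("created: {:?}", r),
    /// }
    /// ```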
pub mod create_or_update_relation {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::Relation),
Created201(models::Relation),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) incident_id: String,
pub(crate) relation_name: String,
pub(crate) relation: models::Relation,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/relations/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.incident_id,
                        &self.relation_name
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.relation).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Relation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Relation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
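    /// Deletes a relation. A sketch with placeholder identifiers, assuming
    /// `incident_relations` is an `incident_relations::Client`; 200 reports a
    /// deletion, 204 that the relation was already absent.
    ///
    /// ```ignore
    /// match incident_relations
    ///     .delete_relation(
    ///         "subscription-id",
    ///         "resource-group",
    ///         "Microsoft.OperationalInsights",
    ///         "workspace-name",
    ///         "incident-id",
    ///         "relation-name",
    ///     )
    ///     .into_future()
    ///     .await?
    /// {
    ///     delete_relation::Response::Ok200 => println!("relation deleted"),
    ///     delete_relation::Response::NoContent204 => println!("nothing to delete"),
    /// }
    /// ```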
pub mod delete_relation {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) incident_id: String,
pub(crate) relation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/incidents/{}/relations/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.incident_id,
                        &self.relation_name
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod watchlists {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
}
}
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
watchlist_alias: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
watchlist_alias: watchlist_alias.into(),
}
}
pub fn create(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
watchlist_alias: impl Into<String>,
watchlist: impl Into<models::Watchlist>,
) -> create::Builder {
create::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
watchlist_alias: watchlist_alias.into(),
watchlist: watchlist.into(),
}
}
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
watchlist_alias: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
watchlist_alias: watchlist_alias.into(),
}
}
}
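    /// Lists every watchlist in a workspace. A minimal sketch, assuming
    /// `watchlists` is a `watchlists::Client` and the arguments are
    /// placeholders; on HTTP 200 this resolves to a `models::WatchlistList`.
    ///
    /// ```ignore
    /// let page = watchlists
    ///     .list(
    ///         "subscription-id",
    ///         "resource-group",
    ///         "Microsoft.OperationalInsights",
    ///         "workspace-name",
    ///     )
    ///     .into_future()
    ///     .await?;
    /// ```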
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::WatchlistList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.operational_insights_resource_provider,
&self.workspace_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::WatchlistList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) watchlist_alias: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Watchlist, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.watchlist_alias
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Watchlist =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
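    /// Creates (or replaces) a watchlist, addressed by its alias, with a PUT
    /// whose body is the supplied `models::Watchlist`. A sketch assuming
    /// `watchlist` has already been populated and the other arguments are
    /// placeholders; `Response::Ok200` signals an update and
    /// `Response::Created201` a creation.
    ///
    /// ```ignore
    /// let response = watchlists
    ///     .create(
    ///         "subscription-id",
    ///         "resource-group",
    ///         "Microsoft.OperationalInsights",
    ///         "workspace-name",
    ///         "my-watchlist-alias",
    ///         watchlist,
    ///     )
    ///     .into_future()
    ///     .await?;
    /// ```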
pub mod create {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::Watchlist),
Created201(models::Watchlist),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) watchlist_alias: String,
pub(crate) watchlist: models::Watchlist,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.watchlist_alias
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.watchlist).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Watchlist =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Watchlist =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) watchlist_alias: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.watchlist_alias
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod watchlist_items {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
watchlist_alias: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
watchlist_alias: watchlist_alias.into(),
}
}
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
watchlist_alias: impl Into<String>,
watchlist_item_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
watchlist_alias: watchlist_alias.into(),
watchlist_item_id: watchlist_item_id.into(),
}
}
pub fn create_or_update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
watchlist_alias: impl Into<String>,
watchlist_item_id: impl Into<String>,
watchlist_item: impl Into<models::WatchlistItem>,
) -> create_or_update::Builder {
create_or_update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
watchlist_alias: watchlist_alias.into(),
watchlist_item_id: watchlist_item_id.into(),
watchlist_item: watchlist_item.into(),
}
}
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
watchlist_alias: impl Into<String>,
watchlist_item_id: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
watchlist_alias: watchlist_alias.into(),
watchlist_item_id: watchlist_item_id.into(),
}
}
}
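    /// Lists the items of one watchlist, addressed by the watchlist alias.
    /// A minimal sketch, assuming `watchlist_items` is a
    /// `watchlist_items::Client` and the arguments are placeholders.
    ///
    /// ```ignore
    /// let items = watchlist_items
    ///     .list(
    ///         "subscription-id",
    ///         "resource-group",
    ///         "Microsoft.OperationalInsights",
    ///         "workspace-name",
    ///         "my-watchlist-alias",
    ///     )
    ///     .into_future()
    ///     .await?;
    /// ```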
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) watchlist_alias: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::WatchlistItemList, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}/watchlistItems",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.watchlist_alias
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::WatchlistItemList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) watchlist_alias: String,
pub(crate) watchlist_item_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::WatchlistItem, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}/watchlistItems/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.watchlist_alias,
                        &self.watchlist_item_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::WatchlistItem =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::WatchlistItem),
Created201(models::WatchlistItem),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) watchlist_alias: String,
pub(crate) watchlist_item_id: String,
pub(crate) watchlist_item: models::WatchlistItem,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}/watchlistItems/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.watchlist_alias,
                        &self.watchlist_item_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.watchlist_item).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::WatchlistItem =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::WatchlistItem =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) watchlist_alias: String,
pub(crate) watchlist_item_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/watchlists/{}/watchlistItems/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.watchlist_alias,
                        &self.watchlist_item_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod automation_rules {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
}
}
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
automation_rule_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
automation_rule_id: automation_rule_id.into(),
}
}
pub fn create_or_update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
automation_rule_id: impl Into<String>,
automation_rule: impl Into<models::AutomationRule>,
) -> create_or_update::Builder {
create_or_update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
automation_rule_id: automation_rule_id.into(),
automation_rule: automation_rule.into(),
}
}
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
automation_rule_id: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
automation_rule_id: automation_rule_id.into(),
}
}
}
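    /// Lists the automation rules of a workspace. A minimal sketch, assuming
    /// `automation_rules` is an `automation_rules::Client` and the arguments
    /// are placeholders; on HTTP 200 this resolves to a
    /// `models::AutomationRulesList`.
    ///
    /// ```ignore
    /// let rules = automation_rules
    ///     .list(
    ///         "subscription-id",
    ///         "resource-group",
    ///         "Microsoft.OperationalInsights",
    ///         "workspace-name",
    ///     )
    ///     .into_future()
    ///     .await?;
    /// ```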
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AutomationRulesList, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/automationRules",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AutomationRulesList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) automation_rule_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AutomationRule, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/automationRules/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.automation_rule_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AutomationRule =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::AutomationRule),
Created201(models::AutomationRule),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) automation_rule_id: String,
pub(crate) automation_rule: models::AutomationRule,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/automationRules/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.automation_rule_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.automation_rule).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AutomationRule =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AutomationRule =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) automation_rule_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/automationRules/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.automation_rule_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod bookmarks {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
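        /// Gets all bookmarks in the workspace.
        ///
        /// A minimal usage sketch (illustrative only: `client` is a
        /// `bookmarks::Client` obtained elsewhere, and all resource names are
        /// placeholders):
        ///
        /// ```ignore
        /// let bookmarks = client
        ///     .list(
        ///         "subscription-id",
        ///         "resource-group",
        ///         "Microsoft.OperationalInsights", // resource provider namespace
        ///         "workspace-name",
        ///     )
        ///     .into_future()
        ///     .await?;
        /// ```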
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
}
}
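        /// Gets a bookmark by its id.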
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
bookmark_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
bookmark_id: bookmark_id.into(),
}
}
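        /// Creates or updates the bookmark with the given id.
        ///
        /// A minimal usage sketch (illustrative only: `bookmark` is an assumed
        /// `models::Bookmark` value). Matching on the returned
        /// `create_or_update::Response` distinguishes an update (200 OK) from a
        /// create (201 Created):
        ///
        /// ```ignore
        /// match client
        ///     .create_or_update("subscription-id", "resource-group", "Microsoft.OperationalInsights", "workspace-name", "bookmark-id", bookmark)
        ///     .into_future()
        ///     .await?
        /// {
        ///     create_or_update::Response::Ok200(b) => println!("updated: {:?}", b),
        ///     create_or_update::Response::Created201(b) => println!("created: {:?}", b),
        /// }
        /// ```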
pub fn create_or_update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
bookmark_id: impl Into<String>,
bookmark: impl Into<models::Bookmark>,
) -> create_or_update::Builder {
create_or_update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
bookmark_id: bookmark_id.into(),
bookmark: bookmark.into(),
}
}
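        /// Deletes the bookmark with the given id.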
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
bookmark_id: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
bookmark_id: bookmark_id.into(),
}
}
}
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::BookmarkList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/bookmarks",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.operational_insights_resource_provider,
&self.workspace_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::BookmarkList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) bookmark_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Bookmark, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/bookmarks/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.bookmark_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Bookmark =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::Bookmark),
Created201(models::Bookmark),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) bookmark_id: String,
pub(crate) bookmark: models::Bookmark,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/bookmarks/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.bookmark_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.bookmark).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Bookmark =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Bookmark =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) bookmark_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/bookmarks/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.bookmark_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod bookmark_relations {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
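        /// Gets all relations of a bookmark. The builder exposes optional OData
        /// query parameters (`$filter`, `$orderby`, `$top`, `$skipToken`).
        ///
        /// A minimal usage sketch (illustrative only: the `$orderby` expression
        /// below is a placeholder, not a documented field path):
        ///
        /// ```ignore
        /// let relations = client
        ///     .list("subscription-id", "resource-group", "Microsoft.OperationalInsights", "workspace-name", "bookmark-id")
        ///     .orderby("properties/createdTimeUtc desc") // placeholder expression
        ///     .top(10)
        ///     .into_future()
        ///     .await?;
        /// ```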
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
bookmark_id: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
bookmark_id: bookmark_id.into(),
filter: None,
orderby: None,
top: None,
skip_token: None,
}
}
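        /// Gets a bookmark relation by name.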
pub fn get_relation(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
bookmark_id: impl Into<String>,
relation_name: impl Into<String>,
) -> get_relation::Builder {
get_relation::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
bookmark_id: bookmark_id.into(),
relation_name: relation_name.into(),
}
}
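        /// Creates or updates the named bookmark relation.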
pub fn create_or_update_relation(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
bookmark_id: impl Into<String>,
relation_name: impl Into<String>,
relation: impl Into<models::Relation>,
) -> create_or_update_relation::Builder {
create_or_update_relation::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
bookmark_id: bookmark_id.into(),
relation_name: relation_name.into(),
relation: relation.into(),
}
}
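        /// Deletes the named bookmark relation.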
pub fn delete_relation(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
bookmark_id: impl Into<String>,
relation_name: impl Into<String>,
) -> delete_relation::Builder {
delete_relation::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
bookmark_id: bookmark_id.into(),
relation_name: relation_name.into(),
}
}
}
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) bookmark_id: String,
pub(crate) filter: Option<String>,
pub(crate) orderby: Option<String>,
pub(crate) top: Option<i32>,
pub(crate) skip_token: Option<String>,
}
impl Builder {
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn orderby(mut self, orderby: impl Into<String>) -> Self {
self.orderby = Some(orderby.into());
self
}
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn skip_token(mut self, skip_token: impl Into<String>) -> Self {
self.skip_token = Some(skip_token.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::RelationList, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/bookmarks/{}/relations",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.bookmark_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(orderby) = &self.orderby {
url.query_pairs_mut().append_pair("$orderby", orderby);
}
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
if let Some(skip_token) = &self.skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::RelationList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get_relation {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) bookmark_id: String,
pub(crate) relation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Relation, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/bookmarks/{}/relations/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.bookmark_id,
                        &self.relation_name
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Relation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update_relation {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::Relation),
Created201(models::Relation),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) bookmark_id: String,
pub(crate) relation_name: String,
pub(crate) relation: models::Relation,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/bookmarks/{}/relations/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.bookmark_id,
                        &self.relation_name
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.relation).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Relation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Relation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete_relation {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) bookmark_id: String,
pub(crate) relation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/bookmarks/{}/relations/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.bookmark_id,
                        &self.relation_name
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod bookmark {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
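        /// Expands the given bookmark.
        ///
        /// A minimal usage sketch (illustrative only: constructing the
        /// `models::BookmarkExpandParameters` value is omitted):
        ///
        /// ```ignore
        /// let expansion = client
        ///     .expand("subscription-id", "resource-group", "Microsoft.OperationalInsights", "workspace-name", "bookmark-id", parameters)
        ///     .into_future()
        ///     .await?;
        /// ```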
pub fn expand(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
bookmark_id: impl Into<String>,
parameters: impl Into<models::BookmarkExpandParameters>,
) -> expand::Builder {
expand::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
bookmark_id: bookmark_id.into(),
parameters: parameters.into(),
}
}
}
pub mod expand {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) bookmark_id: String,
pub(crate) parameters: models::BookmarkExpandParameters,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::BookmarkExpandResponse, Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/bookmarks/{}/expand",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.operational_insights_resource_provider,
                        &self.workspace_name,
                        &self.bookmark_id
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::BookmarkExpandResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod cases {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
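        /// Gets all cases in the workspace. Like the bookmark-relation list, the
        /// builder exposes optional OData query parameters (`$filter`, `$orderby`,
        /// `$top`, `$skipToken`) for filtering and paging.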
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
filter: None,
orderby: None,
top: None,
skip_token: None,
}
}
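        /// Gets a case by its id.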
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
case_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
case_id: case_id.into(),
}
}
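        /// Creates or updates the case with the given id.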
pub fn create_or_update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
case_id: impl Into<String>,
case: impl Into<models::Case>,
) -> create_or_update::Builder {
create_or_update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
case_id: case_id.into(),
case: case.into(),
}
}
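        /// Deletes the case with the given id.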
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
case_id: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
case_id: case_id.into(),
}
}
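        /// Gets a case comment by case id and comment id.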
pub fn get_comment(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
case_id: impl Into<String>,
case_comment_id: impl Into<String>,
) -> get_comment::Builder {
get_comment::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
case_id: case_id.into(),
case_comment_id: case_comment_id.into(),
}
}
}
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) filter: Option<String>,
pub(crate) orderby: Option<String>,
pub(crate) top: Option<i32>,
pub(crate) skip_token: Option<String>,
}
impl Builder {
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn orderby(mut self, orderby: impl Into<String>) -> Self {
self.orderby = Some(orderby.into());
self
}
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn skip_token(mut self, skip_token: impl Into<String>) -> Self {
self.skip_token = Some(skip_token.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::CaseList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/cases",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.operational_insights_resource_provider,
&self.workspace_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(orderby) = &self.orderby {
url.query_pairs_mut().append_pair("$orderby", orderby);
}
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
if let Some(skip_token) = &self.skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CaseList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) case_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Case, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/cases/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.operational_insights_resource_provider,
&self.workspace_name,
&self.case_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Case =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::Case),
Created201(models::Case),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) case_id: String,
pub(crate) case: models::Case,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/cases/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.operational_insights_resource_provider,
&self.workspace_name,
&self.case_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.case).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Case =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Case =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) case_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/cases/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.operational_insights_resource_provider,
&self.workspace_name,
&self.case_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get_comment {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) case_id: String,
pub(crate) case_comment_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::CaseComment, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/cases/{}/comments/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . operational_insights_resource_provider , & self . workspace_name , & self . case_id , & self . case_comment_id) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CaseComment =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod comments {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list_by_case(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
case_id: impl Into<String>,
) -> list_by_case::Builder {
list_by_case::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
case_id: case_id.into(),
filter: None,
orderby: None,
top: None,
skip_token: None,
}
}
}
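    // Usage sketch (illustrative, not part of the generated surface): assuming
    // the root `Client` exposes a `comments()` accessor like the other operation
    // groups in this crate, and with `sub_id`, `rg`, `provider`, `workspace`,
    // and `case_id` as hypothetical placeholders:
    //
    //     let page = client
    //         .comments()
    //         .list_by_case(sub_id, rg, provider, workspace, case_id)
    //         .top(50)
    //         .into_future()
    //         .await?;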
pub mod list_by_case {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) case_id: String,
pub(crate) filter: Option<String>,
pub(crate) orderby: Option<String>,
pub(crate) top: Option<i32>,
pub(crate) skip_token: Option<String>,
}
impl Builder {
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn orderby(mut self, orderby: impl Into<String>) -> Self {
self.orderby = Some(orderby.into());
self
}
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn skip_token(mut self, skip_token: impl Into<String>) -> Self {
self.skip_token = Some(skip_token.into());
self
}
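            // The optional setters above map one-to-one onto OData query
            // parameters in the URL built below: `filter` -> `$filter`,
            // `orderby` -> `$orderby`, `top` -> `$top`, and
            // `skip_token` -> `$skipToken`.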
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::CaseCommentList, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/cases/{}/comments" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . operational_insights_resource_provider , & self . workspace_name , & self . case_id) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(orderby) = &self.orderby {
url.query_pairs_mut().append_pair("$orderby", orderby);
}
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
if let Some(skip_token) = &self.skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CaseCommentList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod case_comments {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn create_comment(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
case_id: impl Into<String>,
case_comment_id: impl Into<String>,
case_comment: impl Into<models::CaseComment>,
) -> create_comment::Builder {
create_comment::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
case_id: case_id.into(),
case_comment_id: case_comment_id.into(),
case_comment: case_comment.into(),
}
}
}
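    // Usage sketch (illustrative): `create_comment` is a PUT keyed by a
    // caller-chosen `case_comment_id`, with a `models::CaseComment` payload.
    // Assuming a `case_comments()` accessor on the root `Client`, and with all
    // identifier arguments as hypothetical placeholders:
    //
    //     let created = client
    //         .case_comments()
    //         .create_comment(sub_id, rg, provider, workspace, case_id, comment_id, comment)
    //         .into_future()
    //         .await?;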
pub mod create_comment {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) case_id: String,
pub(crate) case_comment_id: String,
pub(crate) case_comment: models::CaseComment,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::CaseComment, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/cases/{}/comments/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . operational_insights_resource_provider , & self . workspace_name , & self . case_id , & self . case_comment_id) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.case_comment).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CaseComment =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod case_relations {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
case_id: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
case_id: case_id.into(),
filter: None,
orderby: None,
top: None,
skip_token: None,
}
}
pub fn get_relation(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
case_id: impl Into<String>,
relation_name: impl Into<String>,
) -> get_relation::Builder {
get_relation::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
case_id: case_id.into(),
relation_name: relation_name.into(),
}
}
pub fn create_or_update_relation(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
case_id: impl Into<String>,
relation_name: impl Into<String>,
relation_input_model: impl Into<models::RelationsModelInput>,
) -> create_or_update_relation::Builder {
create_or_update_relation::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
case_id: case_id.into(),
relation_name: relation_name.into(),
relation_input_model: relation_input_model.into(),
}
}
pub fn delete_relation(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
case_id: impl Into<String>,
relation_name: impl Into<String>,
) -> delete_relation::Builder {
delete_relation::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
case_id: case_id.into(),
relation_name: relation_name.into(),
}
}
}
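    // Usage sketch (illustrative): `create_or_update_relation` is a PUT that
    // distinguishes update (200) from create (201) in its `Response` enum.
    // Assuming a `case_relations()` accessor on the root `Client`, with the
    // identifiers and the `models::RelationsModelInput` payload as hypothetical
    // placeholders:
    //
    //     use create_or_update_relation::Response;
    //     let rsp = client
    //         .case_relations()
    //         .create_or_update_relation(sub_id, rg, provider, workspace, case_id, relation_name, relation)
    //         .into_future()
    //         .await?;
    //     match rsp {
    //         Response::Ok200(rel) | Response::Created201(rel) => println!("{:?}", rel),
    //     }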
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) case_id: String,
pub(crate) filter: Option<String>,
pub(crate) orderby: Option<String>,
pub(crate) top: Option<i32>,
pub(crate) skip_token: Option<String>,
}
impl Builder {
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn orderby(mut self, orderby: impl Into<String>) -> Self {
self.orderby = Some(orderby.into());
self
}
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn skip_token(mut self, skip_token: impl Into<String>) -> Self {
self.skip_token = Some(skip_token.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::CaseRelationList, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/cases/{}/relations" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . operational_insights_resource_provider , & self . workspace_name , & self . case_id) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(orderby) = &self.orderby {
url.query_pairs_mut().append_pair("$orderby", orderby);
}
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
if let Some(skip_token) = &self.skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CaseRelationList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get_relation {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) case_id: String,
pub(crate) relation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::CaseRelation, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/cases/{}/relations/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . operational_insights_resource_provider , & self . workspace_name , & self . case_id , & self . relation_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CaseRelation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update_relation {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::CaseRelation),
Created201(models::CaseRelation),
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) case_id: String,
pub(crate) relation_name: String,
pub(crate) relation_input_model: models::RelationsModelInput,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/cases/{}/relations/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . operational_insights_resource_provider , & self . workspace_name , & self . case_id , & self . relation_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.relation_input_model).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CaseRelation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CaseRelation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete_relation {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) case_id: String,
pub(crate) relation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/cases/{}/relations/{}" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . operational_insights_resource_provider , & self . workspace_name , & self . case_id , & self . relation_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod data_connectors_check_requirements {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn post(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
workspace_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
data_connectors_check_requirements: impl Into<models::DataConnectorsCheckRequirements>,
) -> post::Builder {
post::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
workspace_name: workspace_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
data_connectors_check_requirements: data_connectors_check_requirements.into(),
}
}
}
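    // Usage sketch (illustrative): note that this operation group takes
    // `workspace_name` *before* `operational_insights_resource_provider`,
    // unlike most other groups in this file. Assuming a
    // `data_connectors_check_requirements()` accessor on the root `Client`,
    // with placeholders for the identifiers and the
    // `models::DataConnectorsCheckRequirements` payload:
    //
    //     let state = client
    //         .data_connectors_check_requirements()
    //         .post(sub_id, rg, workspace, provider, requirements)
    //         .into_future()
    //         .await?;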
pub mod post {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) workspace_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) data_connectors_check_requirements: models::DataConnectorsCheckRequirements,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::DataConnectorRequirementsState, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/dataConnectorsCheckRequirements" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . operational_insights_resource_provider , & self . workspace_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.data_connectors_check_requirements).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::DataConnectorRequirementsState =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod ip_geodata {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
ip_address: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
ip_address: ip_address.into(),
}
}
}
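    // Usage sketch (illustrative): the IP address travels as the `ipAddress`
    // query parameter, not as a path segment. Assuming an `ip_geodata()`
    // accessor on the root `Client` (the address below is a TEST-NET-3
    // documentation placeholder):
    //
    //     let geodata = client
    //         .ip_geodata()
    //         .get(sub_id, rg, "203.0.113.7")
    //         .into_future()
    //         .await?;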
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) ip_address: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::EnrichmentIpGeodata, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SecurityInsights/enrichment/ip/geodata/",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let ip_address = &self.ip_address;
url.query_pairs_mut().append_pair("ipAddress", ip_address);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::EnrichmentIpGeodata =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod domain_whois {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
domain: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
domain: domain.into(),
}
}
}
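    // Usage sketch (illustrative): like `ip_geodata`, the lookup key is passed
    // as a query parameter (`domain`). Assuming a `domain_whois()` accessor on
    // the root `Client`:
    //
    //     let whois = client
    //         .domain_whois()
    //         .get(sub_id, rg, "example.com")
    //         .into_future()
    //         .await?;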
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) domain: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::EnrichmentDomainWhois, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.SecurityInsights/enrichment/domain/whois/",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let domain = &self.domain;
url.query_pairs_mut().append_pair("domain", domain);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::EnrichmentDomainWhois =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod entities {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
}
}
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
entity_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
entity_id: entity_id.into(),
}
}
pub fn expand(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
entity_id: impl Into<String>,
parameters: impl Into<models::EntityExpandParameters>,
) -> expand::Builder {
expand::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
entity_id: entity_id.into(),
parameters: parameters.into(),
}
}
pub fn queries(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
entity_id: impl Into<String>,
kind: impl Into<String>,
) -> queries::Builder {
queries::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
entity_id: entity_id.into(),
kind: kind.into(),
}
}
pub fn get_insights(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
entity_id: impl Into<String>,
parameters: impl Into<models::EntityGetInsightsParameters>,
) -> get_insights::Builder {
get_insights::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
entity_id: entity_id.into(),
parameters: parameters.into(),
}
}
}
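    // Usage sketch (illustrative): `expand`, `queries`, and `get_insights` are
    // sub-operations on a single entity (`queries` is a GET keyed by the `kind`
    // query parameter; the other two POST a parameters body). Assuming an
    // `entities()` accessor on the root `Client`, with the identifiers and the
    // `models::EntityGetInsightsParameters` payload as hypothetical
    // placeholders:
    //
    //     let insights = client
    //         .entities()
    //         .get_insights(sub_id, rg, provider, workspace, entity_id, params)
    //         .into_future()
    //         .await?;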
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::EntityList, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/entities",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.operational_insights_resource_provider,
&self.workspace_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::EntityList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) entity_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Entity, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/entities/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.operational_insights_resource_provider,
&self.workspace_name,
&self.entity_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Entity =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod expand {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) entity_id: String,
pub(crate) parameters: models::EntityExpandParameters,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::EntityExpandResponse, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/entities/{}/expand" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . operational_insights_resource_provider , & self . workspace_name , & self . entity_id) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::EntityExpandResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod queries {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) entity_id: String,
pub(crate) kind: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::GetQueriesResponse, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/entities/{}/queries" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . operational_insights_resource_provider , & self . workspace_name , & self . entity_id) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let kind = &self.kind;
url.query_pairs_mut().append_pair("kind", kind);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::GetQueriesResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get_insights {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) entity_id: String,
pub(crate) parameters: models::EntityGetInsightsParameters,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::EntityGetInsightsResponse, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/entities/{}/getInsights" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . operational_insights_resource_provider , & self . workspace_name , & self . entity_id) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, _rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::EntityGetInsightsResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod entities_get_timeline {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
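/// Returns a builder for the entity timeline query; awaiting the builder
/// POSTs `parameters` to `.../entities/{entityId}/getTimeline`.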
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
entity_id: impl Into<String>,
parameters: impl Into<models::EntityTimelineParameters>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
entity_id: entity_id.into(),
parameters: parameters.into(),
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) entity_id: String,
pub(crate) parameters: models::EntityTimelineParameters,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::EntityTimelineResponse, Error>> {
Box::pin(async move {
let url_str = &format!("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/entities/{}/getTimeline", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.operational_insights_resource_provider, &self.workspace_name, &self.entity_id);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::EntityTimelineResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod entities_relations {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
entity_id: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
entity_id: entity_id.into(),
filter: None,
orderby: None,
top: None,
skip_token: None,
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) entity_id: String,
pub(crate) filter: Option<String>,
pub(crate) orderby: Option<String>,
pub(crate) top: Option<i32>,
pub(crate) skip_token: Option<String>,
}
impl Builder {
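// The setters below fill in the optional OData query parameters that
// `into_future` appends to the URL as `$filter`, `$orderby`, `$top`,
// and `$skipToken`; unset options are simply omitted from the request.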
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn orderby(mut self, orderby: impl Into<String>) -> Self {
self.orderby = Some(orderby.into());
self
}
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn skip_token(mut self, skip_token: impl Into<String>) -> Self {
self.skip_token = Some(skip_token.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::RelationList, Error>> {
Box::pin(async move {
let url_str = &format!("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/entities/{}/relations", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.operational_insights_resource_provider, &self.workspace_name, &self.entity_id);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(orderby) = &self.orderby {
url.query_pairs_mut().append_pair("$orderby", orderby);
}
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
if let Some(skip_token) = &self.skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::RelationList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod entity_relations {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn get_relation(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
entity_id: impl Into<String>,
relation_name: impl Into<String>,
) -> get_relation::Builder {
get_relation::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
entity_id: entity_id.into(),
relation_name: relation_name.into(),
}
}
}
pub mod get_relation {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) entity_id: String,
pub(crate) relation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Relation, Error>> {
Box::pin(async move {
let url_str = &format!("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/entities/{}/relations/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.operational_insights_resource_provider, &self.workspace_name, &self.entity_id, &self.relation_name);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Relation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod office_consents {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
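/// Returns a builder that lists all office365 consents in the workspace
/// (GET `.../Microsoft.SecurityInsights/officeConsents`).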
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
}
}
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
consent_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
consent_id: consent_id.into(),
}
}
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
consent_id: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
consent_id: consent_id.into(),
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::OfficeConsentList, Error>> {
Box::pin(async move {
let url_str = &format!("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/officeConsents", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.operational_insights_resource_provider, &self.workspace_name);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::OfficeConsentList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) consent_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::OfficeConsent, Error>> {
Box::pin(async move {
let url_str = &format!("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/officeConsents/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.operational_insights_resource_provider, &self.workspace_name, &self.consent_id);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::OfficeConsent =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
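// Both variants are successful outcomes; by common Azure convention a DELETE
// answers 200 when the resource was removed and 204 when there was nothing
// to remove, so neither is reported as an error.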
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) consent_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/officeConsents/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.operational_insights_resource_provider, &self.workspace_name, &self.consent_id);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod threat_intelligence_indicator {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn create_indicator(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
threat_intelligence_properties: impl Into<models::ThreatIntelligenceIndicatorModelForRequestBody>,
) -> create_indicator::Builder {
create_indicator::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
threat_intelligence_properties: threat_intelligence_properties.into(),
}
}
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
name: name.into(),
}
}
pub fn create(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
name: impl Into<String>,
threat_intelligence_properties: impl Into<models::ThreatIntelligenceIndicatorModelForRequestBody>,
) -> create::Builder {
create::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
name: name.into(),
threat_intelligence_properties: threat_intelligence_properties.into(),
}
}
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
name: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
name: name.into(),
}
}
pub fn query_indicators(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
threat_intelligence_filtering_criteria: impl Into<models::ThreatIntelligenceFilteringCriteria>,
) -> query_indicators::Builder {
query_indicators::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
threat_intelligence_filtering_criteria: threat_intelligence_filtering_criteria.into(),
}
}
pub fn append_tags(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
name: impl Into<String>,
threat_intelligence_append_tags: impl Into<models::ThreatIntelligenceAppendTags>,
) -> append_tags::Builder {
append_tags::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
name: name.into(),
threat_intelligence_append_tags: threat_intelligence_append_tags.into(),
}
}
pub fn replace_tags(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
name: impl Into<String>,
threat_intelligence_replace_tags: impl Into<models::ThreatIntelligenceIndicatorModelForRequestBody>,
) -> replace_tags::Builder {
replace_tags::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
name: name.into(),
threat_intelligence_replace_tags: threat_intelligence_replace_tags.into(),
}
}
}
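// A minimal usage sketch (hypothetical identifiers throughout; assumes an
// `indicators` value of this module's `Client`, obtained from the crate's
// root client via an accessor defined elsewhere in this file):
//
// let info = indicators
//     .get(subscription_id, resource_group, "Microsoft.OperationalInsights", workspace, name)
//     .into_future()
//     .await?;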
pub mod create_indicator {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::ThreatIntelligenceInformation),
Created201(models::ThreatIntelligenceInformation),
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) threat_intelligence_properties: models::ThreatIntelligenceIndicatorModelForRequestBody,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/createIndicator", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.operational_insights_resource_provider, &self.workspace_name);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.threat_intelligence_properties).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ThreatIntelligenceInformation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ThreatIntelligenceInformation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) name: String,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::ThreatIntelligenceInformation, Error>> {
Box::pin(async move {
let url_str = &format!("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/indicators/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.operational_insights_resource_provider, &self.workspace_name, &self.name);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ThreatIntelligenceInformation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::ThreatIntelligenceInformation),
Created201(models::ThreatIntelligenceInformation),
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) name: String,
pub(crate) threat_intelligence_properties: models::ThreatIntelligenceIndicatorModelForRequestBody,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/indicators/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.operational_insights_resource_provider, &self.workspace_name, &self.name);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.threat_intelligence_properties).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ThreatIntelligenceInformation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ThreatIntelligenceInformation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/indicators/{}", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.operational_insights_resource_provider, &self.workspace_name, &self.name);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod query_indicators {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) threat_intelligence_filtering_criteria: models::ThreatIntelligenceFilteringCriteria,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::ThreatIntelligenceInformationList, Error>> {
Box::pin(async move {
let url_str = &format!("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/queryIndicators", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.operational_insights_resource_provider, &self.workspace_name);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.threat_intelligence_filtering_criteria).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ThreatIntelligenceInformationList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod append_tags {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) name: String,
pub(crate) threat_intelligence_append_tags: models::ThreatIntelligenceAppendTags,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> {
Box::pin(async move {
let url_str = &format!("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/indicators/{}/appendTags", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.operational_insights_resource_provider, &self.workspace_name, &self.name);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.threat_intelligence_append_tags).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod replace_tags {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) name: String,
pub(crate) threat_intelligence_replace_tags: models::ThreatIntelligenceIndicatorModelForRequestBody,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::ThreatIntelligenceInformation, Error>> {
Box::pin(async move {
let url_str = &format!("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/indicators/{}/replaceTags", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.operational_insights_resource_provider, &self.workspace_name, &self.name);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.threat_intelligence_replace_tags).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ThreatIntelligenceInformation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod threat_intelligence_indicators {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
filter: None,
orderby: None,
top: None,
skip_token: None,
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
pub(crate) filter: Option<String>,
pub(crate) orderby: Option<String>,
pub(crate) top: Option<i32>,
pub(crate) skip_token: Option<String>,
}
impl Builder {
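// Paging note (an assumption about the service contract, not enforced here):
// this builder fetches a single page of indicators; callers typically
// re-issue the request with `skip_token` from the previous page to continue.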
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn orderby(mut self, orderby: impl Into<String>) -> Self {
self.orderby = Some(orderby.into());
self
}
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn skip_token(mut self, skip_token: impl Into<String>) -> Self {
self.skip_token = Some(skip_token.into());
self
}
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::ThreatIntelligenceInformationList, Error>> {
Box::pin(async move {
let url_str = &format!("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/indicators", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.operational_insights_resource_provider, &self.workspace_name);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
if let Some(orderby) = &self.orderby {
url.query_pairs_mut().append_pair("$orderby", orderby);
}
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
if let Some(skip_token) = &self.skip_token {
url.query_pairs_mut().append_pair("$skipToken", skip_token);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ThreatIntelligenceInformationList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod threat_intelligence_indicator_metrics {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
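/// Returns a builder that fetches aggregated threat intelligence metrics for
/// the workspace (GET `.../threatIntelligence/main/metrics`).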
pub fn list(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
operational_insights_resource_provider: impl Into<String>,
workspace_name: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
operational_insights_resource_provider: operational_insights_resource_provider.into(),
workspace_name: workspace_name.into(),
}
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) operational_insights_resource_provider: String,
pub(crate) workspace_name: String,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::ThreatIntelligenceMetricsList, Error>> {
Box::pin(async move {
                    let url_str = &format!("{}/subscriptions/{}/resourceGroups/{}/providers/{}/workspaces/{}/providers/Microsoft.SecurityInsights/threatIntelligence/main/metrics", self.client.endpoint(), &self.subscription_id, &self.resource_group_name, &self.operational_insights_resource_provider, &self.workspace_name);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ThreatIntelligenceMetricsList =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
|
get_relation::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
|
fileio.rs
|
// Copyright (c) Facebook, Inc. and its affiliates.
// SPDX-License-Identifier: Apache-2.0
extern crate common;
extern crate crypto;
use std::{
collections::HashMap,
path::Path,
sync::{Arc, RwLock},
};
use crate::shared::TFeatures;
use common::{files, timer};
/// load text and update the protocol
pub fn
|
<T>(
input_path: T,
keys: Arc<RwLock<Vec<String>>>,
features: Arc<RwLock<TFeatures>>,
num_features: Arc<RwLock<usize>>,
num_records: Arc<RwLock<usize>>,
) where
T: AsRef<Path>,
{
if let (
Ok(mut input_keys),
Ok(mut input_features),
Ok(mut input_num_features),
Ok(mut input_num_records),
) = (
keys.write(),
features.write(),
num_features.write(),
num_records.write(),
) {
if !input_keys.is_empty() || !input_features.is_empty() {
info!("Data is not empty, skipping reading the data again")
} else {
let (mut keys, mut features) = common::files::transpose_keyed_nums(
common::files::read_csv_as_keyed_nums(input_path, false),
);
assert!(!features.is_empty(), "Empty data features, fail fast, bye!");
input_keys.extend(keys.drain(..));
input_features.extend(features.drain(..));
*input_num_features = input_features.len();
*input_num_records = input_features[0].len();
info!(
"Data initialised with dimensions: cols: {}, rows: {}, keys len: {}",
input_num_features,
input_num_records,
input_keys.len()
);
}
}
}
#[derive(Default, Debug)]
pub struct KeyedCSV {
pub headers: Vec<String>,
pub records: HashMap<String, Vec<String>>,
}
impl KeyedCSV {
/// Returns the keys(first column of the CSV) read from the CSV Input
pub fn get_plain_keys(&self) -> Vec<String> {
self.records
.keys()
.map(|k| k.to_string())
.collect::<Vec<String>>()
}
/// Returns a writable CSV record padded with empty values for every other column except the key
    /// If there is no other column but the key, it adds an empty string instead of the plain key
/// for debugging purposes
pub fn get_empty_record_with_key(
&self,
key: String,
empty_val: Option<&String>,
) -> Vec<String> {
let mut record = vec![key];
let mut until = self.headers.len();
if self.headers.is_empty() {
until = 2;
}
for _ in 1..until {
record.push(empty_val.unwrap_or(&String::new()).to_string());
}
record
}
/// Returns a writable CSV record extended with non-key values from the input CSV
/// If there is no other column but key, it adds the plain key for debugging purposes
pub fn get_record_with_keys(&self, enc_key: String, raw_key: &str) -> Vec<String> {
let mut record = vec![enc_key];
if let Some(extra_columns) = self.records.get(raw_key) {
if extra_columns.is_empty() {
record.push(String::from(raw_key));
} else {
record.extend(extra_columns.iter().cloned());
}
}
record
}
}
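// Illustrative sketch (hypothetical data, not part of the original module):
// demonstrates how KeyedCSV pads and extends records. With headers
// ["id", "email"], a known key is echoed with its stored non-key columns,
// while an unknown key gets one empty value per non-key column.
#[cfg(test)]
fn _keyed_csv_sketch() {
    let mut csv = KeyedCSV::default();
    csv.headers = vec!["id".to_string(), "email".to_string()];
    csv.records
        .insert("k1".to_string(), vec!["a@b.c".to_string()]);
    // Known key: the encrypted key plus the stored non-key columns.
    assert_eq!(csv.get_record_with_keys("enc".to_string(), "k1"), vec!["enc", "a@b.c"]);
    // Unknown key: padded with one empty value per non-key column.
    assert_eq!(csv.get_empty_record_with_key("enc".to_string(), None), vec!["enc", ""]);
}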
pub fn load_data(data: Arc<RwLock<KeyedCSV>>, path: &str, has_headers: bool) {
let t = timer::Timer::new_silent("load");
let mut lines = files::read_csv_as_strings(path);
let text_len = lines.len();
if let Ok(mut wguard) = data.write() {
if wguard.records.is_empty() {
let mut line_it = lines.drain(..);
if has_headers {
if let Some(headers) = line_it.next() {
wguard.headers = headers;
}
}
for line in line_it {
if let Some((key, rest)) = line.split_first() {
wguard.records.insert(key.to_string(), rest.to_vec());
}
}
let keys_len = wguard.records.len();
info!(
"Read {} lines from {} (dedup: {} lines)",
text_len,
path,
text_len - keys_len
);
} else {
warn!("Attempted to run the protocol after the text was already initaialised")
}
t.qps("text read", text_len);
}
}
|
load_data_with_features
|
state.rs
|
//! State transition types
use spl_token::state::{Account, AccountState};
use {
crate::{
big_vec::BigVec, error::StakePoolError, stake_program::Lockup, MAX_WITHDRAWAL_FEE_INCREASE,
WITHDRAWAL_BASELINE_FEE,
},
borsh::{BorshDeserialize, BorshSchema, BorshSerialize},
num_derive::FromPrimitive,
num_traits::FromPrimitive,
solana_program::{
account_info::AccountInfo,
borsh::get_instance_packed_len,
msg,
program_error::ProgramError,
program_memory::sol_memcmp,
program_pack::{Pack, Sealed},
pubkey::{Pubkey, PUBKEY_BYTES},
},
spl_math::checked_ceil_div::CheckedCeilDiv,
std::{convert::TryFrom, fmt, matches},
};
/// Enum representing the account type managed by the program
#[derive(Clone, Debug, PartialEq, BorshDeserialize, BorshSerialize, BorshSchema)]
pub enum AccountType {
/// If the account has not been initialized, the enum will be 0
Uninitialized,
/// Stake pool
StakePool,
/// Validator stake list
ValidatorList,
}
impl Default for AccountType {
fn default() -> Self {
AccountType::Uninitialized
}
}
/// Initialized program details.
#[repr(C)]
#[derive(Clone, Debug, Default, PartialEq, BorshDeserialize, BorshSerialize, BorshSchema)]
pub struct StakePool {
/// Account type, must be StakePool currently
pub account_type: AccountType,
/// Manager authority, allows for updating the staker, manager, and fee account
pub manager: Pubkey,
/// Staker authority, allows for adding and removing validators, and managing stake
/// distribution
pub staker: Pubkey,
/// Stake deposit authority
///
/// If a depositor pubkey is specified on initialization, then deposits must be
/// signed by this authority. If no deposit authority is specified,
/// then the stake pool will default to the result of:
/// `Pubkey::find_program_address(
/// &[&stake_pool_address.to_bytes()[..32], b"deposit"],
/// program_id,
/// )`
pub stake_deposit_authority: Pubkey,
/// Stake withdrawal authority bump seed
/// for `create_program_address(&[state::StakePool account, "withdrawal"])`
pub stake_withdraw_bump_seed: u8,
/// Validator stake list storage account
pub validator_list: Pubkey,
/// Reserve stake account, holds deactivated stake
pub reserve_stake: Pubkey,
/// Pool Mint
pub pool_mint: Pubkey,
/// Manager fee account
pub manager_fee_account: Pubkey,
/// Pool token program id
pub token_program_id: Pubkey,
/// Total stake under management.
/// Note that if `last_update_epoch` does not match the current epoch then
/// this field may not be accurate
pub total_lamports: u64,
/// Total supply of pool tokens (should always match the supply in the Pool Mint)
pub pool_token_supply: u64,
/// Last epoch the `total_lamports` field was updated
pub last_update_epoch: u64,
/// Lockup that all stakes in the pool must have
pub lockup: Lockup,
/// Fee taken as a proportion of rewards each epoch
pub epoch_fee: Fee,
/// Fee for next epoch
pub next_epoch_fee: Option<Fee>,
/// Preferred deposit validator vote account pubkey
pub preferred_deposit_validator_vote_address: Option<Pubkey>,
/// Preferred withdraw validator vote account pubkey
pub preferred_withdraw_validator_vote_address: Option<Pubkey>,
/// Fee assessed on stake deposits
pub stake_deposit_fee: Fee,
/// Fee assessed on withdrawals
pub stake_withdrawal_fee: Fee,
/// Future stake withdrawal fee, to be set for the following epoch
pub next_stake_withdrawal_fee: Option<Fee>,
/// Fees paid out to referrers on referred stake deposits.
/// Expressed as a percentage (0 - 100) of deposit fees.
/// i.e. `stake_deposit_fee`% of stake deposited is collected as deposit fees for every deposit
/// and `stake_referral_fee`% of the collected stake deposit fees is paid out to the referrer
pub stake_referral_fee: u8,
/// Toggles whether the `DepositSol` instruction requires a signature from
/// this `sol_deposit_authority`
pub sol_deposit_authority: Option<Pubkey>,
/// Fee assessed on SOL deposits
pub sol_deposit_fee: Fee,
/// Fees paid out to referrers on referred SOL deposits.
/// Expressed as a percentage (0 - 100) of SOL deposit fees.
/// i.e. `sol_deposit_fee`% of SOL deposited is collected as deposit fees for every deposit
/// and `sol_referral_fee`% of the collected SOL deposit fees is paid out to the referrer
pub sol_referral_fee: u8,
/// Toggles whether the `WithdrawSol` instruction requires a signature from
/// the `deposit_authority`
pub sol_withdraw_authority: Option<Pubkey>,
/// Fee assessed on SOL withdrawals
pub sol_withdrawal_fee: Fee,
/// Future SOL withdrawal fee, to be set for the following epoch
pub next_sol_withdrawal_fee: Option<Fee>,
/// Last epoch's total pool tokens, used only for APR estimation
pub last_epoch_pool_token_supply: u64,
/// Last epoch's total lamports, used only for APR estimation
pub last_epoch_total_lamports: u64,
}
impl StakePool {
/// calculate the pool tokens that should be minted for a deposit of `stake_lamports`
#[inline]
pub fn calc_pool_tokens_for_deposit(&self, stake_lamports: u64) -> Option<u64> {
if self.total_lamports == 0 || self.pool_token_supply == 0 {
return Some(stake_lamports);
}
u64::try_from(
(stake_lamports as u128)
.checked_mul(self.pool_token_supply as u128)?
.checked_div(self.total_lamports as u128)?,
)
.ok()
}
/// calculate lamports amount on withdrawal
#[inline]
pub fn calc_lamports_withdraw_amount(&self, pool_tokens: u64) -> Option<u64> {
// `checked_ceil_div` returns `None` for a 0 quotient result, but in this
// case, a return of 0 is valid for small amounts of pool tokens. So
// we check for that separately
let numerator = (pool_tokens as u128).checked_mul(self.total_lamports as u128)?;
let denominator = self.pool_token_supply as u128;
if numerator < denominator || denominator == 0 {
Some(0)
} else {
let (quotient, _) = numerator.checked_ceil_div(denominator)?;
u64::try_from(quotient).ok()
}
}
/// calculate pool tokens to be deducted as withdrawal fees
#[inline]
pub fn calc_pool_tokens_stake_withdrawal_fee(&self, pool_tokens: u64) -> Option<u64> {
u64::try_from(self.stake_withdrawal_fee.apply(pool_tokens)?).ok()
}
/// calculate pool tokens to be deducted as withdrawal fees
#[inline]
pub fn calc_pool_tokens_sol_withdrawal_fee(&self, pool_tokens: u64) -> Option<u64> {
u64::try_from(self.sol_withdrawal_fee.apply(pool_tokens)?).ok()
}
/// calculate pool tokens to be deducted as stake deposit fees
#[inline]
pub fn calc_pool_tokens_stake_deposit_fee(&self, pool_tokens_minted: u64) -> Option<u64> {
u64::try_from(self.stake_deposit_fee.apply(pool_tokens_minted)?).ok()
}
/// calculate pool tokens to be deducted from deposit fees as referral fees
#[inline]
pub fn calc_pool_tokens_stake_referral_fee(&self, stake_deposit_fee: u64) -> Option<u64> {
u64::try_from(
(stake_deposit_fee as u128)
.checked_mul(self.stake_referral_fee as u128)?
.checked_div(100u128)?,
)
.ok()
}
/// calculate pool tokens to be deducted as SOL deposit fees
#[inline]
pub fn calc_pool_tokens_sol_deposit_fee(&self, pool_tokens_minted: u64) -> Option<u64> {
u64::try_from(self.sol_deposit_fee.apply(pool_tokens_minted)?).ok()
}
/// calculate pool tokens to be deducted from SOL deposit fees as referral fees
#[inline]
pub fn calc_pool_tokens_sol_referral_fee(&self, sol_deposit_fee: u64) -> Option<u64> {
u64::try_from(
(sol_deposit_fee as u128)
.checked_mul(self.sol_referral_fee as u128)?
.checked_div(100u128)?,
)
.ok()
}
/// Calculate the fee in pool tokens that goes to the manager
///
/// This function assumes that `reward_lamports` has not already been added
/// to the stake pool's `total_lamports`
#[inline]
pub fn calc_epoch_fee_amount(&self, reward_lamports: u64) -> Option<u64> {
if reward_lamports == 0 {
return Some(0);
}
let total_lamports = (self.total_lamports as u128).checked_add(reward_lamports as u128)?;
let fee_lamports = self.epoch_fee.apply(reward_lamports)?;
if total_lamports == fee_lamports || self.pool_token_supply == 0 {
Some(reward_lamports)
} else {
u64::try_from(
(self.pool_token_supply as u128)
.checked_mul(fee_lamports)?
.checked_div(total_lamports.checked_sub(fee_lamports)?)?,
)
.ok()
}
}
/// Checks that the withdraw or deposit authority is valid
fn check_program_derived_authority(
authority_address: &Pubkey,
program_id: &Pubkey,
stake_pool_address: &Pubkey,
authority_seed: &[u8],
bump_seed: u8,
) -> Result<(), ProgramError> {
let expected_address = Pubkey::create_program_address(
&[
&stake_pool_address.to_bytes()[..32],
authority_seed,
&[bump_seed],
],
program_id,
)?;
if *authority_address == expected_address {
Ok(())
} else {
msg!(
"Incorrect authority provided, expected {}, received {}",
expected_address,
authority_address
);
Err(StakePoolError::InvalidProgramAddress.into())
}
}
/// Check if the manager fee info is a valid token program account
/// capable of receiving tokens from the mint.
pub(crate) fn check_manager_fee_info(
&self,
manager_fee_info: &AccountInfo,
) -> Result<(), ProgramError> {
let token_account = Account::unpack(&manager_fee_info.data.borrow())?;
if manager_fee_info.owner != &self.token_program_id
|| token_account.state != AccountState::Initialized
|| token_account.mint != self.pool_mint
{
msg!("Manager fee account is not owned by token program, is not initialized, or does not match stake pool's mint");
return Err(StakePoolError::InvalidFeeAccount.into());
}
Ok(())
}
/// Checks that the withdraw authority is valid
#[inline]
pub(crate) fn check_authority_withdraw(
&self,
withdraw_authority: &Pubkey,
program_id: &Pubkey,
stake_pool_address: &Pubkey,
) -> Result<(), ProgramError> {
Self::check_program_derived_authority(
withdraw_authority,
program_id,
stake_pool_address,
crate::AUTHORITY_WITHDRAW,
self.stake_withdraw_bump_seed,
)
}
/// Checks that the deposit authority is valid
#[inline]
pub(crate) fn check_stake_deposit_authority(
&self,
stake_deposit_authority: &Pubkey,
) -> Result<(), ProgramError> {
if self.stake_deposit_authority == *stake_deposit_authority {
Ok(())
} else {
Err(StakePoolError::InvalidStakeDepositAuthority.into())
}
}
/// Checks that the deposit authority is valid
/// Does nothing if `sol_deposit_authority` is currently not set
#[inline]
pub(crate) fn check_sol_deposit_authority(
&self,
maybe_sol_deposit_authority: Result<&AccountInfo, ProgramError>,
) -> Result<(), ProgramError> {
if let Some(auth) = self.sol_deposit_authority {
let sol_deposit_authority = maybe_sol_deposit_authority?;
if auth != *sol_deposit_authority.key {
msg!("Expected {}, received {}", auth, sol_deposit_authority.key);
return Err(StakePoolError::InvalidSolDepositAuthority.into());
}
if !sol_deposit_authority.is_signer {
msg!("SOL Deposit authority signature missing");
return Err(StakePoolError::SignatureMissing.into());
}
}
Ok(())
}
/// Checks that the sol withdraw authority is valid
/// Does nothing if `sol_withdraw_authority` is currently not set
#[inline]
pub(crate) fn check_sol_withdraw_authority(
&self,
maybe_sol_withdraw_authority: Result<&AccountInfo, ProgramError>,
) -> Result<(), ProgramError> {
if let Some(auth) = self.sol_withdraw_authority {
let sol_withdraw_authority = maybe_sol_withdraw_authority?;
if auth != *sol_withdraw_authority.key {
return Err(StakePoolError::InvalidSolWithdrawAuthority.into());
}
if !sol_withdraw_authority.is_signer {
msg!("SOL withdraw authority signature missing");
return Err(StakePoolError::SignatureMissing.into());
}
}
Ok(())
}
/// Check mint is correct
#[inline]
pub(crate) fn check_mint(&self, mint_info: &AccountInfo) -> Result<(), ProgramError> {
if *mint_info.key != self.pool_mint {
Err(StakePoolError::WrongPoolMint.into())
} else {
Ok(())
}
}
/// Check manager validity and signature
pub(crate) fn check_manager(&self, manager_info: &AccountInfo) -> Result<(), ProgramError> {
if *manager_info.key != self.manager {
msg!(
"Incorrect manager provided, expected {}, received {}",
self.manager,
manager_info.key
);
return Err(StakePoolError::WrongManager.into());
}
if !manager_info.is_signer {
msg!("Manager signature missing");
return Err(StakePoolError::SignatureMissing.into());
}
Ok(())
}
/// Check staker validity and signature
pub(crate) fn check_staker(&self, staker_info: &AccountInfo) -> Result<(), ProgramError> {
if *staker_info.key != self.staker {
msg!(
"Incorrect staker provided, expected {}, received {}",
self.staker,
staker_info.key
);
return Err(StakePoolError::WrongStaker.into());
}
if !staker_info.is_signer {
msg!("Staker signature missing");
return Err(StakePoolError::SignatureMissing.into());
}
Ok(())
}
/// Check the validator list is valid
pub fn check_validator_list(
&self,
validator_list_info: &AccountInfo,
) -> Result<(), ProgramError> {
if *validator_list_info.key != self.validator_list {
msg!(
"Invalid validator list provided, expected {}, received {}",
self.validator_list,
validator_list_info.key
);
Err(StakePoolError::InvalidValidatorStakeList.into())
} else {
Ok(())
}
}
/// Check the reserve stake is valid
pub fn check_reserve_stake(
&self,
reserve_stake_info: &AccountInfo,
) -> Result<(), ProgramError> {
if *reserve_stake_info.key != self.reserve_stake {
msg!(
"Invalid reserve stake provided, expected {}, received {}",
self.reserve_stake,
reserve_stake_info.key
);
Err(StakePoolError::InvalidProgramAddress.into())
} else {
Ok(())
}
}
/// Check if StakePool is actually initialized as a stake pool
pub fn is_valid(&self) -> bool {
self.account_type == AccountType::StakePool
}
/// Check if StakePool is currently uninitialized
pub fn is_uninitialized(&self) -> bool {
self.account_type == AccountType::Uninitialized
}
/// Updates one of the StakePool's fees.
pub fn update_fee(&mut self, fee: &FeeType) -> Result<(), StakePoolError> {
match fee {
FeeType::SolReferral(new_fee) => self.sol_referral_fee = *new_fee,
FeeType::StakeReferral(new_fee) => self.stake_referral_fee = *new_fee,
FeeType::Epoch(new_fee) => self.next_epoch_fee = Some(*new_fee),
FeeType::StakeWithdrawal(new_fee) => {
new_fee.check_withdrawal(&self.stake_withdrawal_fee)?;
self.next_stake_withdrawal_fee = Some(*new_fee)
}
FeeType::SolWithdrawal(new_fee) => {
new_fee.check_withdrawal(&self.sol_withdrawal_fee)?;
self.next_sol_withdrawal_fee = Some(*new_fee)
}
FeeType::SolDeposit(new_fee) => self.sol_deposit_fee = *new_fee,
FeeType::StakeDeposit(new_fee) => self.stake_deposit_fee = *new_fee,
};
Ok(())
}
}
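// Illustrative sketch (assumed pool numbers, not part of the original program):
// with 100 lamports backing 50 pool tokens, depositing 10 lamports mints
// 10 * 50 / 100 = 5 tokens, and withdrawing those 5 tokens returns 10 lamports.
#[cfg(test)]
fn _deposit_withdraw_sketch() {
    let pool = StakePool {
        total_lamports: 100,
        pool_token_supply: 50,
        ..StakePool::default()
    };
    assert_eq!(pool.calc_pool_tokens_for_deposit(10), Some(5));
    assert_eq!(pool.calc_lamports_withdraw_amount(5), Some(10));
}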
/// Storage list for all validator stake accounts in the pool.
#[repr(C)]
#[derive(Clone, Debug, Default, PartialEq, BorshDeserialize, BorshSerialize, BorshSchema)]
pub struct ValidatorList {
/// Data outside of the validator list, separated out for cheaper deserializations
pub header: ValidatorListHeader,
/// List of stake info for each validator in the pool
pub validators: Vec<ValidatorStakeInfo>,
}
/// Helper type to deserialize just the start of a ValidatorList
#[repr(C)]
#[derive(Clone, Debug, Default, PartialEq, BorshDeserialize, BorshSerialize, BorshSchema)]
pub struct ValidatorListHeader {
/// Account type, must be ValidatorList currently
pub account_type: AccountType,
/// Maximum allowable number of validators
pub max_validators: u32,
}
/// Status of the stake account in the validator list, for accounting
#[derive(
FromPrimitive, Copy, Clone, Debug, PartialEq, BorshDeserialize, BorshSerialize, BorshSchema,
)]
pub enum StakeStatus {
/// Stake account is active, there may be a transient stake as well
Active,
/// Only transient stake account exists, when a transient stake is
/// deactivating during validator removal
DeactivatingTransient,
/// No more validator stake accounts exist, entry ready for removal during
/// `UpdateStakePoolBalance`
ReadyForRemoval,
}
impl Default for StakeStatus {
fn default() -> Self {
Self::Active
}
}
/// Information about a validator in the pool
///
/// NOTE: ORDER IS VERY IMPORTANT HERE, PLEASE DO NOT RE-ORDER THE FIELDS UNLESS
/// THERE'S AN EXTREMELY GOOD REASON.
///
/// To save on BPF instructions, the serialized bytes are reinterpreted with an
/// unsafe pointer cast, which means that this structure cannot have any
/// undeclared alignment-padding in its representation.
#[repr(C)]
#[derive(Clone, Copy, Debug, Default, PartialEq, BorshDeserialize, BorshSerialize, BorshSchema)]
pub struct ValidatorStakeInfo {
/// Amount of active stake delegated to this validator, minus the minimum
/// required stake amount of rent-exemption + `crate::MINIMUM_ACTIVE_STAKE`
/// (currently 0.001 SOL).
///
/// Note that if `last_update_epoch` does not match the current epoch then
/// this field may not be accurate
pub active_stake_lamports: u64,
/// Amount of transient stake delegated to this validator
///
/// Note that if `last_update_epoch` does not match the current epoch then
/// this field may not be accurate
pub transient_stake_lamports: u64,
/// Last epoch the active and transient stake lamports fields were updated
pub last_update_epoch: u64,
    /// Start of the validator transient account seed suffixes
pub transient_seed_suffix_start: u64,
/// End of the validator transient account seed suffixes
pub transient_seed_suffix_end: u64,
/// Status of the validator stake account
pub status: StakeStatus,
/// Validator vote account address
pub vote_account_address: Pubkey,
}
impl ValidatorStakeInfo {
/// Get the total lamports delegated to this validator (active and transient)
pub fn stake_lamports(&self) -> u64 {
self.active_stake_lamports
.checked_add(self.transient_stake_lamports)
.unwrap()
}
/// Performs a very cheap comparison, for checking if this validator stake
/// info matches the vote account address
pub fn memcmp_pubkey(data: &[u8], vote_address_bytes: &[u8]) -> bool {
sol_memcmp(
&data[41..41 + PUBKEY_BYTES],
vote_address_bytes,
PUBKEY_BYTES,
) == 0
}
/// Performs a very cheap comparison, for checking if this validator stake
/// info does not have active lamports equal to the given bytes
pub fn active_lamports_not_equal(data: &[u8], lamports_le_bytes: &[u8]) -> bool {
sol_memcmp(&data[0..8], lamports_le_bytes, 8) != 0
}
/// Performs a very cheap comparison, for checking if this validator stake
/// info does not have lamports equal to the given bytes
|
    /// Checks that the validator stake info has not been flagged for removal
pub fn is_not_removed(data: &[u8]) -> bool {
FromPrimitive::from_u8(data[40]) != Some(StakeStatus::ReadyForRemoval)
}
}
impl Sealed for ValidatorStakeInfo {}
impl Pack for ValidatorStakeInfo {
const LEN: usize = 73;
fn pack_into_slice(&self, data: &mut [u8]) {
let mut data = data;
self.serialize(&mut data).unwrap();
}
fn unpack_from_slice(src: &[u8]) -> Result<Self, ProgramError> {
let unpacked = Self::try_from_slice(src)?;
Ok(unpacked)
}
}
impl ValidatorList {
/// Create an empty instance containing space for `max_validators` and preferred validator keys
pub fn new(max_validators: u32) -> Self {
Self {
header: ValidatorListHeader {
account_type: AccountType::ValidatorList,
max_validators,
},
validators: vec![ValidatorStakeInfo::default(); max_validators as usize],
}
}
/// Calculate the number of validator entries that fit in the provided length
pub fn calculate_max_validators(buffer_length: usize) -> usize {
let header_size = ValidatorListHeader::LEN + 4;
buffer_length.saturating_sub(header_size) / ValidatorStakeInfo::LEN
}
    /// Checks whether the list contains a validator with the given vote account pubkey
pub fn contains(&self, vote_account_address: &Pubkey) -> bool {
self.validators
.iter()
.any(|x| x.vote_account_address == *vote_account_address)
}
    /// Returns a mutable reference to the validator with the given vote account pubkey, if any
pub fn find_mut(&mut self, vote_account_address: &Pubkey) -> Option<&mut ValidatorStakeInfo> {
self.validators
.iter_mut()
.find(|x| x.vote_account_address == *vote_account_address)
}
    /// Returns a reference to the validator with the given vote account pubkey, if any
pub fn find(&self, vote_account_address: &Pubkey) -> Option<&ValidatorStakeInfo> {
self.validators
.iter()
.find(|x| x.vote_account_address == *vote_account_address)
}
/// Check if the list has any active stake
pub fn has_active_stake(&self) -> bool {
self.validators.iter().any(|x| x.active_stake_lamports > 0)
}
}
impl ValidatorListHeader {
const LEN: usize = 1 + 4;
/// Check if validator stake list is actually initialized as a validator stake list
pub fn is_valid(&self) -> bool {
self.account_type == AccountType::ValidatorList
}
/// Check if the validator stake list is uninitialized
pub fn is_uninitialized(&self) -> bool {
self.account_type == AccountType::Uninitialized
}
/// Extracts a slice of ValidatorStakeInfo types from the vec part
/// of the ValidatorList
pub fn deserialize_mut_slice(
data: &mut [u8],
skip: usize,
len: usize,
) -> Result<(Self, Vec<&mut ValidatorStakeInfo>), ProgramError> {
let (header, mut big_vec) = Self::deserialize_vec(data)?;
let validator_list = big_vec.deserialize_mut_slice::<ValidatorStakeInfo>(skip, len)?;
Ok((header, validator_list))
}
/// Extracts the validator list into its header and internal BigVec
pub fn deserialize_vec(data: &mut [u8]) -> Result<(Self, BigVec), ProgramError> {
let mut data_mut = &data[..];
let header = ValidatorListHeader::deserialize(&mut data_mut)?;
let length = get_instance_packed_len(&header)?;
let big_vec = BigVec {
data: &mut data[length..],
};
Ok((header, big_vec))
}
}
/// Fee rate as a ratio, minted on `UpdateStakePoolBalance` as a proportion of
/// the rewards
/// If either the numerator or the denominator is 0, the fee is considered to be 0
#[repr(C)]
#[derive(Clone, Copy, Debug, Default, PartialEq, BorshSerialize, BorshDeserialize, BorshSchema)]
pub struct Fee {
/// denominator of the fee ratio
pub denominator: u64,
/// numerator of the fee ratio
pub numerator: u64,
}
impl Fee {
/// Applies the Fee's rates to a given amount, `amt`
/// returning the amount to be subtracted from it as fees
/// (0 if denominator is 0 or amt is 0),
/// or None if overflow occurs
#[inline]
pub fn apply(&self, amt: u64) -> Option<u128> {
if self.denominator == 0 {
return Some(0);
}
(amt as u128)
.checked_mul(self.numerator as u128)?
.checked_div(self.denominator as u128)
}
/// Withdrawal fees have some additional restrictions,
/// this fn checks if those are met, returning an error if not.
/// Does nothing and returns Ok if fee type is not withdrawal
pub fn check_withdrawal(&self, old_withdrawal_fee: &Fee) -> Result<(), StakePoolError> {
// If the previous withdrawal fee was 0, we allow the fee to be set to a
// maximum of (WITHDRAWAL_BASELINE_FEE * MAX_WITHDRAWAL_FEE_INCREASE)
let (old_num, old_denom) =
if old_withdrawal_fee.denominator == 0 || old_withdrawal_fee.numerator == 0 {
(
WITHDRAWAL_BASELINE_FEE.numerator,
WITHDRAWAL_BASELINE_FEE.denominator,
)
} else {
(old_withdrawal_fee.numerator, old_withdrawal_fee.denominator)
};
// Check that new_fee / old_fee <= MAX_WITHDRAWAL_FEE_INCREASE
// Program fails if provided numerator or denominator is too large, resulting in overflow
if (old_num as u128)
.checked_mul(self.denominator as u128)
.map(|x| x.checked_mul(MAX_WITHDRAWAL_FEE_INCREASE.numerator as u128))
.ok_or(StakePoolError::CalculationFailure)?
< (self.numerator as u128)
.checked_mul(old_denom as u128)
.map(|x| x.checked_mul(MAX_WITHDRAWAL_FEE_INCREASE.denominator as u128))
.ok_or(StakePoolError::CalculationFailure)?
{
msg!(
"Fee increase exceeds maximum allowed, proposed increase factor ({} / {})",
self.numerator * old_denom,
old_num * self.denominator,
);
return Err(StakePoolError::FeeIncreaseTooHigh);
}
Ok(())
}
}
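// Minimal sketch (assumed values, not from the original tests): apply() floors
// amt * numerator / denominator, and a zero denominator means no fee is taken.
#[cfg(test)]
fn _fee_apply_sketch() {
    let fee = Fee { numerator: 1, denominator: 10 };
    assert_eq!(fee.apply(1_000), Some(100)); // 10% of 1000
    assert_eq!(fee.apply(9), Some(0)); // 0.9 floors to 0
    let no_fee = Fee { numerator: 1, denominator: 0 };
    assert_eq!(no_fee.apply(1_000), Some(0));
}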
impl fmt::Display for Fee {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
if self.numerator > 0 && self.denominator > 0 {
write!(f, "{}/{}", self.numerator, self.denominator)
} else {
write!(f, "none")
}
}
}
/// The type of fees that can be set on the stake pool
#[derive(Clone, Debug, PartialEq, BorshDeserialize, BorshSerialize, BorshSchema)]
pub enum FeeType {
/// Referral fees for SOL deposits
SolReferral(u8),
/// Referral fees for stake deposits
StakeReferral(u8),
/// Management fee paid per epoch
Epoch(Fee),
/// Stake withdrawal fee
StakeWithdrawal(Fee),
/// Deposit fee for SOL deposits
SolDeposit(Fee),
/// Deposit fee for stake deposits
StakeDeposit(Fee),
/// SOL withdrawal fee
SolWithdrawal(Fee),
}
impl FeeType {
/// Checks if the provided fee is too high, returning an error if so
pub fn check_too_high(&self) -> Result<(), StakePoolError> {
let too_high = match self {
Self::SolReferral(pct) => *pct > 100u8,
Self::StakeReferral(pct) => *pct > 100u8,
Self::Epoch(fee) => fee.numerator > fee.denominator,
Self::StakeWithdrawal(fee) => fee.numerator > fee.denominator,
Self::SolWithdrawal(fee) => fee.numerator > fee.denominator,
Self::SolDeposit(fee) => fee.numerator > fee.denominator,
Self::StakeDeposit(fee) => fee.numerator > fee.denominator,
};
if too_high {
msg!("Fee greater than 100%: {:?}", self);
return Err(StakePoolError::FeeTooHigh);
}
Ok(())
}
/// Returns if the contained fee can only be updated earliest on the next epoch
#[inline]
pub fn can_only_change_next_epoch(&self) -> bool {
matches!(
self,
Self::StakeWithdrawal(_) | Self::SolWithdrawal(_) | Self::Epoch(_)
)
}
}
#[cfg(test)]
mod test {
use {
super::*,
proptest::prelude::*,
solana_program::borsh::{
get_instance_packed_len, get_packed_len, try_from_slice_unchecked,
},
solana_program::{
clock::{DEFAULT_SLOTS_PER_EPOCH, DEFAULT_S_PER_SLOT, SECONDS_PER_DAY},
native_token::LAMPORTS_PER_SOL,
},
};
fn uninitialized_validator_list() -> ValidatorList {
ValidatorList {
header: ValidatorListHeader {
account_type: AccountType::Uninitialized,
max_validators: 0,
},
validators: vec![],
}
}
fn test_validator_list(max_validators: u32) -> ValidatorList {
ValidatorList {
header: ValidatorListHeader {
account_type: AccountType::ValidatorList,
max_validators,
},
validators: vec![
ValidatorStakeInfo {
status: StakeStatus::Active,
vote_account_address: Pubkey::new_from_array([1; 32]),
active_stake_lamports: u64::from_le_bytes([255; 8]),
transient_stake_lamports: u64::from_le_bytes([128; 8]),
last_update_epoch: u64::from_le_bytes([64; 8]),
transient_seed_suffix_start: 0,
transient_seed_suffix_end: 0,
},
ValidatorStakeInfo {
status: StakeStatus::DeactivatingTransient,
vote_account_address: Pubkey::new_from_array([2; 32]),
active_stake_lamports: 998877665544,
transient_stake_lamports: 222222222,
last_update_epoch: 11223445566,
transient_seed_suffix_start: 0,
transient_seed_suffix_end: 0,
},
ValidatorStakeInfo {
status: StakeStatus::ReadyForRemoval,
vote_account_address: Pubkey::new_from_array([3; 32]),
active_stake_lamports: 0,
transient_stake_lamports: 0,
last_update_epoch: 999999999999999,
transient_seed_suffix_start: 0,
transient_seed_suffix_end: 0,
},
],
}
}
#[test]
fn state_packing() {
let max_validators = 10_000;
let size = get_instance_packed_len(&ValidatorList::new(max_validators)).unwrap();
let stake_list = uninitialized_validator_list();
let mut byte_vec = vec![0u8; size];
let mut bytes = byte_vec.as_mut_slice();
stake_list.serialize(&mut bytes).unwrap();
let stake_list_unpacked = try_from_slice_unchecked::<ValidatorList>(&byte_vec).unwrap();
assert_eq!(stake_list_unpacked, stake_list);
// Empty, one preferred key
let stake_list = ValidatorList {
header: ValidatorListHeader {
account_type: AccountType::ValidatorList,
max_validators: 0,
},
validators: vec![],
};
let mut byte_vec = vec![0u8; size];
let mut bytes = byte_vec.as_mut_slice();
stake_list.serialize(&mut bytes).unwrap();
let stake_list_unpacked = try_from_slice_unchecked::<ValidatorList>(&byte_vec).unwrap();
assert_eq!(stake_list_unpacked, stake_list);
// With several accounts
let stake_list = test_validator_list(max_validators);
let mut byte_vec = vec![0u8; size];
let mut bytes = byte_vec.as_mut_slice();
stake_list.serialize(&mut bytes).unwrap();
let stake_list_unpacked = try_from_slice_unchecked::<ValidatorList>(&byte_vec).unwrap();
assert_eq!(stake_list_unpacked, stake_list);
}
#[test]
fn validator_list_active_stake() {
let max_validators = 10_000;
let mut validator_list = test_validator_list(max_validators);
assert!(validator_list.has_active_stake());
for validator in validator_list.validators.iter_mut() {
validator.active_stake_lamports = 0;
}
assert!(!validator_list.has_active_stake());
}
#[test]
fn validator_list_deserialize_mut_slice() {
let max_validators = 10;
let stake_list = test_validator_list(max_validators);
let mut serialized = stake_list.try_to_vec().unwrap();
let (header, list) = ValidatorListHeader::deserialize_mut_slice(
&mut serialized,
0,
stake_list.validators.len(),
)
.unwrap();
assert_eq!(header.account_type, AccountType::ValidatorList);
assert_eq!(header.max_validators, max_validators);
assert!(list
.iter()
.zip(stake_list.validators.iter())
.all(|(a, b)| *a == b));
let (_, list) = ValidatorListHeader::deserialize_mut_slice(&mut serialized, 1, 2).unwrap();
assert!(list
.iter()
.zip(stake_list.validators[1..].iter())
.all(|(a, b)| *a == b));
let (_, list) = ValidatorListHeader::deserialize_mut_slice(&mut serialized, 2, 1).unwrap();
assert!(list
.iter()
.zip(stake_list.validators[2..].iter())
.all(|(a, b)| *a == b));
let (_, list) = ValidatorListHeader::deserialize_mut_slice(&mut serialized, 0, 2).unwrap();
assert!(list
.iter()
.zip(stake_list.validators[..2].iter())
.all(|(a, b)| *a == b));
assert_eq!(
ValidatorListHeader::deserialize_mut_slice(&mut serialized, 0, 4).unwrap_err(),
ProgramError::AccountDataTooSmall
);
assert_eq!(
ValidatorListHeader::deserialize_mut_slice(&mut serialized, 1, 3).unwrap_err(),
ProgramError::AccountDataTooSmall
);
}
#[test]
fn validator_list_iter() {
let max_validators = 10;
let stake_list = test_validator_list(max_validators);
let mut serialized = stake_list.try_to_vec().unwrap();
let (_, big_vec) = ValidatorListHeader::deserialize_vec(&mut serialized).unwrap();
for (a, b) in big_vec
.iter::<ValidatorStakeInfo>()
.zip(stake_list.validators.iter())
{
assert_eq!(a, b);
}
}
proptest! {
#[test]
fn stake_list_size_calculation(test_amount in 0..=100_000_u32) {
let validators = ValidatorList::new(test_amount);
let size = get_instance_packed_len(&validators).unwrap();
assert_eq!(ValidatorList::calculate_max_validators(size), test_amount as usize);
assert_eq!(ValidatorList::calculate_max_validators(size.saturating_add(1)), test_amount as usize);
            assert_eq!(ValidatorList::calculate_max_validators(size.saturating_add(get_packed_len::<ValidatorStakeInfo>())), (test_amount + 1) as usize);
assert_eq!(ValidatorList::calculate_max_validators(size.saturating_sub(1)), (test_amount.saturating_sub(1)) as usize);
}
}
prop_compose! {
fn fee()(denominator in 1..=u16::MAX)(
denominator in Just(denominator),
numerator in 0..=denominator,
) -> (u64, u64) {
(numerator as u64, denominator as u64)
}
}
prop_compose! {
fn total_stake_and_rewards()(total_lamports in 1..u64::MAX)(
total_lamports in Just(total_lamports),
rewards in 0..=total_lamports,
) -> (u64, u64) {
(total_lamports - rewards, rewards)
}
}
#[test]
fn specific_fee_calculation() {
// 10% of 10 SOL in rewards should be 1 SOL in fees
let epoch_fee = Fee {
numerator: 1,
denominator: 10,
};
let mut stake_pool = StakePool {
total_lamports: 100 * LAMPORTS_PER_SOL,
pool_token_supply: 100 * LAMPORTS_PER_SOL,
epoch_fee,
..StakePool::default()
};
let reward_lamports = 10 * LAMPORTS_PER_SOL;
let pool_token_fee = stake_pool.calc_epoch_fee_amount(reward_lamports).unwrap();
stake_pool.total_lamports += reward_lamports;
stake_pool.pool_token_supply += pool_token_fee;
let fee_lamports = stake_pool
.calc_lamports_withdraw_amount(pool_token_fee)
.unwrap();
assert_eq!(fee_lamports, LAMPORTS_PER_SOL);
}
#[test]
fn zero_withdraw_calculation() {
let epoch_fee = Fee {
numerator: 0,
denominator: 1,
};
let stake_pool = StakePool {
epoch_fee,
..StakePool::default()
};
let fee_lamports = stake_pool.calc_lamports_withdraw_amount(0).unwrap();
assert_eq!(fee_lamports, 0);
}
#[test]
fn divide_by_zero_fee() {
let stake_pool = StakePool {
total_lamports: 0,
epoch_fee: Fee {
numerator: 1,
denominator: 10,
},
..StakePool::default()
};
let rewards = 10;
let fee = stake_pool.calc_epoch_fee_amount(rewards).unwrap();
assert_eq!(fee, rewards);
}
#[test]
fn approximate_apr_calculation() {
// 8% / year means roughly .044% / epoch
let stake_pool = StakePool {
last_epoch_total_lamports: 100_000,
last_epoch_pool_token_supply: 100_000,
total_lamports: 100_044,
pool_token_supply: 100_000,
..StakePool::default()
};
let pool_token_value =
stake_pool.total_lamports as f64 / stake_pool.pool_token_supply as f64;
let last_epoch_pool_token_value = stake_pool.last_epoch_total_lamports as f64
/ stake_pool.last_epoch_pool_token_supply as f64;
let epoch_rate = pool_token_value / last_epoch_pool_token_value - 1.0;
const SECONDS_PER_EPOCH: f64 = DEFAULT_SLOTS_PER_EPOCH as f64 * DEFAULT_S_PER_SLOT;
const EPOCHS_PER_YEAR: f64 = SECONDS_PER_DAY as f64 * 365.25 / SECONDS_PER_EPOCH;
const EPSILON: f64 = 0.00001;
let yearly_rate = epoch_rate * EPOCHS_PER_YEAR;
assert!((yearly_rate - 0.080355).abs() < EPSILON);
}
proptest! {
#[test]
fn fee_calculation(
(numerator, denominator) in fee(),
(total_lamports, reward_lamports) in total_stake_and_rewards(),
) {
let epoch_fee = Fee { denominator, numerator };
let mut stake_pool = StakePool {
total_lamports,
pool_token_supply: total_lamports,
epoch_fee,
..StakePool::default()
};
let pool_token_fee = stake_pool.calc_epoch_fee_amount(reward_lamports).unwrap();
stake_pool.total_lamports += reward_lamports;
stake_pool.pool_token_supply += pool_token_fee;
let fee_lamports = stake_pool.calc_lamports_withdraw_amount(pool_token_fee).unwrap();
let max_fee_lamports = u64::try_from((reward_lamports as u128) * (epoch_fee.numerator as u128) / (epoch_fee.denominator as u128)).unwrap();
assert!(max_fee_lamports >= fee_lamports,
"Max possible fee must always be greater than or equal to what is actually withdrawn, max {} actual {}",
max_fee_lamports,
fee_lamports);
// since we do two "flooring" conversions, the max epsilon should be
// correct up to 2 lamports (one for each floor division), plus a
// correction for huge discrepancies between rewards and total stake
let epsilon = 2 + reward_lamports / total_lamports;
assert!(max_fee_lamports - fee_lamports <= epsilon,
"Max expected fee in lamports {}, actually receive {}, epsilon {}",
max_fee_lamports, fee_lamports, epsilon);
}
}
prop_compose! {
fn total_tokens_and_deposit()(total_lamports in 1..u64::MAX)(
total_lamports in Just(total_lamports),
pool_token_supply in 1..=total_lamports,
deposit_lamports in 1..total_lamports,
) -> (u64, u64, u64) {
(total_lamports - deposit_lamports, pool_token_supply.saturating_sub(deposit_lamports).max(1), deposit_lamports)
}
}
proptest! {
#[test]
fn deposit_and_withdraw(
(total_lamports, pool_token_supply, deposit_stake) in total_tokens_and_deposit()
) {
let mut stake_pool = StakePool {
total_lamports,
pool_token_supply,
..StakePool::default()
};
let deposit_result = stake_pool.calc_pool_tokens_for_deposit(deposit_stake).unwrap();
prop_assume!(deposit_result > 0);
stake_pool.total_lamports += deposit_stake;
stake_pool.pool_token_supply += deposit_result;
let withdraw_result = stake_pool.calc_lamports_withdraw_amount(deposit_result).unwrap();
assert!(withdraw_result <= deposit_stake);
}
}
}
|
pub fn transient_lamports_not_equal(data: &[u8], lamports_le_bytes: &[u8]) -> bool {
sol_memcmp(&data[8..16], lamports_le_bytes, 8) != 0
}
|
sign.rs
|
/*
MIT License
Copyright (c) 2020 Philipp Schuster
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
//! Parsing code for the sign of the numeric value.
use derive_more::Display;
#[derive(Debug, PartialEq, Copy, Clone, Display)]
pub enum Sign {
#[display(fmt = "")]
Positive,
#[display(fmt = "-")]
Negative,
}
impl Sign {
    /// Parses the [`Sign`] from the normalized and validated slice of the input
    /// that corresponds to this type.
|
} else {
Sign::Positive
}
}
/// Convenient function to check if the sign is positive.
pub fn is_pos(&self) -> bool {
match self {
Sign::Positive => true,
Sign::Negative => false,
}
}
    /// Convenient function to check if the sign is negative.
#[allow(dead_code)]
pub fn is_neg(&self) -> bool {
!self.is_pos()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
    fn test_parse_sign() {
// we test only normalized inputs here
assert_eq!(
Sign::Positive,
Sign::from_input(""),
"Must be Sign::Positive"
);
assert_eq!(
Sign::Negative,
Sign::from_input("-"),
"Must be Sign::Negative"
);
}
}
|
    /// * `normalized_input` slice of normalized and validated user input that corresponds to this type
pub fn from_input(normalized_input: &str) -> Sign {
if normalized_input == "-" {
Sign::Negative
|
ingestor_config.go
|
package config
|
"fmt"
"github.com/timescale/outflux/internal/schemamanagement/schemaconfig"
)
// IngestorConfig holds all the properties required to create and run an ingestor
type IngestorConfig struct {
IngestorID string
BatchSize uint16
RollbackOnExternalError bool
CommitStrategy CommitStrategy
SchemaStrategy schemaconfig.SchemaStrategy
Schema string
ChunkTimeInterval string
}
// CommitStrategy describes how the ingestor should handle the ingested data
// Single Transaction or Commit on Each Batch
type CommitStrategy int
// Available values for the CommitStrategy enum
const (
CommitOnEnd CommitStrategy = iota + 1
CommitOnEachBatch
)
// ParseStrategyString returns the enum value matching the string, or an error
func ParseStrategyString(strategy string) (CommitStrategy, error) {
switch strategy {
case "CommitOnEnd":
return CommitOnEnd, nil
case "CommitOnEachBatch":
return CommitOnEachBatch, nil
default:
return CommitOnEnd, fmt.Errorf("unknown commit strategy '%s'", strategy)
}
}
func (s CommitStrategy) String() string {
switch s {
case CommitOnEnd:
return "CommitOnEnd"
case CommitOnEachBatch:
return "CommitOnEachBatch"
default:
panic("unknown type")
}
}
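// Illustrative usage sketch (not part of the original package): round-trips a
// commit strategy between its string form and the enum value.
func exampleCommitStrategy() {
	s, err := ParseStrategyString("CommitOnEachBatch")
	if err != nil {
		panic(err)
	}
	fmt.Println(s) // prints "CommitOnEachBatch", via the String() method above
}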
|
import (
|
customer.py
|
from sqlalchemy import String, ForeignKey, Integer, Enum
|
class Customer(AbstractModel):
__tablename__ = "customer"
user_id = RequiredColumn(String(50))
stripe_customer_id = RequiredColumn(String(50))
|
from .base import AbstractModel, RequiredColumn
|
RunJobByStatus.py
|
"""
_RunJobByStatus_
Monitoring DAO class for jobs in the BossAir database.
It groups jobs by sched_status and BossAir status and guarantees that
all sched_status values are always present in the output.
"""
from __future__ import print_function, division
from WMCore.Database.DBFormatter import DBFormatter
class RunJobByStatus(DBFormatter):
sql = """
SELECT bl_status.name AS sched_status, count(bl_runjob.sched_status) AS count, bl_runjob.status
FROM bl_status
LEFT OUTER JOIN bl_runjob ON bl_runjob.sched_status = bl_status.id
GROUP BY bl_status.name, bl_runjob.status
"""
def formatDict(self, results):
"""
_formatDict_
        Creates a dictionary of active (status=1) and completed (status=0)
        jobs in BossAir with their sched_status and the number of jobs in that status.
"""
formattedResults = DBFormatter.formatDict(self, results)
results = {'active': {}, 'completed': {}}
for res in formattedResults:
results['active'].setdefault(res['sched_status'], 0)
results['completed'].setdefault(res['sched_status'], 0)
if res['status'] is None:
pass # job count is always 0 for this case
elif int(res['status']) == 0:
results['completed'][res['sched_status']] += int(res['count'])
else: # status = 1
results['active'][res['sched_status']] += int(res['count'])
return results
def execute(self, conn=None, transaction=False):
|
result = self.dbi.processData(self.sql, conn=conn, transaction=transaction)
return self.formatDict(result)
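# Illustrative usage sketch (the DAO factory and the 'Idle' status are
# assumptions, not from this module): every sched_status key is present in
# both dicts, even when its job count is zero.
#
# dao = daoFactory(classname="RunJobByStatus")
# jobsByStatus = dao.execute()
# idleActive = jobsByStatus['active'].get('Idle', 0)
# idleCompleted = jobsByStatus['completed'].get('Idle', 0)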
|
|
basic.ts
|
import { StageComponent, ComponentTester } from 'aurelia-testing';
import { bootstrap } from 'aurelia-bootstrapper';
import { RegistrationForm } from './resources/registration-form';
import { validateTrigger, ValidateEvent } from '../src/aurelia-validation';
import { configure, blur, change } from './shared';
describe('end to end', () => {
it('basic scenarios', (done: () => void) => {
const component: ComponentTester = StageComponent
.withResources()
.inView('<registration-form></registration-form>')
.boundTo({});
component.bootstrap(configure);
let firstName: HTMLInputElement;
let lastName: HTMLInputElement;
let number1: HTMLInputElement;
let number2: HTMLInputElement;
let password: HTMLInputElement;
let confirmPassword: HTMLInputElement;
let viewModel: RegistrationForm;
const renderer = { render: jasmine.createSpy() };
component.create(bootstrap as any)
// grab some references.
.then(() => {
viewModel = component.viewModel;
viewModel.controller.addRenderer(renderer);
firstName = component.element.querySelector('#firstName') as HTMLInputElement;
lastName = component.element.querySelector('#lastName') as HTMLInputElement;
number1 = component.element.querySelector('#number1') as HTMLInputElement;
number2 = component.element.querySelector('#number2') as HTMLInputElement;
password = component.element.querySelector('#password') as HTMLInputElement;
confirmPassword = component.element.querySelector('#confirmPassword') as HTMLInputElement;
})
// initially there should not be any errors
.then(() => expect(viewModel.controller.errors.length).toBe(0))
// blur the firstName field- this should trigger validation.
.then(() => blur(firstName))
// confirm there's an error.
.then(() => expect(viewModel.controller.errors.length).toBe(1))
// make a model change to the firstName field.
// this should reset the errors for the firstName field.
.then(() => viewModel.firstName = 'test')
// confirm the errors were reset.
.then(() => expect(viewModel.controller.errors.length).toBe(0))
// blur the lastName field- this should trigger validation.
.then(() => blur(lastName))
// confirm there's an error.
.then(() => {
expect(viewModel.controller.errors.length).toBe(1);
const calls = renderer.render.calls;
const renderInstruction = calls.argsFor(calls.count() - 1)[0];
expect(renderInstruction.render[0].elements[0]).toBe(lastName);
})
// blur the number1 field- this should trigger validation.
.then(() => blur(number1))
// confirm there's an error.
.then(() => {
expect(viewModel.controller.errors.length).toBe(2);
const calls = renderer.render.calls;
const renderInstruction = calls.argsFor(calls.count() - 1)[0];
expect(renderInstruction.render[0].elements[0]).toBe(number1);
})
// blur the number2 field- this should trigger validation.
.then(() => blur(number2))
// confirm there's an error.
.then(() => {
expect(viewModel.controller.errors.length).toBe(3);
const calls = renderer.render.calls;
const renderInstruction = calls.argsFor(calls.count() - 1)[0];
expect(renderInstruction.render[0].elements[0]).toBe(number2);
})
// make a model change to the number1 field.
// this should reset the errors for the number1 field.
.then(() => viewModel.number1 = 1)
// confirm the error was reset.
.then(() => expect(viewModel.controller.errors.length).toBe(2))
// make a model change to the number2 field.
// this should reset the errors for the number2 field.
.then(() => viewModel.number2 = 2)
// confirm the error was reset.
.then(() => expect(viewModel.controller.errors.length).toBe(1))
// change the numbers back to invalid values.
.then(() => {
viewModel.number1 = 0;
viewModel.number2 = 0;
})
// hide the form and change the validateTrigger.
.then(() => {
viewModel.showForm = false;
viewModel.controller.validateTrigger = validateTrigger.change;
})
// show the form
.then(() => viewModel.showForm = true)
// confirm hiding and showing the form reset the errors.
.then(() => expect(viewModel.controller.errors.length).toBe(0))
// change the firstName field- this should trigger validation.
.then(() => change(firstName, 'test'))
// confirm there's no error.
.then(() => expect(viewModel.controller.errors.length).toBe(0))
// change the firstName field- this should trigger validation.
.then(() => change(firstName, ''))
// confirm there's an error.
.then(() => expect(viewModel.controller.errors.length).toBe(1))
// change the number1 field- this should trigger validation.
.then(() => change(number1, '-1'))
// confirm there's an error.
.then(() => expect(viewModel.controller.errors.length).toBe(2))
// change the number2 field- this should trigger validation.
.then(() => change(number2.firstElementChild as HTMLInputElement, '-1'))
// confirm there's an error.
.then(() => expect(viewModel.controller.errors.length).toBe(3))
// change the number1 field- this should trigger validation.
.then(() => change(number1, '32'))
// confirm the error was reset.
.then(() => expect(viewModel.controller.errors.length).toBe(2))
// change the number2 field- this should trigger validation.
.then(() => change(number2.firstElementChild as HTMLInputElement, '23'))
// confirm the error was reset.
.then(() => expect(viewModel.controller.errors.length).toBe(1))
|
.then(() => {
viewModel.number1 = 0;
viewModel.number2 = 0;
viewModel.password = 'a';
viewModel.confirmPassword = 'a';
viewModel.controller.reset();
})
// make the passwords mismatch.
.then(() => change(confirmPassword, 'b'))
// confirm the custom validator worked
.then(() => expect(viewModel.controller.errors[0].message).toBe('Confirm Password must match Password'))
// hide the form and change the validateTrigger.
.then(() => {
viewModel.showForm = false;
viewModel.controller.validateTrigger = validateTrigger.manual;
})
// show the form
.then(() => viewModel.showForm = true)
// confirm hiding and showing the form reset the errors.
.then(() => expect(viewModel.controller.errors.length).toBe(0))
// validate all bindings
.then(() => viewModel.controller.validate())
// confirm validating resulted in errors.
.then(() => expect(viewModel.controller.errors.length).toBe(6))
// reset all bindings
.then(() => viewModel.controller.reset())
// confirm resetting cleared all errors.
.then(() => expect(viewModel.controller.errors.length).toBe(0))
// hide the form and change the validateTrigger.
.then(() => {
viewModel.showForm = false;
viewModel.controller.validateTrigger = validateTrigger.changeOrBlur;
})
// show the form
.then(() => viewModel.showForm = true)
// confirm hiding and showing the form reset the errors.
.then(() => expect(viewModel.controller.errors.length).toBe(0))
// blur the firstName field- this should trigger validation.
.then(() => blur(firstName))
// confirm there's an error.
.then(() => expect(viewModel.controller.errors.length).toBe(1))
// make a model change to the firstName field.
// this should reset the errors for the firstName field.
.then(() => viewModel.firstName = 'test')
// confirm the errors were reset.
.then(() => expect(viewModel.controller.errors.length).toBe(0))
// change the lastName field- this should trigger validation.
.then(() => change(lastName, 'abcdef'))
.then(() => change(lastName, ''))
// confirm there's an error.
.then(() => expect(viewModel.controller.errors.length).toBe(1))
// make lastName valid again
.then(() => change(lastName, 'ghi'))
// confirm there's an error.
.then(() => expect(viewModel.controller.errors.length).toBe(0))
// add some errors
.then(() => {
const error1 = viewModel.controller.addError('object error', viewModel);
expect(error1.message).toBe('object error');
expect(error1.object).toBe(viewModel);
expect(error1.propertyName).toBe(null);
const error2 = viewModel.controller.addError('string property error', viewModel, 'lastName');
expect(error2.message).toBe('string property error');
expect(error2.object).toBe(viewModel);
expect(error2.propertyName).toBe('lastName');
const error3 = viewModel.controller.addError('expression property error', viewModel, vm => vm.firstName);
expect(error3.message).toBe('expression property error');
expect(error3.object).toBe(viewModel);
expect(error3.propertyName).toBe('firstName');
expect(viewModel.controller.errors.length).toBe(3);
viewModel.controller.removeError(error1);
expect(viewModel.controller.errors.length).toBe(2);
viewModel.controller.removeError(error2);
expect(viewModel.controller.errors.length).toBe(1);
viewModel.controller.removeError(error3);
expect(viewModel.controller.errors.length).toBe(0);
})
// subscribe to error events
.then(() => {
let event1: ValidateEvent;
let event2: ValidateEvent;
const spy1 = jasmine.createSpy().and.callFake((event: ValidateEvent) => event1 = event);
const spy2 = jasmine.createSpy().and.callFake((event: ValidateEvent) => event2 = event);
viewModel.controller.subscribe(spy1);
viewModel.controller.subscribe(spy2);
return change(lastName, '')
.then(() => {
expect(spy1).toHaveBeenCalled();
expect(spy2).toHaveBeenCalled();
expect(event1).toBe(event2);
expect(event1.errors.length).toBe(1);
spy1.calls.reset();
spy2.calls.reset();
event1 = null as any;
event2 = null as any;
})
.then(() => change(firstName, ''))
.then(() => {
expect(spy1).toHaveBeenCalled();
expect(spy2).toHaveBeenCalled();
expect(event1).toBe(event2);
expect(event1.errors.length).toBe(2);
});
})
// cleanup and finish.
.then(() => component.dispose())
.then(done);
});
});
|
// change the numbers back to invalid values.
|
hint.rs
|
#![stable(feature = "core_hint", since = "1.27.0")]
//! Hints to the compiler that affect how code should be emitted or optimized.
//! Hints may be compile time or runtime.
use crate::intrinsics;
/// Informs the compiler that this point in the code is not reachable, enabling
/// further optimizations.
///
/// # Safety
///
/// Reaching this function is completely *undefined behavior* (UB). In
/// particular, the compiler assumes that all UB must never happen, and
/// therefore will eliminate all branches that reach a call to
/// `unreachable_unchecked()`.
///
/// Like all instances of UB, if this assumption turns out to be wrong, i.e., the
/// `unreachable_unchecked()` call is actually reachable among all possible
/// control flow, the compiler will apply the wrong optimization strategy, and
/// may sometimes even corrupt seemingly unrelated code, causing
/// difficult-to-debug problems.
///
/// Use this function only when you can prove that the code will never call it.
/// Otherwise, consider using the [`unreachable!`] macro, which does not allow
/// optimizations but will panic when executed.
///
/// # Example
///
/// ```
/// fn div_1(a: u32, b: u32) -> u32 {
/// use std::hint::unreachable_unchecked;
///
/// // `b.saturating_add(1)` is always positive (not zero),
/// // hence `checked_div` will never return `None`.
/// // Therefore, the else branch is unreachable.
/// a.checked_div(b.saturating_add(1))
/// .unwrap_or_else(|| unsafe { unreachable_unchecked() })
/// }
///
/// assert_eq!(div_1(7, 0), 7);
/// assert_eq!(div_1(9, 1), 4);
/// assert_eq!(div_1(11, u32::MAX), 0);
/// ```
#[inline]
#[stable(feature = "unreachable", since = "1.27.0")]
#[rustc_const_unstable(feature = "const_unreachable_unchecked", issue = "53188")]
pub const unsafe fn unreachable_unchecked() -> !
|
/// Emits a machine instruction to signal the processor that it is running in
/// a busy-wait spin-loop ("spin lock").
///
/// Upon receiving the spin-loop signal the processor can optimize its behavior by,
/// for example, saving power or switching hyper-threads.
///
/// This function is different from [`thread::yield_now`] which directly
/// yields to the system's scheduler, whereas `spin_loop` does not interact
/// with the operating system.
///
/// A common use case for `spin_loop` is implementing bounded optimistic
/// spinning in a CAS loop in synchronization primitives. To avoid problems
/// like priority inversion, it is strongly recommended that the spin loop is
/// terminated after a finite amount of iterations and an appropriate blocking
/// syscall is made.
///
/// **Note**: On platforms that do not support receiving spin-loop hints this
/// function does not do anything at all.
///
/// # Examples
///
/// ```
/// use std::sync::atomic::{AtomicBool, Ordering};
/// use std::sync::Arc;
/// use std::{hint, thread};
///
/// // A shared atomic value that threads will use to coordinate
/// let live = Arc::new(AtomicBool::new(false));
///
/// // In a background thread we'll eventually set the value
/// let bg_work = {
/// let live = live.clone();
/// thread::spawn(move || {
/// // Do some work, then make the value live
/// do_some_work();
/// live.store(true, Ordering::Release);
/// })
/// };
///
/// // Back on our current thread, we wait for the value to be set
/// while !live.load(Ordering::Acquire) {
/// // The spin loop is a hint to the CPU that we're waiting, but probably
/// // not for very long
/// hint::spin_loop();
/// }
///
/// // The value is now set
/// # fn do_some_work() {}
/// do_some_work();
/// bg_work.join()?;
/// # Ok::<(), Box<dyn core::any::Any + Send + 'static>>(())
/// ```
///
/// [`thread::yield_now`]: ../../std/thread/fn.yield_now.html
#[inline]
#[stable(feature = "renamed_spin_loop", since = "1.49.0")]
pub fn spin_loop() {
#[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), target_feature = "sse2"))]
{
#[cfg(target_arch = "x86")]
{
// SAFETY: the `cfg` attr ensures that we only execute this on x86 targets.
unsafe { crate::arch::x86::_mm_pause() };
}
#[cfg(target_arch = "x86_64")]
{
// SAFETY: the `cfg` attr ensures that we only execute this on x86_64 targets.
unsafe { crate::arch::x86_64::_mm_pause() };
}
}
#[cfg(any(target_arch = "aarch64", all(target_arch = "arm", target_feature = "v6")))]
{
#[cfg(target_arch = "aarch64")]
{
// SAFETY: the `cfg` attr ensures that we only execute this on aarch64 targets.
unsafe { crate::arch::aarch64::__yield() };
}
#[cfg(target_arch = "arm")]
{
// SAFETY: the `cfg` attr ensures that we only execute this on arm targets
// with support for the v6 feature.
unsafe { crate::arch::arm::__yield() };
}
}
}
/// An identity function that *__hints__* to the compiler to be maximally pessimistic about what
/// `black_box` could do.
///
/// Unlike [`std::convert::identity`], a Rust compiler is encouraged to assume that `black_box` can
/// use `dummy` in any possible valid way that Rust code is allowed to without introducing undefined
/// behavior in the calling code. This property makes `black_box` useful for writing code in which
/// certain optimizations are not desired, such as benchmarks.
///
/// Note however, that `black_box` is only (and can only be) provided on a "best-effort" basis. The
/// extent to which it can block optimisations may vary depending upon the platform and code-gen
/// backend used. Programs cannot rely on `black_box` for *correctness* in any way.
///
/// [`std::convert::identity`]: crate::convert::identity
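///
/// # Example
///
/// A minimal sketch of keeping a computation observable in a benchmark-style
/// loop (assumes the unstable `bench_black_box` feature gate is enabled):
///
/// ```
/// #![feature(bench_black_box)]
/// use std::hint::black_box;
///
/// fn contains_word(haystack: &str) -> bool {
///     haystack.contains("needle")
/// }
///
/// for _ in 0..10 {
///     // Hinting that both the input and the result are opaque discourages
///     // the compiler from constant-folding the whole loop away.
///     let found = black_box(contains_word(black_box("hay needle hay")));
///     assert!(found);
/// }
/// ```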
#[cfg_attr(not(miri), inline)]
#[cfg_attr(miri, inline(never))]
#[unstable(feature = "bench_black_box", issue = "64102")]
#[cfg_attr(miri, allow(unused_mut))]
pub fn black_box<T>(mut dummy: T) -> T {
// We need to "use" the argument in some way LLVM can't introspect, and on
// targets that support it we can typically leverage inline assembly to do
// this. LLVM's interpretation of inline assembly is that it's, well, a black
// box. This isn't the greatest implementation since it probably deoptimizes
// more than we want, but it's so far good enough.
#[cfg(not(miri))] // This is just a hint, so it is fine to skip in Miri.
// SAFETY: the inline assembly is a no-op.
unsafe {
// FIXME: Cannot use `asm!` because it doesn't support MIPS and other architectures.
llvm_asm!("" : : "r"(&mut dummy) : "memory" : "volatile");
}
dummy
}
|
i2c.rs
|
//
// Copyright (C) 2018 Kubos Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Contributed by: Timothy Scott ([email protected]) and Sebastian Hamel ([email protected])
//
//! Wraps the I2C functionality of NOSEngine.
//!
//! # Examples
//!
//! ```no_run
//! # extern crate nosengine_rust;
//! # use nosengine_rust::client::i2c::*;
//! # use nosengine_rust::ffi::i2c::I2CDirection;
//! # use std::slice;
//! let master = I2CMaster::new(9u16, "tcp://localhost:12001", "i2c19").unwrap();
//!
//! extern "C" fn callback(dir: I2CDirection, buffer: *mut u8, len: usize) -> usize {
//! let data = unsafe{ slice::from_raw_parts_mut(buffer, len) };
//! match dir {
//! I2CDirection::Read => {
//! for i in 0..len {
//! data[i] = (i + 5) as u8;
//! }
//! len
//! },
//! I2CDirection::Write => {
//! assert_eq!(data, &[1u8, 2, 3, 4]);
//! len
//! }
//! }
//! }
//!
//! let slave = I2CSlave::new(8u16, "tcp://localhost:12001", "i2c19", callback).unwrap();
//!
//! assert_eq!(master.write(8u16, &[1u8, 2, 3, 4]), Ok(()));
//! assert_eq!(master.read(8u16, 4), Ok(vec![5u8, 6, 7, 8]));
//! ```
use super::ffi::i2c;
use failure::Fail;
use std::error::Error;
use std::ffi;
use std::ffi::CString;
/// This enum represents any type of error that can occur when interacting with I2C
#[derive(Fail, Debug, Clone, PartialEq)]
pub enum I2CError {
/// An error occurred when converting a Rust string to a C string.
/// Specifically, the Rust string contained a null character, which cannot be represented
/// in C strings.
#[fail(
display = "String Error. Null character at index {}: {}",
position, description
)]
StringError {
/// Description from the underlying std::ffi::NulError
description: String,
/// Index in the original string of the problematic null character
position: usize,
},
/// There was an error when creating the I2C.
#[fail(display = "I2C Creation Error")]
I2CCreationError,
/// This error is raised when an I2C device is created with an invalid address.
#[fail(
display = "Invalid Address: {}. Must be between 8 and 127, inclusive.",
address
)]
InvalidAddress {
/// The address which was attempted
address: u16,
},
/// Attempted to read or write to an address that doesn't exist.
#[fail(display = "Unknown Address: {} not found on this bus.", address)]
UnknownAddress {
/// The address which was not found
address: u16,
},
}
impl From<ffi::NulError> for I2CError {
fn from(err: ffi::NulError) -> Self {
I2CError::StringError {
description: String::from(err.description()),
position: err.nul_position(),
}
}
}
/// This struct represents a master on an I2C bus.
pub struct I2CMaster<'a> {
i2c_ptr: *mut i2c::I2CHandle,
/// NOSEngine connection string
pub connection: &'a str,
/// Name of this bus to which this master is connected
pub bus: &'a str,
/// Address of this master
pub address: u16,
}
impl<'a> I2CMaster<'a> {
    /// Creates a new I2C master on the given bus. Each address can host only one master: if you
    /// attempt to create another master with the same address on the same bus, this function
    /// returns an `Err`.
///
/// # Arguments
///
/// * `address`: Address of this I2C master
/// * `connection`: NOSEngine connection string
/// * `bus`: Name of the bus on which to create the master
///
/// # Examples
///
    /// ```no_run
/// # extern crate nosengine_rust;
/// # use nosengine_rust::client::i2c::*;
/// let master = I2CMaster::new(9u16, "tcp://localhost:12001", "i2c20");
/// assert!(master.is_ok());
/// // 2 masters on a bus is OK as long as they have different addresses
/// let master = I2CMaster::new(10u16, "tcp://localhost:12001", "i2c20");
/// assert!(master.is_ok());
/// // This fails because the address 10u16 is already taken.
/// let master = I2CMaster::new(10u16, "tcp://localhost:12001", "i2c20");
/// assert!(master.is_err());
/// ```
pub fn new(address: u16, connection: &'a str, bus: &'a str) -> Result<I2CMaster<'a>, I2CError> {
if address < 8 || address > 127 {
return Err(I2CError::InvalidAddress { address });
}
let c_connection = CString::new(connection)?;
let c_bus = CString::new(bus)?;
let i2c_ptr = i2c::i2c_init_master(address, c_connection.as_ptr(), c_bus.as_ptr());
if i2c_ptr.is_null() {
Err(I2CError::I2CCreationError)
} else {
Ok(I2CMaster {
i2c_ptr,
connection,
bus,
address,
})
}
}
/// This function reads bytes from the given address.
///
/// # Arguments
///
    /// * `address`: Address of the slave device to read from
    /// * `num_bytes`: How many bytes to read from the device
///
/// # Examples
///
/// See [`nosengine-rust::client::i2c`](../i2c/index.html#examples)
pub fn read(&self, address: u16, num_bytes: usize) -> Result<Vec<u8>, I2CError> {
if address < 8 || address > 127 {
return Err(I2CError::InvalidAddress { address });
}
let mut rbuf: Vec<u8> = vec![0u8; num_bytes];
match i2c::i2c_read(self.i2c_ptr, address, rbuf.as_mut_ptr(), num_bytes) {
i2c::I2CStatus::Success => Ok(rbuf),
i2c::I2CStatus::Failure => Err(I2CError::UnknownAddress { address }),
}
}
/// This function writes bytes to the given address.
///
/// # Arguments
///
    /// * `address`: Address of the slave device to write to
    /// * `data`: Bytes to write to the device
///
/// # Examples
///
/// See [`nosengine-rust::client::i2c`](../i2c/index.html#examples)
pub fn write(&self, address: u16, data: &[u8]) -> Result<(), I2CError> {
if address < 8 || address > 127 {
return Err(I2CError::InvalidAddress { address });
}
match i2c::i2c_write(self.i2c_ptr, address, data.as_ptr(), data.len()) {
i2c::I2CStatus::Success => Ok(()),
i2c::I2CStatus::Failure => Err(I2CError::UnknownAddress { address }),
}
}
    /// This function writes bytes to the given address, followed by a read.
    ///
    /// # Arguments
    ///
    /// * `address`: Address of the slave device
    /// * `tx_data`: Bytes to write to the device
    /// * `rx_len`: Number of bytes expected to be read
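    ///
    /// # Examples
    ///
    /// A minimal sketch (assumes a bus and a slave at address 8 set up as in
    /// the module-level example; the register byte `0x01` is hypothetical):
    ///
    /// ```no_run
    /// # extern crate nosengine_rust;
    /// # use nosengine_rust::client::i2c::*;
    /// let master = I2CMaster::new(9u16, "tcp://localhost:12001", "i2c19").unwrap();
    /// // Write one register byte, then read four bytes back in a single transaction.
    /// let response = master.transaction(8u16, &[0x01u8], 4);
    /// ```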
pub fn transaction(
&self,
address: u16,
tx_data: &[u8],
rx_len: usize,
    ) -> Result<Vec<u8>, I2CError> {
        if address < 8 || address > 127 {
            return Err(I2CError::InvalidAddress { address });
        }
        let mut rbuf: Vec<u8> = vec![0u8; rx_len];
match i2c::i2c_transaction(
self.i2c_ptr,
address,
tx_data.as_ptr(),
tx_data.len(),
rbuf.as_mut_ptr(),
rx_len,
) {
i2c::I2CStatus::Success => Ok(rbuf),
i2c::I2CStatus::Failure => Err(I2CError::UnknownAddress { address }),
}
}
}
impl<'a> Drop for I2CMaster<'a> {
    fn drop(&mut self) {
i2c::i2c_close(&mut self.i2c_ptr as *mut *mut i2c::I2CHandle);
}
}
/// This struct represents an I2C Slave.
pub struct I2CSlave<'a> {
i2c_ptr: *mut i2c::I2CHandle,
/// The NOSEngine connection string
pub connection: &'a str,
/// Name of the bus to which this slave is connected
pub bus: &'a str,
/// Address of this slave
pub address: u16,
}
impl<'a> I2CSlave<'a> {
/// Constructs a new I2C slave. The given callback will run every time the master reads
/// from or writes to this slave. If a slave with the given address already exists on
    /// this bus, this function returns an `Err`.
///
/// # Arguments
///
/// * `address`: Address for this slave. Must be unique on a bus
/// * `connection`: NOSEngine connection string
/// * `bus`: Name of the bus to connect to
/// * `callback`: Callback that runs every time the master reads from or writes to this device.
/// The callback is responsible for checking whether it is reading or writing, performing
/// the appropriate action, then returning the number of bytes read or written. The
/// arguments to the callback are:
/// * `I2CDirection`: Specifies whether this is a read or write
/// * `*mut u8`: The buffer which either contains the data being written to this device, or
/// to which this device should write data. It is guaranteed to have enough bytes of
/// valid memory based on the length argument
/// * `usize`: The number of bytes being read or written
pub fn new(
address: u16,
connection: &'a str,
bus: &'a str,
callback: extern "C" fn(i2c::I2CDirection, *mut u8, usize) -> usize,
) -> Result<I2CSlave<'a>, I2CError> {
let c_connection = CString::new(connection)?;
let c_bus = CString::new(bus)?;
let i2c_ptr = i2c::i2c_init_slave(address, c_connection.as_ptr(), c_bus.as_ptr(), callback);
if i2c_ptr.is_null() {
Err(I2CError::I2CCreationError)
} else {
Ok(I2CSlave {
i2c_ptr,
connection,
bus,
address,
})
}
}
}
impl<'a> Drop for I2CSlave<'a> {
fn drop(&mut self) {
i2c::i2c_close(&mut self.i2c_ptr as *mut *mut i2c::I2CHandle);
}
}
|
StringVectorParameterUI.py
|
##########################################################################
#
# Copyright (c) 2010, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import maya.cmds
import maya.mel
import maya.OpenMaya
import IECore
import IECoreMaya
## \todo: this is incredibly similar to NumericVectorParameterUI. Is it possible to generalize
## a ParameterUI for all *VectorParameters?
class StringVectorParameterUI( IECoreMaya.ParameterUI ) :
def __init__( self, node, parameter, **kw ) :
topLevelUI = maya.cmds.columnLayout()
IECoreMaya.ParameterUI.__init__( self, node, parameter, topLevelUI, **kw )
self.__column = maya.cmds.columnLayout( parent=topLevelUI )
row = maya.cmds.rowLayout(
parent = topLevelUI,
numberOfColumns = 2,
columnAlign = ( 1, "right" ),
columnWidth2 = [ IECoreMaya.ParameterUI.textColumnWidthIndex, IECoreMaya.ParameterUI.singleWidgetWidthIndex ]
)
maya.cmds.text(
parent = row,
label = self.label(),
font = "smallPlainLabelFont",
align = "right",
annotation = self.description()
)
addButton = maya.cmds.button( parent=row, label='Add Item', command=self._createCallback( self.__addItem ) )
self.__fields = []
self.__attributeChangedCallbackId = IECoreMaya.CallbackId(
maya.OpenMaya.MNodeMessage.addAttributeChangedCallback( self.node(), self.__attributeChanged )
)
self.replace( self.node(), self.parameter )
def replace( self, node, parameter ) :
IECoreMaya.ParameterUI.replace( self, node, parameter )
# disabling copy/paste from the ParameterClipboardUI as the results will be misleading to users
parameter.userData().update( StringVectorParameterUI.__disableCopyPaste )
vector = maya.cmds.getAttr( self.plugName() ) or []
# delete un-needed fields
self.__fields = self.__fields[:len(vector)]
rows = maya.cmds.columnLayout( self.__column, q=True, childArray=True ) or []
rowsToKeep = rows[:len(vector)]
rowsToDelete = rows[len(vector):]
for row in rowsToDelete :
maya.cmds.deleteUI( row )
# create new fields
for i in range( len(rowsToKeep), len(vector) ) :
self.__createRow( self.label() + ": %d" % i )
self.__setUIFromPlug()
def _topLevelUIDeleted( self ) :
self.__attributeChangedCallbackId = None
def __createRow( self, label ) :
row = maya.cmds.rowLayout(
parent = self.__column,
numberOfColumns = 2,
columnAlign2 = ( "right", "left" ),
columnWidth2 = [ IECoreMaya.ParameterUI.textColumnWidthIndex, IECoreMaya.ParameterUI.singleWidgetWidthIndex ],
)
## \todo: there is a slight text misalignment if the window exists when __createRow is called
maya.cmds.text(
parent = row,
label = label,
font = "smallPlainLabelFont",
align = "right",
annotation = self.description(),
width = IECoreMaya.ParameterUI.textColumnWidthIndex,
)
self.__fields.append(
maya.cmds.textField(
parent = row,
changeCommand = self._createCallback( self.__setPlugFromUI ),
width = IECoreMaya.ParameterUI.singleWidgetWidthIndex,
)
)
i = len(self.__fields) - 1
self._addPopupMenu( parentUI=self.__fields[i], index=i )
def _popupMenuDefinition( self, **kw ) :
definition = IECore.MenuDefinition()
definition.append( "/Remove Item", { "command" : self._createCallback( IECore.curry( self.__removeItem, index=kw['index'] ) ) } )
return definition
def __addItem( self ) :
vector = maya.cmds.getAttr( self.plugName() ) or []
vector.append( "" )
self.__setPlug( vector )
self.replace( self.node(), self.parameter )
def __removeItem( self, index ) :
vector = maya.cmds.getAttr( self.plugName() ) or []
vector = vector[:index] + vector[index+1:]
self.__setPlug( vector )
self.replace( self.node(), self.parameter )
def __attributeChanged( self, changeType, plug, otherPlug, userData ) :
if not ( changeType & maya.OpenMaya.MNodeMessage.kAttributeSet ) :
return
try :
myPlug = self.plug()
except :
# this situation can occur when our parameter has been removed but the
# ui we represent is not quite yet dead
return
if not plug == myPlug :
return
self.replace( self.node(), self.parameter )
def __setUIFromPlug( self ) :
vector = maya.cmds.getAttr( self.plugName() ) or []
for i in range( 0, len(vector) ) :
maya.cmds.textField( self.__fields[i], e=True, text=vector[i] )
	def __setPlugFromUI( self ) :

		vector = []
		for field in self.__fields :
			vector.append( maya.cmds.textField( field, q=True, text=True ) )

		self.__setPlug( vector )

def __setPlug( self, value ) :
## \todo: do this in python if maya ever fixes the nonsense required to call setAttr on a stringArray
plugType = maya.cmds.getAttr( self.plugName(), type=True )
cmd = 'setAttr %s -type %s %d' % ( self.plugName(), plugType, len(value) )
for val in value :
cmd += ' "%s"' % val
maya.mel.eval( cmd )
__disableCopyPaste = IECore.CompoundObject( {
"UI" : IECore.CompoundObject( {
"copyPaste" : IECore.BoolData( False ),
} ),
} )
IECoreMaya.ParameterUI.registerUI( IECore.TypeId.StringVectorParameter, StringVectorParameterUI )
|
wkt-console.js
|
/**
* @license
* Copyright (c) 2021, Oracle and/or its affiliates.
* Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
*/
'use strict';
define(['knockout'],
function (ko) {
function WktConsole() {
// notify views to show the console
this.show = ko.observable();
this.show.extend({ notify: 'always' });
this.lines = ko.observableArray();
this.addLine = (line, outputType) => {
let value = {text: line, type: outputType};
this.lines.push(value);
};
this.clear = () => {
this.lines.removeAll();
};
}
// Return a singleton instance
return new WktConsole();
|
}
);
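
// A minimal usage sketch (hypothetical consumer module; the 'models/wkt-console'
// path and the 'out' output type are assumptions, not part of this file):
//
//   define(['models/wkt-console'], function (wktConsole) {
//     wktConsole.addLine('Build started...', 'out');
//     wktConsole.show(true);  // notify subscribed views to display the console
//     wktConsole.clear();     // drop all accumulated lines
//   });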
|
|
isolation.ts
|
import 'mocha';
import xs, {Stream} from 'xstream';
import {createElement, PureComponent, ReactElement} from 'react';
import isolate from '@cycle/isolate';
import * as renderer from 'react-test-renderer';
import {h, ReactSource, makeCycleReactComponent} from '../src/index';
const assert = require('assert');
class Inspect extends PureComponent<any, any> {
public press() {
if (this.props.onPress) {
this.props.onPress(this.props.name);
}
}
public render() {
return null;
}
}
describe('Isolation', function() {
it('prevents parent from selecting inside the child', done => {
function child(sources: {react: ReactSource}) {
sources.react
.select('bar')
.events('press')
.addListener({
next: name => {
// This listener exists just to make sure the child's inspect
// has an onPress prop
},
});
const vdom$ = xs.of(
h('div', {sel: 'foo'}, [h(Inspect, {sel: 'bar', name: 'wrong'})]),
);
return {
react: vdom$,
};
}
function parent(sources: {react: ReactSource}) {
const childSinks = isolate(child, 'ISOLATION')(sources);
const vdom$ = childSinks.react.map(child =>
h('div', {sel: 'top-most'}, [
h(Inspect, {sel: 'bar', name: 'correct'}),
child,
]),
);
return {
react: vdom$,
};
}
let times = 0;
const RootComponent = makeCycleReactComponent(() => {
const source = new ReactSource();
const sink = parent({react: source}).react;
source
.select('bar')
.events('press')
.addListener({
next: name => {
assert.strictEqual(name, 'correct');
assert.strictEqual(times, 0);
times += 1;
},
});
return {source, sink};
});
const r = renderer.create(createElement(RootComponent));
const root = r.root;
setTimeout(() => {
const allInspects = root.findAllByType(Inspect, {deep: true});
const [correct, wrong] = allInspects;
assert.strictEqual(correct.props.name, 'correct');
assert.strictEqual(wrong.props.name, 'wrong');
wrong.instance.press();
setTimeout(() => {
correct.instance.press();
}, 100);
}, 100);
setTimeout(() => {
assert.strictEqual(times, 1);
done();
}, 300);
});
it('prevents component from selecting inside sibling', done => {
let times = 0;
function firstborn(sources: {react: ReactSource}) {
sources.react
.select('bar')
.events('press')
.addListener({
next: name => {
assert.strictEqual(name, 'correct');
assert.strictEqual(times, 0);
times += 1;
},
});
const vdom$ = xs.of(
h('div', {sel: 'foo'}, [h(Inspect, {sel: 'bar', name: 'correct'})]),
);
return {
react: vdom$,
};
}
function secondborn(sources: {react: ReactSource}) {
sources.react
.select('bar')
.events('press')
.addListener({
next: name => {
// This listener exists just to make sure the child's inspect
// has an onPress prop
},
});
const vdom$ = xs.of(
h('div', {sel: 'foo'}, [h(Inspect, {sel: 'bar', name: 'wrong'})]),
);
return {
react: vdom$,
};
}
    function parent(sources: {react: ReactSource}) {
      type Sinks = {react: Stream<ReactElement<any>>};
const firstSinks: Sinks = isolate(firstborn, 'first')(sources);
const secondSinks: Sinks = isolate(secondborn, 'second')(sources);
const vdom$ = xs
.combine(firstSinks.react, secondSinks.react)
.map(([firstChild, secondChild]) =>
h('div', {sel: 'top-most'}, [firstChild, secondChild]),
);
return {
react: vdom$,
};
}
const RootComponent = makeCycleReactComponent(() => {
const source = new ReactSource();
const sink = parent({react: source}).react;
return {source, sink};
});
const r = renderer.create(createElement(RootComponent));
const root = r.root;
setTimeout(() => {
const allInspects = root.findAllByType(Inspect, {deep: true});
const [correct, wrong] = allInspects;
assert.strictEqual(correct.props.name, 'correct');
assert.strictEqual(wrong.props.name, 'wrong');
wrong.instance.press();
setTimeout(() => {
correct.instance.press();
}, 100);
}, 100);
setTimeout(() => {
assert.strictEqual(times, 1);
done();
}, 300);
});
});
|
command.go
|
package genschema
import (
"github.com/goadesign/goa/goagen/codegen"
"github.com/goadesign/goa/goagen/meta"
)
// ServiceURL is used to build the JSON schema ID of the root document.
var ServiceURL string
// Command is the goa application code generator command line data structure.
// It implements meta.Command.
type Command struct {
*codegen.BaseCommand
}
// NewCommand instantiates a new command.
func NewCommand() *Command {
base := codegen.NewBaseCommand("schema", "Generate application JSON schema controller")
return &Command{BaseCommand: base}
}
// RegisterFlags registers the command line flags with the given registry.
func (c *Command) RegisterFlags(r codegen.FlagRegistry) {
r.Flags().StringVarP(&ServiceURL, "url", "u", "http://localhost", "API base URL used to build JSON schema ID, e.g. https://www.myapi.com")
}
// Run simply calls the meta generator.
func (c *Command) Run() ([]string, error) {
flags := map[string]string{"url": ServiceURL}
gen := meta.NewGenerator(
"genschema.Generate",
[]*codegen.ImportSpec{codegen.SimpleImport("github.com/goadesign/goa/goagen/gen_schema")},
flags,
)
return gen.Generate()
}
|
transfer_characteristic.rs
|
use std::ffi::CStr;
use std::str::from_utf8_unchecked;
use ffi::AVColorTransferCharacteristic::*;
use ffi::*;
#[derive(Eq, PartialEq, Clone, Copy, Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "lowercase"))]
pub enum TransferCharacteristic {
Reserved0,
BT709,
Unspecified,
Reserved,
GAMMA22,
GAMMA28,
SMPTE170M,
SMPTE240M,
Linear,
Log,
LogSqrt,
IEC61966_2_4,
BT1361_ECG,
IEC61966_2_1,
BT2020_10,
BT2020_12,
SMPTE2084,
SMPTE428,
ARIB_STD_B67,
}
impl TransferCharacteristic {
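    /// Returns FFmpeg's canonical name for this transfer characteristic (via
    /// `av_color_transfer_name`), or `None` for `Unspecified`. A minimal
    /// sketch of the expected behavior (the crate path in the `use` line is
    /// an assumption):
    ///
    /// ```ignore
    /// use ffmpeg::color::TransferCharacteristic;
    ///
    /// assert_eq!(TransferCharacteristic::BT709.name(), Some("bt709"));
    /// assert_eq!(TransferCharacteristic::Unspecified.name(), None);
    /// ```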
pub fn name(&self) -> Option<&'static str> {
if *self == TransferCharacteristic::Unspecified {
return None;
}
unsafe {
let ptr = av_color_transfer_name((*self).into());
ptr.as_ref()
.map(|ptr| from_utf8_unchecked(CStr::from_ptr(ptr).to_bytes()))
}
}
}
impl From<AVColorTransferCharacteristic> for TransferCharacteristic {
fn from(value: AVColorTransferCharacteristic) -> TransferCharacteristic {
match value {
AVCOL_TRC_RESERVED0 => TransferCharacteristic::Reserved0,
AVCOL_TRC_BT709 => TransferCharacteristic::BT709,
AVCOL_TRC_UNSPECIFIED => TransferCharacteristic::Unspecified,
AVCOL_TRC_RESERVED => TransferCharacteristic::Reserved,
AVCOL_TRC_GAMMA22 => TransferCharacteristic::GAMMA22,
AVCOL_TRC_GAMMA28 => TransferCharacteristic::GAMMA28,
AVCOL_TRC_SMPTE170M => TransferCharacteristic::SMPTE170M,
AVCOL_TRC_SMPTE240M => TransferCharacteristic::SMPTE240M,
AVCOL_TRC_LINEAR => TransferCharacteristic::Linear,
AVCOL_TRC_LOG => TransferCharacteristic::Log,
AVCOL_TRC_LOG_SQRT => TransferCharacteristic::LogSqrt,
AVCOL_TRC_IEC61966_2_4 => TransferCharacteristic::IEC61966_2_4,
AVCOL_TRC_BT1361_ECG => TransferCharacteristic::BT1361_ECG,
AVCOL_TRC_IEC61966_2_1 => TransferCharacteristic::IEC61966_2_1,
AVCOL_TRC_BT2020_10 => TransferCharacteristic::BT2020_10,
AVCOL_TRC_BT2020_12 => TransferCharacteristic::BT2020_12,
AVCOL_TRC_NB => TransferCharacteristic::Reserved0,
AVCOL_TRC_SMPTE2084 => TransferCharacteristic::SMPTE2084,
AVCOL_TRC_SMPTE428 => TransferCharacteristic::SMPTE428,
AVCOL_TRC_ARIB_STD_B67 => TransferCharacteristic::ARIB_STD_B67,
}
}
}
impl Into<AVColorTransferCharacteristic> for TransferCharacteristic {
fn into(self) -> AVColorTransferCharacteristic {
match self {
TransferCharacteristic::Reserved0 => AVCOL_TRC_RESERVED0,
TransferCharacteristic::BT709 => AVCOL_TRC_BT709,
TransferCharacteristic::Unspecified => AVCOL_TRC_UNSPECIFIED,
TransferCharacteristic::Reserved => AVCOL_TRC_RESERVED,
TransferCharacteristic::GAMMA22 => AVCOL_TRC_GAMMA22,
TransferCharacteristic::GAMMA28 => AVCOL_TRC_GAMMA28,
TransferCharacteristic::SMPTE170M => AVCOL_TRC_SMPTE170M,
TransferCharacteristic::SMPTE240M => AVCOL_TRC_SMPTE240M,
TransferCharacteristic::Linear => AVCOL_TRC_LINEAR,
TransferCharacteristic::Log => AVCOL_TRC_LOG,
TransferCharacteristic::LogSqrt => AVCOL_TRC_LOG_SQRT,
TransferCharacteristic::IEC61966_2_4 => AVCOL_TRC_IEC61966_2_4,
TransferCharacteristic::BT1361_ECG => AVCOL_TRC_BT1361_ECG,
TransferCharacteristic::IEC61966_2_1 => AVCOL_TRC_IEC61966_2_1,
TransferCharacteristic::BT2020_10 => AVCOL_TRC_BT2020_10,
TransferCharacteristic::BT2020_12 => AVCOL_TRC_BT2020_12,
TransferCharacteristic::SMPTE2084 => AVCOL_TRC_SMPTE2084,
TransferCharacteristic::SMPTE428 => AVCOL_TRC_SMPTE428,
TransferCharacteristic::ARIB_STD_B67 => AVCOL_TRC_ARIB_STD_B67,
}
}
}
|
products.go
|
package papi
import (
	"fmt"

	"github.com/dccarswell/AkamaiOPEN-edgegrid-golang/client-v1"
)
// Products represents a collection of products
type Products struct {
client.Resource
AccountID string `json:"accountId"`
ContractID string `json:"contractId"`
Products struct {
Items []*Product `json:"items"`
} `json:"products"`
}
// NewProducts creates a new Products
func NewProducts() *Products {
products := &Products{}
products.Init()
return products
}
// PostUnmarshalJSON is called after JSON unmarshaling into Products
//
// See: jsonhooks-v1/jsonhooks.Unmarshal()
func (products *Products) PostUnmarshalJSON() error {
products.Init()
for key, product := range products.Products.Items {
products.Products.Items[key].parent = products
if err := product.PostUnmarshalJSON(); err != nil {
return err
}
}
return nil
}
// GetProducts populates Products with product data
//
// API Docs: https://developer.akamai.com/api/luna/papi/resources.html#listproducts
// Endpoint: GET /papi/v1/products/{?contractId}
func (products *Products) GetProducts(contract *Contract) error {
req, err := client.NewRequest(
Config,
"GET",
fmt.Sprintf(
"/papi/v1/products?contractId=%s",
contract.ContractID,
),
nil,
)
if err != nil {
return err
}
res, err := client.Do(Config, req)
if err != nil {
return err
}
if client.IsError(res) {
return client.NewAPIError(res)
}
if err = client.BodyJSON(res, products); err != nil {
return err
}
return nil
}
// FindProduct finds a specific product by ID
func (products *Products) FindProduct(id string) (*Product, error) {
var product *Product
var productFound bool
for _, product = range products.Products.Items {
if product.ProductID == id {
productFound = true
break
}
}
if !productFound {
return nil, fmt.Errorf("Unable to find product: \"%s\"", id)
}
return product, nil
}
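
// A minimal usage sketch (the `contract` value and the "prd_SPM" product ID are
// hypothetical; Config is assumed to be initialized elsewhere in this package):
//
//	products := NewProducts()
//	if err := products.GetProducts(contract); err != nil {
//		// handle request error
//	}
//	product, err := products.FindProduct("prd_SPM")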
// Product represents a product resource
type Product struct {
	client.Resource
	parent      *Products
ProductName string `json:"productName"`
ProductID string `json:"productId"`
}
// NewProduct creates a new Product
func NewProduct(parent *Products) *Product {
product := &Product{parent: parent}
product.Init()
return product
}
|
networking.py
|
"""
Defines helper methods useful for setting up ports, launching servers, and handling `ngrok`
"""
import os
import socket
import threading
from flask import Flask, request, jsonify, abort, send_file, render_template
from flask_cachebuster import CacheBuster
from flask_cors import CORS
import pkg_resources
from distutils import dir_util
import time
import json
import urllib.request
from shutil import copyfile
import requests
import sys
import csv
import logging
import gradio as gr
from gradio.embeddings import calculate_similarity, fit_pca_to_embeddings, transform_with_pca
from gradio.tunneling import create_tunnel
INITIAL_PORT_VALUE = int(os.getenv(
'GRADIO_SERVER_PORT', "7860")) # The http server will try to open on port 7860. If not available, 7861, 7862, etc.
TRY_NUM_PORTS = int(os.getenv(
'GRADIO_NUM_PORTS', "100")) # Number of ports to try before giving up and throwing an exception.
LOCALHOST_NAME = os.getenv(
'GRADIO_SERVER_NAME', "127.0.0.1")
GRADIO_API_SERVER = "https://api.gradio.app/v1/tunnel-request"
GRADIO_FEATURE_ANALYTICS_URL = "https://api.gradio.app/gradio-feature-analytics/"
STATIC_TEMPLATE_LIB = pkg_resources.resource_filename("gradio", "templates/")
STATIC_PATH_LIB = pkg_resources.resource_filename("gradio", "static/")
GRADIO_STATIC_ROOT = "https://gradio.app"
app = Flask(__name__,
template_folder=STATIC_TEMPLATE_LIB,
static_folder=STATIC_PATH_LIB,
static_url_path="/static/")
CORS(app)
cache_buster = CacheBuster(config={'extensions': ['.js', '.css'], 'hash_size': 5})
cache_buster.init_app(app)
app.app_globals = {}
# Hide Flask default message
cli = sys.modules['flask.cli']
cli.show_server_banner = lambda *x: None
def set_meta_tags(title, description, thumbnail):
app.app_globals.update({
"title": title,
"description": description,
"thumbnail": thumbnail
})
def set_config(config):
app.app_globals["config"] = config
def get_local_ip_address():
try:
ip_address = requests.get('https://api.ipify.org').text
except requests.ConnectionError:
ip_address = "No internet connection"
return ip_address
IP_ADDRESS = get_local_ip_address()
def get_first_available_port(initial, final):
"""
Gets the first open port in a specified range of port numbers
:param initial: the initial value in the range of port numbers
:param final: final (exclusive) value in the range of port numbers, should be greater than `initial`
:return:
"""
for port in range(initial, final):
try:
s = socket.socket() # create a socket object
s.bind((LOCALHOST_NAME, port)) # Bind to the port
s.close()
return port
except OSError:
pass
raise OSError(
"All ports from {} to {} are in use. Please close a port.".format(
initial, final
)
)
@app.route("/", methods=["GET"])
def main():
return render_template("index.html",
title=app.app_globals["title"],
description=app.app_globals["description"],
thumbnail=app.app_globals["thumbnail"],
vendor_prefix=(GRADIO_STATIC_ROOT if app.interface.share else "")
)
@app.route("/config/", methods=["GET"])
def config():
return jsonify(app.app_globals["config"])
@app.route("/enable_sharing/<path:path>", methods=["GET"])
def enable_sharing(path):
if path == "None":
path = None
app.app_globals["config"]["share_url"] = path
return jsonify(success=True)
@app.route("/api/predict/", methods=["POST"])
def predict():
raw_input = request.json["data"]
prediction, durations = app.interface.process(raw_input)
output = {"data": prediction, "durations": durations}
return jsonify(output)
def log_feature_analytics(feature):
if app.interface.analytics_enabled:
try:
requests.post(GRADIO_FEATURE_ANALYTICS_URL,
data={
'ip_address': IP_ADDRESS,
'feature': feature})
except requests.ConnectionError:
pass # do not push analytics if no network
@app.route("/api/score_similarity/", methods=["POST"])
def score_similarity():
raw_input = request.json["data"]
preprocessed_input = [input_interface.preprocess(raw_input[i])
for i, input_interface in enumerate(app.interface.input_interfaces)]
input_embedding = app.interface.embed(preprocessed_input)
scores = list()
for example in app.interface.examples:
preprocessed_example = [iface.preprocess(iface.preprocess_example(example))
for iface, example in zip(app.interface.input_interfaces, example)]
example_embedding = app.interface.embed(preprocessed_example)
scores.append(calculate_similarity(input_embedding, example_embedding))
log_feature_analytics('score_similarity')
return jsonify({"data": scores})
@app.route("/api/view_embeddings/", methods=["POST"])
def view_embeddings():
sample_embedding = []
if "data" in request.json:
raw_input = request.json["data"]
preprocessed_input = [input_interface.preprocess(raw_input[i])
for i, input_interface in enumerate(app.interface.input_interfaces)]
sample_embedding.append(app.interface.embed(preprocessed_input))
example_embeddings = []
for example in app.interface.examples:
preprocessed_example = [iface.preprocess(iface.preprocess_example(example))
for iface, example in zip(app.interface.input_interfaces, example)]
example_embedding = app.interface.embed(preprocessed_example)
example_embeddings.append(example_embedding)
pca_model, embeddings_2d = fit_pca_to_embeddings(sample_embedding + example_embeddings)
sample_embedding_2d = embeddings_2d[:len(sample_embedding)]
example_embeddings_2d = embeddings_2d[len(sample_embedding):]
app.pca_model = pca_model
log_feature_analytics('view_embeddings')
return jsonify({"sample_embedding_2d": sample_embedding_2d, "example_embeddings_2d": example_embeddings_2d})
@app.route("/api/update_embeddings/", methods=["POST"])
def update_embeddings():
sample_embedding, sample_embedding_2d = [], []
if "data" in request.json:
raw_input = request.json["data"]
preprocessed_input = [input_interface.preprocess(raw_input[i])
for i, input_interface in enumerate(app.interface.input_interfaces)]
sample_embedding.append(app.interface.embed(preprocessed_input))
sample_embedding_2d = transform_with_pca(app.pca_model, sample_embedding)
return jsonify({"sample_embedding_2d": sample_embedding_2d})
@app.route("/api/predict_examples/", methods=["POST"])
def predict_examples():
example_ids = request.json["data"]
predictions_set = {}
for example_id in example_ids:
example_set = app.interface.examples[example_id]
processed_example_set = [iface.preprocess_example(example)
for iface, example in zip(app.interface.input_interfaces, example_set)]
try:
predictions, _ = app.interface.process(processed_example_set)
        except Exception:
continue
predictions_set[example_id] = predictions
output = {"data": predictions_set}
return jsonify(output)
@app.route("/api/flag/", methods=["POST"])
def flag():
log_feature_analytics('flag')
flag_path = os.path.join(app.cwd, app.interface.flagging_dir)
os.makedirs(flag_path,
exist_ok=True)
output = {'inputs': [app.interface.input_interfaces[
i].rebuild(
flag_path, request.json['data']['input_data'][i]) for i
in range(len(app.interface.input_interfaces))],
'outputs': [app.interface.output_interfaces[
i].rebuild(
flag_path, request.json['data']['output_data'][i])
for i
in range(len(app.interface.output_interfaces))]}
log_fp = "{}/log.csv".format(flag_path)
    is_new = not os.path.exists(log_fp)
    with open(log_fp, "a") as csvfile:
headers = ["input_{}".format(i) for i in range(len(
output["inputs"]))] + ["output_{}".format(i) for i in
range(len(output["outputs"]))]
writer = csv.DictWriter(csvfile, delimiter=',',
lineterminator='\n',
fieldnames=headers)
if is_new:
writer.writeheader()
writer.writerow(
dict(zip(headers, output["inputs"] +
output["outputs"]))
)
return jsonify(success=True)
@app.route("/api/interpret/", methods=["POST"])
def interpret():
log_feature_analytics('interpret')
raw_input = request.json["data"]
interpretation_scores, alternative_outputs = app.interface.interpret(raw_input)
return jsonify({
"interpretation_scores": interpretation_scores,
"alternative_outputs": alternative_outputs
})
@app.route("/file/<path:path>", methods=["GET"])
def file(path):
return send_file(os.path.join(app.cwd, path))
def start_server(interface, server_name, server_port=None):
if server_port is None:
server_port = INITIAL_PORT_VALUE
port = get_first_available_port(
server_port, server_port + TRY_NUM_PORTS
)
app.interface = interface
app.cwd = os.getcwd()
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)
if interface.save_to is not None:
interface.save_to["port"] = port
thread = threading.Thread(target=app.run,
kwargs={"port": port, "host": server_name},
daemon=True)
thread.start()
return port, app, thread
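
# A minimal usage sketch for the helpers above (`iface` stands in for a
# hypothetical gradio.Interface instance):
#
#   port, flask_app, thread = start_server(iface, LOCALHOST_NAME)
#   print("Running locally at: http://{}:{}/".format(LOCALHOST_NAME, port))
#
# Note that close_server() expects a process-like object with terminate()/join(),
# not the daemon thread returned by start_server().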
def close_server(process):
process.terminate()
process.join()
def url_request(url):
try:
req = urllib.request.Request(
url=url, headers={"content-type": "application/json"}
)
res = urllib.request.urlopen(req, timeout=10)
return res
except Exception as e:
raise RuntimeError(str(e))
def setup_tunnel(local_server_port):
response = url_request(GRADIO_API_SERVER)
if response and response.code == 200:
try:
payload = json.loads(response.read().decode("utf-8"))[0]
return create_tunnel(payload, LOCALHOST_NAME, local_server_port)
except Exception as e:
raise RuntimeError(str(e))
def url_ok(url):
try:
r = requests.head(url)
return r.status_code == 200
except ConnectionError:
return False
|
|
x86_64_pc_windows_msvc.rs
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use target::Target;
pub fn target() -> Target {
let mut base = super::windows_msvc_base::opts();
base.cpu = "x86-64".to_string();
Target {
// This is currently in sync with the specification for
// x86_64-pc-windows-gnu but there's a comment in that file questioning
// whether this is valid or not. Sounds like the two should stay in sync
// at least for now.
data_layout: "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-\
f32:32:32-f64:64:64-v64:64:64-v128:128:128-a:0:64-\
s0:64:64-f80:128:128-n8:16:32:64-S128".to_string(),
llvm_target: "x86_64-pc-windows-msvc".to_string(),
target_endian: "little".to_string(),
target_pointer_width: "64".to_string(),
arch: "x86_64".to_string(),
target_os: "windows".to_string(),
target_env: "msvc".to_string(),
options: base,
}
}
|
|
ej.spellcheck.min.js
|
/*!
* filename: ej.spellcheck.min.js
* version : 19.1.0.63
* Copyright Syncfusion Inc. 2001 - 2018. All rights reserved.
* Use of this code is subject to the terms of our license.
|
(function(n){typeof define=="function"&&define.amd?define(["./../common/ej.core.min","./../common/ej.data.min","./../common/ej.globalize.min","./../common/ej.scroller.min","./../common/ej.draggable.min","./ej.dialog.min","./ej.button.min","./ej.listbox.min","./ej.menu.min"],n):n()})(function(){"use strict";var n=this&&this.__extends||function(n,t){function r(){this.constructor=n}for(var i in t)t.hasOwnProperty(i)&&(n[i]=t[i]);n.prototype=t===null?Object.create(t):(r.prototype=t.prototype,new r)};(function(t){var i=function(i){function r(){i.apply(this,arguments);this.rootCSS="e-spellcheck";this.validTags=["div","span","textarea"];this.PluginName="ejSpellCheck";this._id="null";this.defaults={locale:"en-US",misspellWordCss:"e-errorword",ignoreSettings:{ignoreAlphaNumericWords:!0,ignoreHtmlTags:!0,ignoreEmailAddress:!0,ignoreMixedCaseWords:!0,ignoreUpperCase:!0,ignoreUrl:!0,ignoreFileNames:!0},dictionarySettings:{dictionaryUrl:"",customDictionaryUrl:""},maxSuggestionCount:6,ajaxDataType:"jsonp",ajaxRequestType:"GET",ignoreWords:[],contextMenuSettings:{enable:!0,menuItems:[{id:"IgnoreAll",text:"Ignore All"},{id:"AddToDictionary",text:"Add to Dictionary"}]},isResponsive:!0,enableValidateOnType:!1,controlsToValidate:null,enableAsync:!0,actionSuccess:null,actionBegin:null,actionFailure:null,start:null,complete:null,contextOpen:null,contextClick:null,dialogBeforeOpen:null,dialogOpen:null,dialogClose:null,validating:null,targetUpdating:null};this.dataTypes={locale:"string",misspellWordCss:"string",ignoreSettings:{ignoreAlphaNumericWords:"boolean",ignoreHtmlTags:"boolean",ignoreEmailAddress:"boolean",ignoreMixedCaseWords:"boolean",ignoreUpperCase:"boolean",ignoreUrl:"boolean",ignoreFileNames:"boolean"},dictionarySettings:{dictionaryUrl:"string",customDictionaryUrl:"string",customDictionaryPath:"string"},maxSuggestionCount:"number",ajaxDataType:"string",ajaxRequestType:"string",ignoreWords:"array",contextMenuSettings:{enable:"boolean",menuItems:"array"}};this._tags=[{tag:"ignoreSettings",attr:[]},{tag:"dictionarySettings",attr:[]},{tag:"contextMenuSettings.menuItems",attr:["id","text"]},];this._localizedLabels=null;this._statusFlag=!0;this._words=[];this._inputWords=[];this._controlIds=[];this._control=[];this._targetStatus=!0;this._statusMultiTarget=!1;this._changeAllWords=[];this._subElements=[];this._iframeStatus=!1;this._elementStatus=!0;this._suggestedWordCollection=[];this._ignoreStatus=!0;this._suggestedWords=[];this.webMethod=!1;this.model=this.defaults}return n(r,i),r.prototype._init=function(){ej.isNullOrUndefined(this.element)||this._renderSpellCheck()},r.prototype._renderSpellCheck=function(){this._initLocalize();this._renderControls()},r.prototype._initLocalize=function(){this._localizedLabels=this._getLocalizedLabels()},r.prototype._renderControls=function(){var u,i,n,r,f;if(ej.isNullOrUndefined(this.model.controlsToValidate))this._addAttributes(this,this.element);else{for(t(this.element).attr("style","display:none"),this._controlIds=this.model.controlsToValidate.split(","),u=!1,i=0;i<this._controlIds.length;i++)if(n=t(this._controlIds[i]),n.length>0)if(u=!0,n.length>1)for(r=0;r<n.length;r++)f=t(n[r]),this._addAttributes(this,f);else 
this._addAttributes(this,n);this._elementStatus=this._statusFlag=u}this.model.isResponsive&&this._on(t(window),"resize",t.proxy(this._resizeSpellCheck,this))},r.prototype._addAttributes=function(n,i){t(i).addClass("e-spellcheck");i[0].spellcheck=!1;n._addEventListeners(n,i)},r.prototype._addEventListeners=function(n,i){n._isIframe(i)?(t(i)[0].contentWindow.document.addEventListener("input",function(){n._changeStatus(n)},!1),n.model.contextMenuSettings.enable&&(t(i)[0].contentWindow.document.addEventListener("mousedown",function(){n._elementRightClick(n)},!1),t(document)[0].addEventListener("mousedown",function(){n._elementRightClick(n)},!1),t(i)[0].contentWindow.document.addEventListener("keydown",function(t){n._spellValidateOnType(t)},!1))):(i[0].addEventListener("input",function(){n._changeStatus(n)},!1),n.model.contextMenuSettings.enable&&(n._on(t(document),"mousedown",t.proxy(n._elementRightClick,n)),n._on(t(i[0]),"keydown","",this._spellValidateOnType)))},r.prototype._changeStatus=function(n){n._statusFlag=!0;ej.isNullOrUndefined(n.model.controlsToValidate)||(n._controlIds=n.model.controlsToValidate.split(","),n._targetStatus=!0)},r.prototype._isIframe=function(n){return t(n)[0].tagName==="IFRAME"},r.prototype._resizeSpellCheck=function(){var n=!ej.isNullOrUndefined(this._spellCheckWindow)&&this._spellCheckWindow.parents().find(".e-spellcheck.e-dialog-wrap"),t=!ej.isNullOrUndefined(this._spellCheckWindow)&&n.length>0&&this._spellCheckWindow.data("ejDialog"),i,r,u;this.model.isResponsive?!ej.isNullOrUndefined(this._spellCheckWindow)&&n.length>0&&this._spellCheckWindow.ejDialog("isOpen")&&(t._dialogPosition(),i=this._spellCheckWindow.find(".e-suggesteditems").data("ejListBox"),i.refresh(!0),r=this._spellCheckWindow.find(".e-sentence .e-sentencescroller").data("ejScroller"),setTimeout(function(){n.find(".e-dialog-scroller").width(n.width()-2);n.find(".e-suggestionlist .e-content").width(n.find(".e-suggestionlist .e-content").width()-2);r.refresh()},4)):t._dialogPosition();!ej.isNullOrUndefined(this._alertWindow)&&this._alertWindow.data("ejDialog")&&(u=!ej.isNullOrUndefined(this._alertWindow)&&this._alertWindow.data("ejDialog"),u._dialogPosition())},r.prototype.showInDialog=function(){this._statusFlag?this._renderDialogWindow():this._alertWindowRender("show")},r.prototype.validate=function(){var i,r,n,u,f,e,s,o,h;if(this._statusFlag){if(i=[],this.model.contextMenuSettings.enable&&!ej.isNullOrUndefined(this.model.dictionarySettings.dictionaryUrl)){if(n="",this._controlIds.length>0&&!this._currentActiveElement&&this.model.enableValidateOnType||this._controlIds.length>0&&(!this.model.enableValidateOnType||!this._statusMultiTarget)){for(u=0;u<this._controlIds.length;u++)if(f=t(this._controlIds[u]),f.length>0)for(e=0;e<f.length;e++)s=t(f[e]),o="",o=this._elementTextProcess(this,s),n=n===""?n.concat(o):n.concat(" "+o)}else this.model.enableValidateOnType&&this._currentActiveElement?n=this._elementTextProcess(this,t(this._currentActiveElement)):(this._isIframe(this.element)&&t(this.element).contents().find("body").addClass("e-spellcheck"),n=this._elementTextProcess(this,this.element));if(i=this._filteringDiffWords(this,n),this._splitWords(n,this),r={targetSentence:n,requestType:"validate",additionalParams:null,webMethod:!1},this._trigger("actionBegin",r))return!1;i.length>0?this._ajaxRequest(this,i.join(" 
"),"validateOnType",r):i.length!==0||this._ignoreStatus?ej.isNullOrUndefined(this._errorWordDetails)?this.model.enableValidateOnType||this._alertWindowRender("show"):(h=this._filterErrorData(this,this._errorWordDetails),this._validateOnTypeOperations(this,h,n,"validateOnType")):(this._splitInputWords(n,this),i=ej.dataUtil.distinct(this._inputWords),this._ajaxRequest(this,i.join(" "),"validateOnType",r))}}else this._alertWindowRender("show")},r.prototype._filteringDiffWords=function(n,t){var f=n._inputWords,r=[],i,e,u;for(n._splitInputWords(t,n),i=n._inputWords,u=0;u<i.length;u++)e=f.indexOf(i[u]),e===-1&&r.push(i[u]);return i.length!==f.length&&r.length!==0&&i.length===r.length&&(r=i),r},r.prototype._elementTextProcess=function(n,i){return this.model.contextMenuSettings.enable&&this.model.enableValidateOnType&&(this._controlIds.length>0?i[0].nodeType==9&&i[0].nodeName=="#document"&&(i=t(this._controlIds[0])):i=t(this.element[0])),n._isIframe(i)?t(i).contents().find("body").text():ej.isNullOrUndefined(t(i)[0].value)?(t(i)[0].innerText||t(i)[0].textContent).trim():t(i)[0].value.trim()},r.prototype._splitWords=function(n,t){var i=n.split(/[^0-9a-zA-Z\'_]/);i=i.filter(function(n){return/\S/.test(n)});t._words=i},r.prototype._splitInputWords=function(n,t){var i=n.split(" ");t._inputWords=i},r.prototype.spellCheck=function(n,t){var i={targetSentence:n,misspelledWordCss:t,requestType:"spellCheck",webMethod:!1};if(this._trigger("actionBegin",i))return!1;this._misspelledWordCss=t;this._ajaxRequest(this,n,"spellCheck",i)},r.prototype.ignoreAll=function(n,t){var i;return ej.isNullOrUndefined(n)||n===""||ej.isNullOrUndefined(t)||t===""?!1:(i={ignoreWord:n,targetContent:t,requestType:"ignoreAll"},this._trigger("validating",i))?!1:(this.model.ignoreWords.push(n),this._updateErrorContent(n,t,null,"ignoreAll",null))},r.prototype.ignore=function(n,t,i){var r;return ej.isNullOrUndefined(n)||n===""||ej.isNullOrUndefined(t)||t===""?!1:(r={ignoreWord:n,targetContent:t,requestType:"ignore"},this._trigger("validating",r))?!1:this._updateErrorContent(n,t,null,"ignore",i)},r.prototype.change=function(n,t,i,r){var u;return ej.isNullOrUndefined(n)||n===""||ej.isNullOrUndefined(t)||t===""||ej.isNullOrUndefined(i)||i===""?!1:(u={changableWord:n,targetContent:t,changeWord:i,requestType:"changeWord"},this._trigger("validating",u))?!1:this._updateErrorContent(n,t,i,"changeWord",r)},r.prototype.changeAll=function(n,t,i){var u,r;return ej.isNullOrUndefined(n)||n===""||ej.isNullOrUndefined(t)||t===""||ej.isNullOrUndefined(i)||i===""?!1:(u={changableWord:n,targetContent:t,changeWord:i,requestType:"changeAll"},this._trigger("validating",u))?!1:(r={},r.ErrorWord=n,r.ReplaceWord=i,this._changeAllWords.push(r),this._updateErrorContent(n,t,i,"changeAll",null))},r.prototype.addToDictionary=function(n){if(ej.isNullOrUndefined(n)||n==="")return!1;var t={customWord:n,requestType:"addToDictionary",additionalParams:null};if(this._trigger("validating",t))return!1;this._customWord=n;this._ajaxRequest(this,null,"addToDictionary",t)},r.prototype._updateErrorContent=function(n,t,i,r,u){var s,f,o,h,e,c;if(t.indexOf(n)!==-1){if(f='<span class="errorspan '+this.model.misspellWordCss+'">'+n+"<\/span>",o=r==="ignoreAll"||r==="addToDictionary"||r==="ignore"?n:i,r==="ignoreAll"||r==="addToDictionary"||r==="changeAll")t=t.replace(new RegExp(f,"g"),o);else 
if(r==="ignore"||r==="changeWord")if(ej.isNullOrUndefined(u))t=t.replace(f,o);else{for(h=[],e=t.indexOf(f);e!==-1;)h.push(e),e=t.indexOf(f,++e);c=h[u];t=t.substr(0,c)+o+t.substr(c+f.length)}s={resultHTML:t}}else s=!1;return s},r.prototype._renderDialogWindow=function(){this._dialogWindowRendering();this._showDialog()},r.prototype._dialogWindowRendering=function(){var n=this,f,i;this._spellCheckWindow=ej.buildTag("div.e-spellcheckdialog#"+this._id+"ErrorCorrectionWindow");var e=ej.buildTag("div.e-dialogdiv"),o=ej.buildTag("div.e-row e-labelrow").append(ej.buildTag("div.e-labelcell").append(ej.buildTag("label.e-dictionarylabel",this._localizedLabels.NotInDictionary))),r=ej.buildTag("div.e-row e-sentencerow"),s=ej.buildTag("div.e-cell e-sentencecell").append(ej.buildTag("div.e-sentence","",{},{id:this._id+"_Sentences",name:"sentences",contenteditable:"false"}));r.append(s);var h=ej.buildTag("div.e-buttoncell"),c=ej.buildTag("button.e-btnignoreonce",this._localizedLabels.IgnoreOnceButtonText,{},{id:this._id+"_IgnoreOnce"}).attr("type","button"),l=ej.buildTag("button.e-btnignoreall",this._localizedLabels.IgnoreAllButtonText,{},{id:this._id+"_IgnoreAll"}).attr("type","button"),a=ej.buildTag("button.e-btnaddtodictionary",this._localizedLabels.AddToDictionary,{},{id:this._id+"_AddToDictionary"}).attr("type","button");r.append(h.append(c).append(l).append(a));var v=ej.buildTag("div.e-row e-labelrow").append(ej.buildTag("div.e-labelcell").append(ej.buildTag("label.e-lablesuggestions",this._localizedLabels.SuggestionLabel))),u=ej.buildTag("div.e-row e-suggestionsrow"),y=ej.buildTag("div.e-cell e-suggestioncell").append(ej.buildTag("ul.e-suggesteditems","",{},{id:this._id+"_Suggestions"}));u.append(y);var p=ej.buildTag("div.e-buttoncell"),w=ej.buildTag("button.e-btnchange",this._localizedLabels.ChangeButtonText,{},{id:this._id+"_Change"}).attr("type","button"),b=ej.buildTag("button.e-btnchangeall",this._localizedLabels.ChangeAllButtonText,{},{id:this._id+"_ChangeAll"}).attr("type","button"),k=ej.buildTag("button.e-btnclose",this._localizedLabels.CloseButtonText,{},{id:this._id+"_Close"}).attr("type","button");for(u.append(p.append(w).append(b).append(k)),e.append(o).append(r).append(v).append(u),this._spellCheckWindow.append(e),this._spellCheckWindow.ejDialog({width:462,minHeight:305,enableModal:!0,enableResize:!1,showOnInit:!1,allowKeyboardNavigation:!1,target:t("body"),title:this._localizedLabels.SpellCheckButtonText,close:function(){n._close()},cssClass:"e-spellcheck",isResponsive:this.model.isResponsive}),f=[".e-btnignoreonce",".e-btnignoreall",".e-btnaddtodictionary",".e-btnchange",".e-btnchangeall",".e-btnclose"],i=0;i<f.length;i++)this._spellCheckWindow.find(f[i]).ejButton({width:this.model.isResponsive?"100%":140,click:function(t){t.model.text===n._localizedLabels.CloseButtonText?n._close():n._changeErrorWord(t)},cssClass:"e-spellbuttons"});this._spellCheckWindow.find(".e-sentence").append(ej.buildTag("div.e-sentencescroller").append(ej.buildTag("div").append(ej.buildTag("div.e-sentencecontent","",{},{id:this._id+"_SentenceContent"}))));this._spellCheckWindow.find(".e-sentence .e-sentencescroller").ejScroller({height:"100%",scrollerSize:20});this._spellCheckWindow.find(".e-suggesteditems").ejListBox({width:"100%",height:"100%",dataSource:null,selectedIndex:0,cssClass:"e-suggestionlist"})},r.prototype._alertWindowRender=function(n){this._renderAlertWindow(n);this._elementStatus||this._alertWindow.find(".e-alerttext").html(this._localizedLabels.NotValidElement);var 
t={spellCheckDialog:this._renderAlertWindow,requestType:"alertBeforeOpen"};if(this._trigger("dialogBeforeOpen",t))return!ej.isNullOrUndefined(this._spellCheckWindow)&&this._spellCheckWindow.parents().find(".e-spellcheck.e-dialog-wrap").length>0&&this._close(),!1;this._alertWindow.ejDialog("open")},r.prototype._renderAlertWindow=function(n){var i=this;this._alertWindow=ej.buildTag("div.e-alertdialog#"+this._id+"alertWindow");this._elementStatus||this._alertWindow.addClass("e-missingalert");var u=ej.buildTag("div.e-alertbtn","",{"text-align":"center"}).append(ej.buildTag("button.e-alertbutton e-alertspellok",this._localizedLabels.Ok,{},{id:this._id+"alertok"}).attr("type","button")),r=ej.buildTag("div.e-alerttextdiv"),f=ej.buildTag("div.e-alertnotifydiv").append(ej.buildTag("div.e-alertnotification e-icon e-notification")),e=ej.buildTag("div.e-alerttext",this._localizedLabels.CompletionPopupMessage,{"text-align":"left",padding:"5px"});r.append(f).append(e);this._alertWindow.append(r).append(u);this.element.append(this._alertWindow);this._alertWindow.find(".e-alertbutton").ejButton({showRoundedCorner:!0,width:this._elementStatus?"70px":"100px",click:function(){i._alertClose()},cssClass:"e-flat"});this._alertWindow.ejDialog({width:this._elementStatus?240:420,minHeight:140,showOnInit:!1,enableModal:!0,title:this._localizedLabels.CompletionPopupTitle,enableResize:!1,allowKeyboardNavigation:!1,target:n==="validating"?".e-spellcheckdialog":t("body"),cssClass:this._elementStatus?"e-spellalert":"e-spellalert e-elementmissing",close:function(){i._alertClose()},isResponsive:this.model.isResponsive})},r.prototype._renderContextMenu=function(){var n=this,u,r,i,f;if(this._contextMenu=ej.buildTag("ul#"+n._id+"contextMenu"),ej.isNullOrUndefined(n.model.controlsToValidate))u=n._isIframe(this.element)?n.element.contents()[0]:"."+n.model.misspellWordCss;else{for(r=!1,i=0;i<n._controlIds.length;i++)f=n._isIframe(t(n._controlIds[i])),f&&(r=!0);u=r?t(n._controlIds[0]).contents()[0]:"."+n.model.misspellWordCss}this._contextMenu.ejMenu({fields:{id:"id",text:"text",parentId:"parentId"},menuType:ej.MenuType.ContextMenu,openOnClick:!1,width:"auto",cssClass:"e-spellmenu",click:function(t){n._onMenuSelect(t)}})},r.prototype._contextMenuPosition=function(n,i){var u,r,e,f,o,s;return!ej.isNullOrUndefined(i._activeElement)&&i._isIframe(t(i.element))?(e=ej.isNullOrUndefined(i.model.controlsToValidate)?t(i.element):t(i._control[0].controlId),u=(n.clientX==undefined?0:n.clientX)+e.offset().left,r=(n.clientY==undefined?0:n.clientY)+e.offset().top,f=t(i._contextMenu).attr("style","visibility: visible;display:block;").height(),o=t(i._contextMenu).width(),r=r+f<t(document).scrollTop()+t(window).height()?r:r-f<0?r:r-f,u=u+o<t(document).scrollLeft()+t(window).width()?u:u-o):(u=n.clientX+i._contextMenu.width()<t(window).width()?n.pageX:n.pageX-i._contextMenu.width(),r=n.clientY+i._contextMenu.height()<t(window).height()?n.pageY:n.clientY>i._contextMenu.height()?n.pageY-i._contextMenu.height():t(window).height()-i._contextMenu.outerHeight(),s=t("body").css("position")!=="static"?t("body").offset():{left:0,top:0},u-=s.left,r-=s.top),{X:u,Y:r}},r.prototype._showDialog=function(){var 
v={spellCheckDialog:this._spellCheckWindow,requestType:"dialogBeforeOpen"},i,n,f,e,o,u,s,r,c,l,h,a;if(this._trigger("dialogBeforeOpen",v))return!1;if(this._spellCheckWindow.ejDialog("open"),i="",this._subElements=[],this._controlIds.length>0){for(f=0;f<this._controlIds.length;f++)if(e=t(this._controlIds[f]),e.length>0)for(o=0;o<e.length;o++)u=t(e[o]),this._activeElement=this._isIframe(u)?t(u).contents().find("body")[0]:t(u)[0],this._removeSpan(this),this._subElements.push(u[0]);i=this._inputTextProcess(this,t(this._subElements[0]),i);this._proElements=this._subElements.length>0&&t(this._subElements[0]);this._currentTargetElement=n=t(this._subElements[0]);this._subElements=this._subElements.slice(1)}else n=this.element,this._activeElement=this._isIframe(n)?this._getIframeElement(n):t(n)[0],this._removeSpan(this),i=this._inputTextProcess(this,n,i);if(s="",this.element=ej.isNullOrUndefined(this.model.controlsToValidate)?this.element:n,this.element.length>0&&(s=this._isIframe(this.element)?t(this.element).contents().find("body").html():t(n)[0].tagName==="TEXTAREA"||t(n)[0].tagName==="INPUT"?t(n)[0].value:t(n)[0].innerHTML),r=this._filteringDiffWords(this,i),this._splitWords(i,this),!ej.isNullOrUndefined(this.model.controlsToValidate)&&(c={previousElement:null,currentElement:n,targetHtml:s},this._trigger("targetUpdating",c)))return this._close(),!1;if((this._spellCheckWindow.find(".e-sentence .e-sentencecontent")[0].innerHTML=s,l={targetText:i,requestType:"dialogOpen"},this._trigger("dialogOpen",l))||(h={targetSentence:i,requestType:"spellCheck",additionalParams:null,webMethod:!1},this._trigger("actionBegin",h)))return!1;r.length>0?this._ajaxRequest(this,r.join(" "),"spellCheckDialog",h):r.length!==0||this._ignoreStatus?ej.isNullOrUndefined(this._errorWordDetails)?this._alertWindowRender("show"):(a=this._filterErrorData(this,this._errorWordDetails),this._dialogModeOperations(this,a,i,"spellCheckDialog")):(this._splitInputWords(i,this),r=ej.dataUtil.distinct(this._inputWords),this._ajaxRequest(this,r.join(" "),"spellCheckDialog",h))},r.prototype._getIframeElement=function(n){return t(n).contents().find("body")[0]},r.prototype._inputTextProcess=function(n,i,r){var u,f;return n._isIframe(i)?(u=t(i).contents().find("body").text(),r=r===""?u:r+u):(f=ej.isNullOrUndefined(t(i)[0].value)?(t(i)[0].innerText||t(i)[0].textContent).trim():t(i)[0].value.trim(),r=r===""?f:r+" "+f),r},r.prototype._ajaxRequest=function(n,i,r,u){var f=r==="addToDictionary"?JSON.stringify({customWord:n._customWord,additionalParams:u.additionalParams}):this._getModelValues(this,i,u);this.webMethod=this.webMethod?this.webMethod:u.webMethod;t.ajax({type:this.model.ajaxRequestType,async:this.model.enableAsync,url:r==="addToDictionary"?this.model.dictionarySettings.customDictionaryUrl:this.model.dictionarySettings.dictionaryUrl,data:this.model.ajaxDataType==="json"&&this.model.ajaxRequestType==="POST"?this.webMethod?JSON.stringify({data:f}):JSON.stringify(f):{data:f},contentType:"application/json; charset=utf-8",dataType:this.model.ajaxDataType,crossDomain:!0,success:function(u){var f,c,b,o,s,e,k,h,d,it,g,p,l,nt,w,v,y,a,tt;if(u=u&&u.d&&typeof u.d=="object"?u.d:u,c=typeof 
u=="string"&&r!=="addToDictionary"?JSON.parse(u):u,r==="addToDictionary"?ej.isNullOrUndefined(n._errorWordDetails)||ej.isNullOrUndefined(n._currentElement)?f=[]:(f=n._errorWordDetails,ej.isNullOrUndefined(c)||(n._filterData(c.toString(),n),n._errorWordDetails=n._errorWordsData)):f=n._updateErrorDetails(n,c),o=i,f.length>0){if(r==="spellCheckDialog"||r==="validateOnType"||r==="validateOnRender")e=n._filterErrorData(n,f),e.length>0?r==="spellCheckDialog"?n._dialogModeOperations(n,e,o,r):(r==="validateOnType"||r==="validateOnRender")&&n._validateOnTypeOperations(n,e,o,r):(r==="spellCheckDialog"&&n._spellCheckWindow.ejDialog("isOpen")&&n._spellCheckWindow.ejDialog("close"),n._alertWindowRender("validating"));else if(r==="spellCheck"){if(f.length>0){for(k=n._getFilterData(f,n),e=ej.dataUtil.distinct(k),h=0;h<e.length;h++)d=(new ej.Query).where("ErrorWord",ej.FilterOperators.equal,e[h]),it=new ej.DataManager(f).executeLocal(d),e.length>0&&(b='<span class="errorspan '+(!ej.isNullOrUndefined(n._misspelledWordCss)&&n._misspelledWordCss!==""?n._misspelledWordCss:n.model.misspellWordCss)+'">'+e[h]+"<\/span>",g=new RegExp(e[h],"gi"),o=o.replace(g,b));s={resultHTML:o,errorWordDetails:f,requestType:"spellCheck"};n._misspelledWordCss=null}else s={resultHTML:o,errorWordDetails:f,requestType:"spellCheck"};n._trigger("actionSuccess",s)}else if(r==="addToDictionary"&&(ej.isNullOrUndefined(n._currentElement)||(p=t(n._currentElement)[0].tagName==="IFRAME"?t(n._currentElement).contents().find("body").html():t(n._currentElement).html().trim()),l=n._updateErrorContent(n._customWord,p,null,"addToDictionary",null),!ej.isNullOrUndefined(p))){if(!ej.isNullOrUndefined(n._spellCheckWindow)&&n._spellCheckWindow.find(".e-btnaddtodictionary").hasClass("e-select"))nt=n._spellCheckWindow.find(".e-suggesteditems"),w=n._spellCheckWindow.find(".e-sentence .e-sentencecontent"),n._errorWordsData.length>0?(w[0].innerHTML=l.resultHTML,n._replaceErrorText(w,n._customWord.toString()),n._listBoxDataUpdate(n)):(nt.ejListBox({dataSource:null}),n._statusFlag=!1,n._alertWindowRender("validating"));else if(!ej.isNullOrUndefined(n._contextMenu)){if(n._isIframe(n.element)?t(n.element).contents().find("body").html(l.resultHTML):t(n._currentElement)[0].innerHTML=l.resultHTML,n._controlIds.length>0)for(v=0;v<n._controlIds.length;v++)for(y=t(n._controlIds[v]),a=0;a<y.length;a++)t(n._currentElement)[0]!==t(y[a])[0]&&(tt=t(y[a]),n._replaceErrorText(tt,n._customWord.toString()));n._renderMenu(n)}s={resultHTML:l.resultHTML,errorWordDetails:c,requestType:"addToDictionary"};n._trigger("actionSuccess",s)}}else n._subElements.length>0?n._updateTargetText(n):(r==="spellCheckDialog"&&n._spellCheckWindow.ejDialog("isOpen")&&n._spellCheckWindow.ejDialog("close"),r==="spellCheck"&&(s={resultHTML:i,errorWordDetails:f,requestType:"spellCheck"},n._trigger("actionSuccess",s)),r==="validateOnType"&&n._removeSpan(n),r!=="spellCheck"&&r!=="addToDictionary"&&n._alertWindowRender("load"))},error:function(t,i,u){var f={errorMessage:u,requestType:r};n._trigger("actionFailure",f)}})},r.prototype.getSuggestionWords=function(n){this._selectedValue=n;this._suggestionsRequest(this,null,n,"getSuggestions")},r.prototype._suggestionsRequest=function(n,i,r,u){var 
f;f=u==="validateByMenu"||u==="suggestionsUpdate"||u==="getSuggestions"?JSON.stringify({requestType:"getSuggestions",errorWord:r}):n._getModelValues(n,r,null);t.ajax({type:this.model.ajaxRequestType,async:n.model.enableAsync,url:n.model.dictionarySettings.dictionaryUrl,data:this.model.ajaxDataType==="json"&&this.model.ajaxRequestType==="POST"?this.webMethod?JSON.stringify({data:f}):JSON.stringify(f):{data:f},contentType:"application/json; charset=utf-8",dataType:n.model.ajaxDataType,crossDomain:!0,success:function(t){var e={},f,s,o,h,c;t=t&&t.d&&typeof t.d=="object"?t.d:t;f=typeof t=="string"?JSON.parse(t):t;e.ErrorWord=r;e.SuggestedWords=f[r];n._suggestedWordCollection.push(e);u==="getSuggestions"?n._suggestedWords=f[n._selectedValue]:u==="validateByMenu"?(s=f[r],n._contextMenuDisplay(n,s)):u==="validateByDialog"?(o=n._updateErrorDetails(n,f),o.length>0?(h=n._filterErrorData(n,o),n._splitWords(i[0].innerText,n),n._processNode(n,i[0],h,"spellCheckDialog"),n._activeElement=i[0],n._changeAllErrors(n),n._listBoxDataUpdate(n)):n._subElements.length>0?n._updateTargetText(n):n._completionCheck(n)):u==="suggestionsUpdate"&&(c=f[i[0].innerText],n._dialogSuggestionsUpdate(n,c))}})},r.prototype._filterErrorData=function(n,t){var i=n._getFilterData(t,n);return ej.dataUtil.distinct(i)},r.prototype._updateErrorDetails=function(n,t){var i=[],r;if(ej.isNullOrUndefined(n._errorWordDetails))i=n._errorWordDetails=t;else if(t.length>0)if(n._ignoreStatus)for(r=0;r<t.length;r++)n._errorWordDetails.push(t[r]),i=n._errorWordDetails;else i=n._errorWordDetails=t,n._ignoreStatus=!0;else i=n._errorWordDetails;return i},r.prototype._contextMenuDisplay=function(n,i){var r,o,u,c,a,v,s,f,h,e,l;if(ej.isNullOrUndefined(n._contextMenu)&&n._renderContextMenu(),r=n._contextMenu.data("ejMenu"),o=n.model.contextMenuSettings.menuItems,i.length>0&&this.model.maxSuggestionCount>0){for(u=[],c=n.model.maxSuggestionCount<i.length?n.model.maxSuggestionCount:i.length,u=n._convertData(i.slice(0,c),"menuData"),a=u.length,v=u[c-1].id,s=0;s<o.length;s++)u.push(o[s]);for(r.option("fields.dataSource",u),f=r.element.find(".e-list"),h=0;h<a;h++)t(f[h]).addClass("e-errorsuggestions");for(e=0;e<f.length;e++)f[e].attributes.id.value===v&&t(f[e]).addClass("e-separator")}else r.option("fields.dataSource",o);l=n._contextMenuPosition(n._menuEvents,n);t(r.element).css({left:l.X,top:l.Y});t(r.element).css("display","block")},r.prototype._dialogSuggestionsUpdate=function(n,i){var f=n._spellCheckWindow.find(".e-suggesteditems"),e=t("#"+n._id+"_Suggestions").data("ejListBox"),r,o,u;i.length>0?(n._spellCheckWindow.find(".e-btnchange").hasClass("e-disable")&&n._spellCheckWindow.find(".e-btnchangeall").hasClass("e-disable")&&(n._spellCheckWindow.find(".e-btnchange").removeClass("e-disable"),n._spellCheckWindow.find(".e-btnchangeall").removeClass("e-disable")),o=n.model.maxSuggestionCount<i.length?n.model.maxSuggestionCount:i.length,r=i.slice(0,o)):(n._spellCheckWindow.find(".e-btnchange").addClass("e-disable"),n._spellCheckWindow.find(".e-btnchangeall").addClass("e-disable"),r=[n._localizedLabels.NoSuggestionMessage]);f.ejListBox({selectedIndex:null});f.ejListBox({dataSource:n._convertData(r,"dictionaryData"),selectedIndex:0});ej.isNullOrUndefined(e)||e.refresh();u=n._spellCheckWindow.find(".e-sentence .e-sentencescroller").data("ejScroller");!ej.isNullOrUndefined(u)&&u.isVScroll()&&t(n._spellCheckWindow.find("."+n.model.misspellWordCss)).get(0).scrollIntoView(!1)},r.prototype._replaceErrorText=function(n,i){for(var 
f,u=t(n).find(".errorspan"),r=0;r<u.length;r++)f=u[r].innerText||u[r].textContent,f===i&&t(u[r]).replaceWith(f)},r.prototype._dialogModeOperations=function(n,t,i,r){var o={errorWords:n._errorWordDetails,targetText:i,requestType:r},u=n._spellCheckWindow.find(".e-sentence .e-sentencecontent"),f,e;if(t.length>0){if(n._removeSpan(n),n._processNode(n,u[0],t,r),this._trigger("start",o))return!1;f=n._spellCheckWindow.find(".e-sentence .e-sentencescroller").data("ejScroller");f.refresh();n._listBoxDataUpdate(n)}else e=n._spellCheckWindow.find(".e-suggesteditems"),r==="spellCheckDialog"&&(u[0].innerHTML=i),e.ejListBox({dataSource:null}),n._statusFlag=!1,this._alertWindowRender("load")},r.prototype._validateOnTypeOperations=function(n,i,r,u){var o,e,h,c,s,l,f;if(i.length>0){if(this._controlIds.length>0&&!this._currentActiveElement&&this.model.enableValidateOnType||this._controlIds.length>0&&(!this.model.enableValidateOnType||!this._statusMultiTarget))for(f=0;f<n._controlIds.length;f++)for(o=t(this._controlIds[f]),e=0;e<o.length;e++)h=n._isIframe(t(o[e]))?n._getIframeElement(t(o[e])):t(o[e])[0],n._activeElement=h,n._removeSpan(n),n._processNode(n,h,i,u),c={},c.controlId=n._controlIds[f],c.errorHtml=h.innerHTML,n._control.push(c);else this.model.enableValidateOnType&&this._currentActiveElement?(n._removeSpan(n),n._processNode(n,this._currentActiveElement,i,u),this._statusMultiTarget=!1,n._isIframe(n.element)&&(s=n._getIframeElement(n.element),n._activeElement=s)):n._isIframe(n.element)?(s=n._getIframeElement(n.element),n._activeElement=s,n._removeSpan(n),n._processNode(n,s,i,u)):(n._removeSpan(n),n._processNode(n,t(n.element)[0],i,u));if(n._statusFlag=!0,l=this._controlIds.length>0?{errorWords:n._errorWordDetails,targetControls:this._control,requestType:u}:{errorWords:n._errorWordDetails,targetText:t(n.element)[0].innerText,requestType:u},this._trigger("start",l))return!1;if(n._isIframe(this.element))n._bindBeforeOpen(n,t(this.element).contents().find("body"));else if(n._controlIds.length>0)for(f=0;f<n._controlIds.length;f++)n._bindBeforeOpen(n,t(n._controlIds[f]));else n._bindBeforeOpen(n,t(this.element))}else n._removeSpan(n),n._statusFlag=!1,n._alertWindowRender("show");this.model.enableValidateOnType&&n.setCursorPosition(n._currentCursorTarget)},r.prototype._bindBeforeOpen=function(n,i){n._on(t(i).find("."+this.model.misspellWordCss),"contextmenu",t.proxy(n._contextOpen,n))},r.prototype._contextOpen=function(n){var u=t(n.target),i,r,e,f;if(u.hasClass("errorspan")){if(n.preventDefault(),i=this,r=i._selectedValue=u[0].innerText,i._selectedTarget=u[0],i._menuEvents=n,e={selectedErrorWord:r,requestType:"contextOpen"},i._trigger("contextOpen",e))return!1;f=i._filterSuggestions(i,r);f.length>0?i._contextMenuDisplay(i,f[0].SuggestedWords):i._suggestionsRequest(i,null,r,"validateByMenu")}else this._elementRightClick(n)},r.prototype._processNode=function(n,t,i,r){for(var b,k,c,u,p,s=n._filterTextNodes(n,t),e=0;e<s.length;e++){var 
o=s[e],w=[s[e]],h=s[e].data,l=!1,a=!1,v=!1;if(n.model.ignoreSettings.ignoreUrl&&(b=/^((http|ftp|https)?:\/\/)?(www\.)?[a-z0-9]+([\-\.]{1}[a-z0-9]+)*\.[a-z]{2,5}(:[0-9]{1,5})?(\/.*)?$/,l=b.test(o.wholeText),l&&(v=l)),n.model.ignoreSettings.ignoreEmailAddress&&(k=/^[-a-z0-9~!$%^&*_=+}{\'?]+(\.[-a-z0-9~!$%^&*_=+}{\'?]+)*@([a-z0-9_][-a-z0-9_]*(\.[-a-z0-9_]+)*\.(aero|arpa|biz|com|coop|edu|gov|info|int|mil|museum|name|net|org|pro|travel|mobi|[a-z][a-z])|([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}))(:[0-9]{1,5})?$/i,a=k.test(o.wholeText),a&&(v=a)),!v)for(c=0;c<n._words.length;c++)for(u=0;u<i.length;u++)if(n._words[c]===i[u]&&!ej.isNullOrUndefined(h.match(new RegExp("\\b"+i[u]+"\\b","gi")))&&h.indexOf(i[u])!==-1){var d=h.indexOf(i[u]),g=i[u].length,f=o.splitText(d),y=document.createElement("span");y.className=r==="validateOnType"?"errorspan "+this.model.misspellWordCss:"errorspan";p=document.createTextNode(i[u]);y.appendChild(p);o.parentNode.insertBefore(y,f);f.data=f.data.substr(g);o=f;w.push(p);w.push(f);h=f.data}}},r.prototype._findRepeatWords=function(n,t,i,r){for(var o,f,s,e,h=/([{^}:[\\.,;><?|@!~`#$%&*()_=+'"])/g,u=n;u<=t.length;u++)if(e=!1,f=t.charAt(u),s=f.charCodeAt(f),e=h.test(f),f==" "||s==160||f==""||e)if(o=t.slice(n,u),o===i[r])break;else n=u+1;return n},r.prototype._spellValidateOnType=function(n){var i,t;if(this.model.enableValidateOnType&&this.model.contextMenuSettings.enable&&(i={events:n,requestType:"validate"},this._trigger("validating",i),this._statusMultiTarget=!1,this._currentActiveElement=i.events.currentTarget,i.events.cancelable===!0)){if(t=n.keyCode,t>=16&&t<=31)return;if(t>=37&&t<=40)return;(t===32||t===13)&&(this._statusMultiTarget=!0,this._triggerSpelling())}},r.prototype._triggerSpelling=function(){var n=this;setTimeout(function(){n.getCursorPosition();n.validate()},2)},r.prototype.getCursorPosition=function(){var n,f,r,i,e,o,u,s,h,c,l;if(this.model.enableValidateOnType&&this.model.contextMenuSettings.enable){if(n=this,f=String.fromCharCode(7),this._controlIds.length>0)for(e=0;e<this._controlIds.length;e++)o=t(this._controlIds[e]),this._isIframe(o)?(r=o[0].contentWindow.getSelection(),i=o[0].contentDocument.createRange()):(r=document.getSelection(),i=document.createRange());else this._isIframe(this.element)?(r=this.element[0].contentWindow.getSelection(),i=this.element[0].contentDocument.createRange()):(r=document.getSelection(),i=document.createRange());if(u=r.getRangeAt(0),u.deleteContents(),this._isIframe(this.element)&&ej.browserInfo().name==="msie"){for(s=this.element[0].contentDocument.createElement("div"),s.innerHTML=f,h=t(this.element[0]).contents()[0].createDocumentFragment();c=s.firstChild;)l=h.appendChild(c);u.insertNode(h)}else u.insertNode(document.createTextNode(f));return t(u.startContainer.parentElement).hasClass("errorspan")&&(this.model.controlsToValidate?n._normalizeTextNodes(this._currentActiveElement):n._normalizeTextNodes(t(n.element)[0])),n._currentCursorTarget=n._getActiveTarget(n,f),i.collapse(!0),i.setStart(n._currentCursorTarget.node,n._currentCursorTarget.offset),i.setEnd(n._currentCursorTarget.node,n._currentCursorTarget.offset),r.removeAllRanges(),r.addRange(i),n._currentCursorTarget}},r.prototype._getActiveTarget=function(n,i){var u,f,e,r;for(u=this.model.enableValidateOnType?n._filterTextNodes(n,this._currentActiveElement):n._filterTextNodes(n,t(n.element)[0]),f=null,e=null,r=0;r<u.length;r++)if(u[r].data.indexOf(i)>-1)return 
e=u[r],f=u[r].data.indexOf(i),u[r].data=u[r].data.replace(i,""),{node:e,offset:f}},r.prototype.setCursorPosition=function(n){var s,h,v,r,k,o,l,u,f,y,e,w,i,b;if(this._controlIds.length>0)for(i=0;i<this._controlIds.length;i++)v=t(this._controlIds[i]),this._isIframe(v)?(s=v[0].contentDocument.getSelection(),h=v[0].contentDocument.createRange()):(s=document.getSelection(),h=document.createRange());else this._isIframe(this.element)?(s=this.element[0].contentDocument.getSelection(),h=this.element[0].contentDocument.createRange()):(s=document.getSelection(),h=document.createRange());if(s.getRangeAt&&s.rangeCount&&(k=String.fromCharCode(7),n)){for(r=this.model.controlsToValidate?this._filterTextNodes(this,this._currentActiveElement):this._filterTextNodes(this,t(this.element)[0]),o=n.node,l=n.offset,i=0;i<r.length;i++)r[i]===o&&(u=i);if(f="",u===undefined){var a="",c="",p="",f="",d="",g=!1,nt=!1;for(y=0;y<o.length;y++)e=o.data.charAt(y),e.charCodeAt(0)!=160?e.charCodeAt(0)!=32?a===""?f=f+e:e!=k&&(c=c+e):(a=f+e,c=e):(a=f+e,p=" "+p+e);for(f=f+c,d=a.trim(),i=0;i<r.length;i++){if(r[i].data===f&&(u=i),r[i].data===a&&(u=i),r[i].data===c&&c!=""&&(u=i,g=!0),r[i].data===f&&f!=""){u=i;nt=!0;break}if(r[i].data===d&&(u=i),r[i].data===f&&r[i+1]!==undefined&&r[i+1].data.charCodeAt(0)===160){u=i;break}if(r[i].data===f&&r[i+1]!==undefined&&r[i+1].data.charCodeAt(1)===160&&p.length>=1){u=i;break}if((r[i].data===a||r[i].data===f)&&r[i+1]==undefined){w=document.createTextNode("");r.push(w);this._currentActiveElement.appendChild(w);u=i+1;l=r[u].data.length;o=r[u];break}}}for(i=u;i<r.length-1;i++){if(l<=r[i].data.length){o=r[i];break}if(g===!1||c===undefined||c==="")l-=r[i].data.length,o=r[i+1];else{l=1;o=r[i];break}}b=o;h.collapse(!0);h.setStart(b,l);h.setEnd(b,l);s.removeAllRanges();s.addRange(h)}},r.prototype._normalizeTextNodes=function(n){n.normalize();return},r.prototype._filterTextNodes=function(n,t){function r(n){for(var t,u=0;u<n.childNodes.length;u++)t=n.childNodes[u],t.nodeType===3?i.push(t):t.childNodes&&r(t)}var i=[];return r(t),i},r.prototype._removeSpan=function(n){var u,r,i,f;for(n.model.enableValidateOnType&&n._statusMultiTarget?n._currentActiveElement&&(u=n._currentActiveElement):u=!ej.isNullOrUndefined(n.model.controlsToValidate)||n._isIframe(n.element)?n._activeElement:n.element[0],r=t(u).find("span.errorspan"),i=0;i<r.length;i++)f=r[i].innerText||r[i].textContent,t(r[i]).replaceWith(f)},r.prototype._getFilterData=function(n,t){var u=[],i,r;for(t._errorWordsData=t._errorWordDetails=n,i=0;i<t.model.ignoreWords.length;i++)t._filterData(t.model.ignoreWords[i],t);for(r=0;r<t._errorWordsData.length;r++)u.push(t._errorWordsData[r].ErrorWord);return u},r.prototype._filterData=function(n,t){var i=(new ej.Query).where("ErrorWord",ej.FilterOperators.notEqual,n);t._errorWordsData=new ej.DataManager(t._errorWordsData).executeLocal(i)},r.prototype._formHtml=function(n,t){for(var r,u,i=0;i<n.length;i++)r='<span class="errorspan">'+n[i]+"<\/span>",u=new RegExp("\\b"+n[i]+"\\b","gi"),t=t.replace(u,r);return t},r.prototype._listBoxDataUpdate=function(n){var i=n._spellCheckWindow.find(".e-sentence .e-sentencecontent").find(".errorspan"),r,u;t(i[0]).addClass(this.model.misspellWordCss);i.length>0?(r=n._filterSuggestions(n,i[0].innerText),r.length>0?n._dialogSuggestionsUpdate(n,r[0].SuggestedWords):n._suggestionsRequest(n,i,i[0].innerText,"suggestionsUpdate")):!ej.isNullOrUndefined(this.model.controlsToValidate)&&n._targetStatus?n._updateTargetText(n):(u=n._spellCheckWindow.find(".e-sentence 
.e-sentencecontent")[0].innerHTML,n._validationComplete(n,u))},r.prototype._filterSuggestions=function(n,t){var i=[],r,u;return n._suggestedWordCollection.length>0&&(r=(new ej.Query).where("ErrorWord",ej.FilterOperators.equal,t),u=new ej.DataManager(n._suggestedWordCollection).executeLocal(r),i=u),i},r.prototype._validationComplete=function(n,t){n._updateTargetString(n);var i=ej.isNullOrUndefined(n._activeElement)?n.element:n._activeElement,r={targetElement:i,targetText:t,requestType:"changeErrorWord"};if(this._trigger("complete",r))return!1;n._statusFlag=!1;n._alertWindowRender("validating")},r.prototype._onMenuSelect=function(n){var s=n.events.ID.split("_"),e=!1,l,i,u,a,v,h,c,r,o,f,y,p,w,b;if(ej.isNullOrUndefined(this.model.controlsToValidate))e=this._isIframe(this.element);else for(r=0;r<this._controlIds.length;r++)l=this._isIframe(t(this._controlIds[r])),l&&(e=!0),e&&(this.element=t(this._controlIds[0]));if(i=e?this.element:this._selectedTarget.parentElement,this._activeElement=i,u="",u=this._isIframe(t(i))?t(i).contents().find("body").html():t(i).html().trim(),a={selectedOption:s[0],requestType:"menuSelect",targetContent:u,selectedValue:this._selectedValue},this._trigger("contextClick",a))return!1;switch(s[0]){case"AddToDictionary":v=(this._selectedTarget.innerText||this._selectedTarget.textContent).trim();this._currentElement=t(i);this.addToDictionary(v);break;case"IgnoreAll":if(h=(this._selectedTarget.innerText||this._selectedTarget.textContent).trim(),c=this.ignoreAll(h,u),u=c.resultHTML,t(i).html(c.resultHTML),this._controlIds.length>0)for(r=0;r<this._controlIds.length;r++)for(o=t(this._controlIds[r]),f=0;f<o.length;f++)t(i)[0]!==t(o[f])[0]&&(y=t(o[f]),this._replaceErrorText(y,h));this._renderMenu(this);break;default:p=s[0];w=t(n.element).hasClass("e-errorsuggestions");w&&(this._selectedTarget.innerHTML=p,b=document.createTextNode(this._selectedTarget.innerText||this._selectedTarget.textContent),this._selectedTarget.parentNode.insertBefore(b,this._selectedTarget),t(this._selectedTarget).remove(),u=t(i).html());this._renderMenu(this)}},r.prototype._renderMenu=function(n){var i,r=ej.isNullOrUndefined(n._activeElement)?n.element:n._activeElement,u,e,f;if(n._controlIds.length>0){for(u=0;u<n._controlIds.length;u++)if(i=n._getErrorLength(n,t(n._controlIds[u])),i>0)break}else i=n._getErrorLength(n,t(n.element));if(i===0&&(e={targetElement:r,requestType:"validate"},n._trigger("complete",e)))return!1;n._statusFlag=i>0?!0:!1;f=n._contextMenu.data("ejMenu");t(f.element).is(":visible")&&f.hide();n._isIframe(t(r))?n._bindBeforeOpen(n,t(r).contents().find("body")):n._bindBeforeOpen(n,t(r))},r.prototype._getErrorLength=function(n,i){var r=n._isIframe(i)?t(i).contents().find("body")[0]:t(i);return t(r).find(".errorspan").length},r.prototype._getElement=function(){for(var t=document.getElementsByTagName("span"),r=this._selectedValue,i=[],n=0;n<t.length;n++)t[n].textContent===r&&i.push(t[n]);return i},r.prototype._alertClose=function(){!ej.isNullOrUndefined(this._alertWindow)&&this._alertWindow.parents().find(".e-alertdialog").length>0&&(this._alertWindow.ejDialog("close"),this._alertWindow.parents().find(".e-alertdialog").remove(),this._close())},r.prototype._close=function(){var r,i,n,f,u,e;if(!ej.isNullOrUndefined(this._spellCheckWindow)&&this._spellCheckWindow.parents().find(".e-spellcheck.e-dialog-wrap").length>0){for(r=this._spellCheckWindow.find(".e-sentence 
.e-sentencecontent"),i=t(r[0]).find("span.errorspan"),n=0;n<i.length;n++)f=i[n].innerText||i[n].textContent,t(i[n]).replaceWith(f);if(this._updateTargetString(this),u=r.html(),e=ej.isNullOrUndefined(this.model.controlsToValidate)?{updatedText:u,requestType:"dialogClose"}:{updatedText:u,targetElement:this._currentTargetElement,requestType:"dialogClose"},this._trigger("dialogClose",e))return!1;this._spellCheckWindow.ejDialog("isOpen")&&this._spellCheckWindow.ejDialog("close");this._spellCheckWindow.parents().find(".e-spellcheck.e-dialog-wrap").remove();this._changeAllWords=[];ej.isNullOrUndefined(this.model.controlsToValidate)||(this._controlIds=this.model.controlsToValidate.split(","),this._subElements=[])}},r.prototype._changeErrorWord=function(n){var u=t("#"+this._id+"_Suggestions").ejListBox("option","value"),e=this._spellCheckWindow.find(".e-sentence .e-sentencecontent"),f=this._spellCheckWindow.find(".e-sentence .e-sentencecontent")[0].innerHTML,r=t(this._spellCheckWindow.find(".e-sentence .e-sentencecontent").find("."+this.model.misspellWordCss)[0]).text().trim(),i;u=u===this._localizedLabels.NoSuggestionMessage?r:u;n.model.text===this._localizedLabels.AddToDictionary?(this._currentElement=t(e),this.addToDictionary(r)):n.model.text===this._localizedLabels.IgnoreOnceButtonText?(i=this.ignore(r,f,null),i!==!1&&this._updateErrorWord(this,i,n,r,null,"ignore")):n.model.text===this._localizedLabels.IgnoreAllButtonText?(i=this.ignoreAll(r,f),i!==!1&&this._updateErrorWord(this,i,n,r,null,"ignoreAll")):n.model.text===this._localizedLabels.ChangeButtonText?(i=this.change(r,f,u,null),i!==!1&&this._updateErrorWord(this,i,n,r,u,"change")):n.model.text===this._localizedLabels.ChangeAllButtonText&&(i=this.changeAll(r,f,u),i!==!1&&this._updateErrorWord(this,i,n,r,u,"changeAll"))},r.prototype._convertData=function(n,t){for(var r=[],i=0;i<n.length;i++)t==="dictionaryData"?r.push({field:n[i]}):t==="menuData"&&r.push({id:n[i],text:n[i]});return r},r.prototype._updateErrorWord=function(n,i,r,u,f,e){var s=this._spellCheckWindow.find(".e-suggesteditems"),o,h,c;n._spellCheckWindow.find(".e-sentence .e-sentencecontent")[0].innerHTML=i.resultHTML;o=this._spellCheckWindow.find(".e-sentence .e-sentencecontent").find(".errorspan");o.length>0?n._targetUpdate(n,o,u,e,f):!ej.isNullOrUndefined(this.model.controlsToValidate)&&n._targetStatus?n._updateTargetText(n):(ej.isNullOrUndefined(this.model.controlsToValidate)||t(this._proElements).html(n._spellCheckWindow.find(".e-sentence .e-sentencecontent")[0].innerHTML),c=[this._localizedLabels.NoSuggestionMessage],s.ejListBox({selectedItemIndex:null}),s.ejListBox({dataSource:this._convertData(c,"dictionaryData"),selectedItemIndex:0}),h=t("#"+this._id+"_Suggestions").data("ejListBox"),h.refresh(),n._validationComplete(n,i.resultHTML))},r.prototype._targetUpdate=function(n,i,r,u,f){var e,c,s,o,h;if(u==="changeAll")for(e=0;e<i.length;e++)c=i[e].innerText||i[e].textContent,c===r&&t(i[e]).replaceWith(f);for(s=0;s<this.model.ignoreWords.length;s++)for(o=0;o<i.length;o++)h=i[o].innerText||i[o].textContent,h===n.model.ignoreWords[s]&&t(i[o]).replaceWith(h);n._listBoxDataUpdate(n)},r.prototype._updateTargetText=function(n){var 
e,i,r,o,u,s,f,h;if(n._updateTargetString(n),e=ej.isNullOrUndefined(n.model.controlsToValidate)?t("#"+n._id):t(n._proElements),n._proElements=t(n._subElements[0]),n._proElements.length>0||n._subElements.length>0){if(i=t(n._subElements[0]),n._currentTargetElement=i,r=t(i)[0].tagName==="TEXTAREA"||t(i)[0].tagName==="INPUT"?t(i)[0].value:t(i)[0].innerHTML,o={previousElement:e,currentElement:i,targetHtml:r,requestType:"updateText"},n._trigger("targetUpdating",o))return n._close(),!1;n._spellCheckWindow.find(".e-sentence .e-sentencecontent")[0].innerHTML=r;n._subElements=n._subElements.slice(1);u=n._spellCheckWindow.find(".e-sentence .e-sentencecontent");s=n._filteringDiffWords(n,u[0].innerText);n._suggestionsRequest(n,u,s.toString(),"validateByDialog")}else f=n._spellCheckWindow.find(".e-sentence .e-sentencecontent").find(".errorspan"),f.length===0&&n._subElements.length>0?n._updateTargetText(n):f.length>0?(h=n._spellCheckWindow.find(".e-sentence .e-sentencescroller").data("ejScroller"),h.refresh(),n._listBoxDataUpdate(n)):n._completionCheck(n)},r.prototype._updateTargetString=function(n){var i=ej.isNullOrUndefined(n.model.controlsToValidate)?t("#"+n._id):t(n._proElements),r;i.length>0&&(r=n._spellCheckWindow.find(".e-sentence .e-sentencecontent")[0].innerHTML,n._isIframe(i)?i.contents().find("body").html(r):ej.isNullOrUndefined(i[0].value)?i.html(r):i.val(r))},r.prototype._completionCheck=function(n){n._subElements=n._subElements.slice(1);n._subElements.length===0&&(n._targetStatus=!1);n._validationComplete(n,"")},r.prototype._changeAllErrors=function(n){for(var f,i,r=t(n._activeElement).find(".errorspan"),u=0;u<r.length;u++)for(f=r[u].innerText||r[u].textContent,i=0;i<n._changeAllWords.length;i++)f===n._changeAllWords[i].ErrorWord&&t(r[i]).replaceWith(n._changeAllWords[i].ReplaceWord)},r.prototype._setModel=function(n){var e=this,i,r,u,f;for(i in n)if(n.hasOwnProperty(i))switch(i){case"locale":this.model.locale=n[i];this._localizedLabels=ej.getLocalizedConstants("ej.SpellCheck",this.model.locale);break;case"misspellWordCss":if(this.model.misspellWordCss=n[i],this.model.contextMenuSettings.enable)if(ej.isNullOrUndefined(this.model.controlsToValidate))this._changeMisspellWordCss(this.element[0]);else 
for(r=0;r<this._controlIds.length;r++)this._changeMisspellWordCss(this._controlIds[r]);break;case"contextMenuSettings":t.extend(this.model.contextMenuSettings,n[i]);this.model.contextMenuSettings.enable?(this.validate(),this._renderControls()):(ej.isNullOrUndefined(this._contextMenu)||this._contextMenu.parent().remove(),this._removeSpan(this));break;case"ignoreSettings":t.extend(this.model.ignoreSettings,n[i]);this._ignoreStatus=!1;this._statusFlag=!0;this.model.contextMenuSettings.enable&&(this.validate(),this._renderControls());break;case"dictionarySettings":t.extend(this.model.dictionarySettings,n[i]);break;case"maxSuggestionCount":this.model.maxSuggestionCount=n[i];break;case"ignoreWords":this.model.ignoreWords=n[i];this.model.contextMenuSettings.enable&&this.validate();break;case"controlsToValidate":if(this.model.controlsToValidate=n[i],ej.isNullOrUndefined(this.model.controlsToValidate))for(t(this.element).attr("style","display:block"),u=0;u<this._controlIds.length;u++)f=t(this._controlIds[u]),f.removeClass("e-spellcheck"),f[0].spellcheck=!0,f[0].addEventListener("input",function(){e._statusFlag=!1},!1);this._renderControls();break;case"isResponsive":this.model.isResponsive=n[i];this._renderControls();break;case"enableValidateOnType":this.model.enableValidateOnType=n[i];this._renderControls()}},r.prototype._changeMisspellWordCss=function(n){var i=t(n).find("span.errorspan").attr("class").toString().split(" ")[1];t(n).find("span.errorspan").removeClass(i).addClass(this.model.misspellWordCss)},r.prototype._getModelValues=function(n,t,i){var r={ignoreAlphaNumericWords:n.model.ignoreSettings.ignoreAlphaNumericWords,ignoreEmailAddress:n.model.ignoreSettings.ignoreEmailAddress,ignoreHtmlTags:n.model.ignoreSettings.ignoreHtmlTags,ignoreMixedCaseWords:n.model.ignoreSettings.ignoreMixedCaseWords,ignoreUpperCase:n.model.ignoreSettings.ignoreUpperCase,ignoreUrl:n.model.ignoreSettings.ignoreUrl,ignoreFileNames:n.model.ignoreSettings.ignoreFileNames};return JSON.stringify({requestType:"checkWords",model:r,text:t,additionalParams:ej.isNullOrUndefined(i)?null:i.additionalParams})},r.prototype._getLocalizedLabels=function(){return ej.getLocalizedConstants(this.sfType,this.model.locale)},r.prototype._elementRightClick=function(n){if(!ej.isNullOrUndefined(this._contextMenu)&&!t(n.target).hasClass("e-menulink")){var i=this._contextMenu.data("ejMenu");ej.isNullOrUndefined(i)||t(i.element).is(":visible")&&i.hide()}},r}(ej.WidgetBase);ej.widget("ejSpellCheck","ej.SpellCheck",new i)})(jQuery);ej.SpellCheck.Locale=ej.SpellCheck.Locale||{};ej.SpellCheck.Locale["default"]=ej.SpellCheck.Locale["en-US"]={SpellCheckButtonText:"Spelling:",NotInDictionary:"Not in Dictionary:",SuggestionLabel:"Suggestions:",IgnoreOnceButtonText:"Ignore Once",IgnoreAllButtonText:"Ignore All",AddToDictionary:"Add to Dictionary",ChangeButtonText:"Change",ChangeAllButtonText:"Change All",CloseButtonText:"Close",CompletionPopupMessage:"Spell check is complete",CompletionPopupTitle:"Spell check",Ok:"OK",NoSuggestionMessage:"No suggestions available",NotValidElement:"Specify the valid control id or class name to spell check"}});
|
* A copy of the current license can be obtained at any time by e-mailing
* [email protected]. Any infringement will be prosecuted under
* applicable laws.
*/
|
cipshut.rs
|
use crate::{Error, SerialReadTimeout};
use super::{AtCommand, AtDecode, AtExecute, Decoder};
pub struct Cipshut;
impl AtCommand for Cipshut {
const COMMAND: &'static str = "AT+CIPSHUT";
}
#[derive(Clone, Copy)]
pub enum DisconnectResult {
Failure,
Success,
}
impl AtDecode for DisconnectResult {
fn
|
<B: SerialReadTimeout>(
decoder: &mut Decoder<B>,
timeout_ms: u32,
) -> Result<Self, Error<B::SerialError>> {
let status = match decoder.remainder_str(timeout_ms)? {
"SHUT OK" => DisconnectResult::Success,
"ERROR" => DisconnectResult::Failure,
_ => return Err(crate::Error::DecodingFailed),
};
Ok(status)
}
}
impl AtExecute for Cipshut {
type Output = DisconnectResult;
}
|
decode
|
client.go
|
// Copyright (c) 2015, Segiusz 'q3k' Bazanski <[email protected]>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package crowbar
import (
"errors"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"strings"
"sync"
"encoding/base64"
"crypto/hmac"
"crypto/sha256"
)
type ProxyConnection struct {
uuid string
server string
read_buffer []byte
|
read_mutex sync.Mutex
}
func (c *ProxyConnection) Write(b []byte) (int, error) {
url_args := "?uuid=" + c.uuid // plain concatenation; the uuid must not be treated as a format string
post_args := url.Values{}
post_args.Set("data", base64.StdEncoding.EncodeToString(b))
resp, err := http.PostForm(c.server + EndpointSync + url_args, post_args)
if err != nil {
return 0, err
}
data_bytes, err := ioutil.ReadAll(resp.Body)
if err != nil {
return 0, err
}
defer resp.Body.Close()
data := string(data_bytes)
if !strings.HasPrefix(data, PrefixOK) {
msg := fmt.Sprintf("Could not send to server: %s", data)
return 0, errors.New(msg)
}
return len(b), nil
}
func (c *ProxyConnection) FillReadBuffer() error {
args := "?uuid=" + c.uuid // plain concatenation; the uuid must not be treated as a format string
resp, err := http.Get(c.server + EndpointSync + args)
if err != nil {
return err
}
data_bytes, err := ioutil.ReadAll(resp.Body)
if err != nil {
return err
}
defer resp.Body.Close()
data := string(data_bytes)
if strings.HasPrefix(data, PrefixData) {
data := data[len(PrefixData):]
decodeLen := base64.StdEncoding.DecodedLen(len(data))
bData := make([]byte, len(c.read_buffer) + decodeLen)
copy(bData, c.read_buffer) // keep any bytes already buffered before appending the new data
n, err := base64.StdEncoding.Decode(bData[len(c.read_buffer):], []byte(data))
if err != nil {
return err
}
bData = bData[:len(c.read_buffer)+n]
c.read_buffer = bData
} else {
return errors.New("Could not read from server")
}
return nil
}
func (c *ProxyConnection) Read(b []byte) (n int, err error) {
c.read_mutex.Lock()
// If local buffer is empty, get new data
if len(c.read_buffer) == 0 {
err := c.FillReadBuffer()
if err != nil {
c.read_mutex.Unlock()
return 0, err
}
}
// Return local buffer
count := len(b)
if count > len(c.read_buffer){
count = len(c.read_buffer)
}
copy(b, c.read_buffer[:count])
c.read_buffer = c.read_buffer[count:]
c.read_mutex.Unlock()
return count, nil
}
func Connect(server, username, password, remote string) (*ProxyConnection, error) {
if strings.HasSuffix(server, "/") {
server = server[:len(server)-1]
}
conn := ProxyConnection{server: server}
args := fmt.Sprintf("?username=%s", username)
resp, err := http.Get(conn.server + EndpointAuth + args)
if err != nil {
return &ProxyConnection{}, err
}
data_bytes, err := ioutil.ReadAll(resp.Body)
if err != nil {
return &ProxyConnection{}, err
}
defer resp.Body.Close()
data := string(data_bytes)
if !strings.HasPrefix(data, PrefixData) {
msg := fmt.Sprintf("crowbar: Invalid data returned by server: %s", data)
return &ProxyConnection{}, errors.New(msg)
}
nonce_b64 := data[len(PrefixData):]
decodeLen := base64.StdEncoding.DecodedLen(len(nonce_b64))
nonce := make([]byte, decodeLen)
n, err := base64.StdEncoding.Decode(nonce, []byte(nonce_b64))
if err != nil {
return &ProxyConnection{}, errors.New("crowbar: Invalid nonce")
}
nonce = nonce[:n]
mac := hmac.New(sha256.New, []byte(password))
mac.Write(nonce)
hmac := mac.Sum(nil)
v := url.Values{}
v.Set("remote_host", strings.Split(remote, ":")[0])
v.Set("remote_port", strings.Split(remote, ":")[1])
v.Set("username", username)
v.Set("proof", base64.StdEncoding.EncodeToString(hmac))
resp, err = http.Get(conn.server + EndpointConnect + "?" + v.Encode())
if err != nil {
return &ProxyConnection{}, err
}
data_bytes, err = ioutil.ReadAll(resp.Body)
if err != nil {
return &ProxyConnection{}, err
}
defer resp.Body.Close()
data = string(data_bytes)
if !strings.HasPrefix(data, PrefixOK) {
return &ProxyConnection{}, errors.New("crowbar: Authentication error")
}
conn.uuid = data[len(PrefixOK):]
return &conn, nil
}
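The Connect flow above is a plain challenge-response: the client fetches a nonce, computes an HMAC-SHA256 over it with the shared password, and sends the base64-encoded result as proof. A standalone sketch of just that proof step, standard library only (the nonce and password values here are hypothetical):

package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/base64"
	"fmt"
)

func main() {
	// Hypothetical inputs; in Connect the nonce comes from the auth endpoint.
	nonce := []byte("example-nonce")
	password := "secret"

	mac := hmac.New(sha256.New, []byte(password))
	mac.Write(nonce)
	proof := base64.StdEncoding.EncodeToString(mac.Sum(nil))
	fmt.Println(proof) // sent to the server as the "proof" form value
}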
| |
test_imageglobals.py
|
# emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the NiBabel package for the
# copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
""" Tests for imageglobals module
"""
from nose.tools import (assert_true, assert_false, assert_raises,
assert_equal, assert_not_equal)
from .. import imageglobals as igs
|
def test_errorlevel():
orig_level = igs.error_level
for level in (10, 20, 30):
with igs.ErrorLevel(level):
assert_equal(igs.error_level, level)
assert_equal(igs.error_level, orig_level)
| |
03-multiplexing.go
|
package main
import (
"fmt"
"math/rand"
"time"
)
// docker run --interactive --tty --rm --volume $(pwd):/go golang:1.8 bash
// Generator: function that returns a channel
// Channels are first-class values, just like strings or integers.
func boring(msg string) <-chan string { // Returns receive-only channel of strings.
c := make(chan string)
go func() { // We launch the goroutine from inside the function.
for i := 0; ; i++ {
c <- fmt.Sprintf("%s %d", msg, i)
time.Sleep(time.Duration(rand.Intn(1e2)) * time.Millisecond)
}
}()
return c // Return the channel to the caller.
}
// Multiplexing
// We use a fan-in function to let whosoever is ready talk.
func fanIn(input1, input2 <-chan string) <-chan string { // func fanIn(input1 <-chan string, input2 <-chan string) <-chan string
c := make(chan string)
go func() { for { c <- <-input1 } }()
go func() { for { c <- <-input2 } }()
return c
}
// Channels as a handle on a service
// Our boring function returns a channel that lets us communicate with the boring service it provides.
// We can have more instances of the service.
func main()
|
{
c := fanIn(boring("Joe"), boring("Kit"))
for i := 0; i < 10; i++ {
fmt.Println(<-c)
}
fmt.Println("You're both boring; I'm leaving.")
}
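The fanIn above spawns one forwarding goroutine per input. The same multiplexing can be written with a single goroutine and select; a minimal sketch of that variant:

// Fan-in with select: one goroutine services both inputs.
func fanInSelect(input1, input2 <-chan string) <-chan string {
	c := make(chan string)
	go func() {
		for {
			select {
			case s := <-input1:
				c <- s
			case s := <-input2:
				c <- s
			}
		}
	}()
	return c
}

The two-goroutine version is shorter; the select version avoids an extra goroutine and generalizes to cases where the inputs need different handling.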
|
|
install.go
|
/*
Copyright AppsCode Inc. and Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package install
import (
"stash.appscode.dev/apimachinery/apis/ui/v1alpha1"
"k8s.io/apimachinery/pkg/runtime"
utilruntime "k8s.io/apimachinery/pkg/util/runtime"
)
// Install registers the API group and adds types to a scheme
func
|
(scheme *runtime.Scheme) {
utilruntime.Must(v1alpha1.AddToScheme(scheme))
utilruntime.Must(scheme.SetVersionPriority(v1alpha1.SchemeGroupVersion))
}
|
Install
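Install just composes the two scheme calls above; a caller would typically create a fresh scheme and install into it. A minimal sketch (the caller code is an assumption, not part of this package):

// Hypothetical caller: build a scheme and register the ui/v1alpha1 types.
scheme := runtime.NewScheme()
install.Install(scheme)
// The scheme can now encode/decode objects of the v1alpha1 API group.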
|
bot_v_bot.py
|
import time
# from dlgo.agent import naive
from dlgo.agent import naive
from dlgo import gotypes
from dlgo import goboard_slow as goboard
from dlgo.utils import print_board, print_move
def main():
|
if __name__ == '__main__':
main()
# x = naive.RandomBot()
|
board_size = 9
game = goboard.GameState.new_game(board_size)
bots = {
gotypes.Player.black: naive.RandomBot(),
gotypes.Player.white: naive.RandomBot()
}
while not game.is_over():
time.sleep(0.3)
# print(chr(27) + "[2J")
# print_board(game.board)
bot_move = bots[game.next_player].select_move(game)
print_move(game.next_player, bot_move)
game = game.apply_move(bot_move)
|
codecommit_repository.go
|
package lister
import (
"fmt"
"github.com/aws/aws-sdk-go-v2/service/codecommit"
"github.com/trek10inc/awsets/context"
"github.com/trek10inc/awsets/resource"
)
type AWSCodeCommitRepository struct {
}
func init() {
i := AWSCodeCommitRepository{}
listers = append(listers, i)
}
func (l AWSCodeCommitRepository) Types() []resource.ResourceType {
return []resource.ResourceType{resource.CodeCommitRepository}
}
func (l AWSCodeCommitRepository) List(ctx context.AWSetsCtx) (*resource.Group, error) {
svc := codecommit.NewFromConfig(ctx.AWSCfg)
rg := resource.NewGroup()
err := Paginator(func(nt *string) (*string, error) {
res, err := svc.ListRepositories(ctx.Context, &codecommit.ListRepositoriesInput{
NextToken: nt,
})
if err != nil {
return nil, err
}
for _, id := range res.Repositories {
repo, err := svc.GetRepository(ctx.Context, &codecommit.GetRepositoryInput{
RepositoryName: id.RepositoryName,
})
if err != nil {
return nil, fmt.Errorf("failed to get repository %s: %w", *id.RepositoryId, err)
}
if v := repo.RepositoryMetadata; v != nil {
r := resource.New(ctx, resource.CodeCommitRepository, v.RepositoryId, v.RepositoryName, v)
rg.AddResource(r)
}
}
return res.NextToken, nil
})
|
return rg, err
}
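The Paginator helper used above is not defined in this file; from the call site it evidently feeds each returned continuation token back into the callback until the token is nil. A minimal compatible implementation, stated as an assumption:

// Hypothetical Paginator matching the call site above: it passes the previous
// continuation token into fn and stops once fn returns a nil token.
func Paginator(fn func(nt *string) (*string, error)) error {
	var token *string
	for {
		next, err := fn(token)
		if err != nil {
			return err
		}
		if next == nil {
			return nil
		}
		token = next
	}
}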
|
|
cache.go
|
package plugin
import (
"encoding/json"
"time"
models "github.com/erdemkosk/go-config-service/api/models"
"github.com/go-redis/redis"
)
var redisClient *redis.Client
func InitializeRedis() {
redisClient = redis.NewClient(&redis.Options{
Addr: GetEnvConfig("REDIS_HOST"),
PoolSize: 100,
MaxRetries: 2,
Password: GetEnvConfig("REDIS_PASSWORD"),
DB: 0,
})
ping, err := redisClient.Ping().Result()
if err == nil && len(ping) > 0 {
println("Connected to Redis")
} else {
println("Redis Connection Failed")
}
}
func GetValue(key string) (models.Config, error) {
var deserializedValue models.Config
serializedValue, err := redisClient.Get(key).Result()
json.Unmarshal([]byte(serializedValue), &deserializedValue)
return deserializedValue, err
}
func SetValue(key string, value interface{}) (bool, error) {
serializedValue, _ := json.Marshal(value)
err := redisClient.Set(key, string(serializedValue), 0).Err()
return true, err
}
func SetValueWithTTL(key string, value interface{}, ttl int) (bool, error) {
serializedValue, _ := json.Marshal(value)
err := redisClient.Set(key, string(serializedValue), time.Duration(ttl)*time.Second).Err()
return true, err
}
func MGet(keys []string) ([]interface{}, error) {
return redisClient.MGet(keys...).Result()
}
func RPush(key string, valueList []string) (bool, error) {
err := redisClient.RPush(key, valueList).Err()
return true, err
}
func
|
(key string, valueList []string, ttl int) (bool, error) {
err := redisClient.RPush(key, valueList).Err()
if err != nil {
	return false, err
}
// RPush takes no TTL argument; the expiry must be applied with a separate call.
err = redisClient.Expire(key, time.Duration(ttl)*time.Second).Err()
return true, err
}
func LRange(key string) (bool, error) {
err := redisClient.LRange(key, 0, -1).Err()
return true, err
}
func ListLength(key string) int64 {
return redisClient.LLen(key).Val()
}
func Publish(channel string, message string) {
redisClient.Publish(channel, message)
}
func GetKeyListByPattern(pattern string) []string {
return redisClient.Keys(pattern).Val()
}
func IncrementValue(key string) int64 {
return redisClient.Incr(key).Val()
}
func DelKey(key string) error {
return redisClient.Del(key).Err()
}
|
RpushWithTTL
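Because this client exposes no single push-with-expiry command, RpushWithTTL issues RPush followed by Expire; a hypothetical call (key, values, and TTL invented for illustration):

// Push two entries and let the whole list expire after 60 seconds.
ok, err := RpushWithTTL("recent:configs", []string{"a", "b"}, 60)
if err != nil || !ok {
	// handle the failure
}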
|
suite_test.go
|
package ber
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"math"
"testing"
)
var errEOF = io.ErrUnexpectedEOF.Error()
// Tests from http://www.strozhevsky.com/free_docs/free_asn1_testsuite_descr.pdf
// Source files and descriptions at http://www.strozhevsky.com/free_docs/TEST_SUITE.zip
var testCases = []struct {
// File contains the path to the BER-encoded file
File string
// Error indicates whether a decoding error is expected
Error string
// AbnormalEncoding indicates whether a normalized re-encoding is expected to differ from the original source
AbnormalEncoding bool
// IndefiniteEncoding indicates the source file used indefinite-length encoding, so the re-encoding is expected to differ (since the length is known)
IndefiniteEncoding bool
}{
// Common blocks
{File: "tests/tc1.ber", Error: "high-tag-number tag overflow"},
{File: "tests/tc2.ber", Error: errEOF},
{File: "tests/tc3.ber", Error: errEOF},
{File: "tests/tc4.ber", Error: "invalid length byte 0xff"},
{File: "tests/tc5.ber", Error: "", AbnormalEncoding: true},
// Real numbers (some expected failures are disabled until support is added)
{File: "tests/tc6.ber", Error: "REAL value +0 must be encoded with zero-length value block"},
{File: "tests/tc7.ber", Error: "REAL value -0 must be encoded as a special value"},
{File: "tests/tc8.ber", Error: `encoding of "special value" must not contain exponent and mantissa`},
{File: "tests/tc9.ber", Error: "bits 6 and 5 of information octet for REAL are equal to 11"},
{File: "tests/tc10.ber", Error: ""},
{File: "tests/tc11.ber", Error: "incorrect NR form"},
{File: "tests/tc12.ber", Error: `encoding of "special value" not from ASN.1 standard`},
{File: "tests/tc13.ber", Error: errEOF},
{File: "tests/tc14.ber", Error: errEOF},
{File: "tests/tc15.ber", Error: "too big value of exponent"},
{File: "tests/tc16.ber", Error: "too big value of mantissa"},
{File: "tests/tc17.ber", Error: "too big value of exponent"}, // Error: "Too big values for exponent and mantissa + using of "scaling factor" value"
// Integers
{File: "tests/tc18.ber", Error: ""},
{File: "tests/tc19.ber", Error: errEOF},
{File: "tests/tc20.ber", Error: ""},
// Object identifiers
{File: "tests/tc21.ber", Error: ""},
{File: "tests/tc22.ber", Error: ""},
{File: "tests/tc23.ber", Error: errEOF},
{File: "tests/tc24.ber", Error: ""},
// Booleans
{File: "tests/tc25.ber", Error: ""},
{File: "tests/tc26.ber", Error: ""},
{File: "tests/tc27.ber", Error: errEOF},
{File: "tests/tc28.ber", Error: ""},
{File: "tests/tc29.ber", Error: ""},
// Null
{File: "tests/tc30.ber", Error: ""},
{File: "tests/tc31.ber", Error: errEOF},
{File: "tests/tc32.ber", Error: ""},
// Bit string (some expected failures are disabled until support is added)
{File: "tests/tc33.ber", Error: ""}, // Error: "Too big value for "unused bits""
{File: "tests/tc34.ber", Error: errEOF},
{File: "tests/tc35.ber", Error: "", IndefiniteEncoding: true}, // Error: "Using of different from BIT STRING types as internal types for constructive encoding"
{File: "tests/tc36.ber", Error: "", IndefiniteEncoding: true}, // Error: "Using of "unused bits" in internal BIT STRINGs with constructive form of encoding"
{File: "tests/tc37.ber", Error: ""},
{File: "tests/tc38.ber", Error: "", IndefiniteEncoding: true},
{File: "tests/tc39.ber", Error: ""},
{File: "tests/tc40.ber", Error: ""},
// Octet string (some expected failures are disabled until support is added)
{File: "tests/tc41.ber", Error: "", IndefiniteEncoding: true}, // Error: "Using of different from OCTET STRING types as internal types for constructive encoding"
{File: "tests/tc42.ber", Error: errEOF},
{File: "tests/tc43.ber", Error: errEOF},
{File: "tests/tc44.ber", Error: ""},
{File: "tests/tc45.ber", Error: ""},
// Bit string
{File: "tests/tc46.ber", Error: "indefinite length used with primitive type"},
{File: "tests/tc47.ber", Error: "eoc child not allowed with definite length"},
{File: "tests/tc48.ber", Error: "", IndefiniteEncoding: true}, // Error: "Using of more than 7 "unused bits" in BIT STRING with constrictive encoding form"
{File: "tests/tc49.ber", Error: ""},
{File: "tests/tc50.ber", Error: is64bit("length cannot be less than -1", "long-form length overflow")},
{File: "tests/tc51.ber", Error: is64bit(fmt.Sprintf("length 206966894640 greater than maximum %v", MaxPacketLengthBytes), "long-form length overflow")},
}
// is64bit returns a on platforms whose int is 64 bits wide and b otherwise;
// some length-overflow errors differ between 32- and 64-bit builds.
func is64bit(a, b string) string {
maxInt64 := int64(math.MaxInt64)
length := int(maxInt64)
if int64(length) != maxInt64 {
return b
}
return a
}
func TestSuiteDecodePacket(t *testing.T) {
// Debug = true
for _, tc := range testCases {
file := tc.File
dataIn, err := ioutil.ReadFile(file)
if err != nil {
t.Errorf("%s: %v", file, err)
continue
}
// fmt.Printf("%s: decode %d\n", file, len(dataIn))
packet, err := DecodePacketErr(dataIn)
if err != nil {
if tc.Error == "" {
t.Errorf("%s: unexpected error during DecodePacket: %v", file, err)
} else if tc.Error != err.Error() {
t.Errorf("%s: expected error %q during DecodePacket, got %q", file, tc.Error, err)
}
continue
}
if tc.Error != "" {
|
}
dataOut := packet.Bytes()
if tc.AbnormalEncoding || tc.IndefiniteEncoding {
// Abnormal encodings and encodings that used indefinite length should re-encode differently
if bytes.Equal(dataOut, dataIn) {
t.Errorf("%s: data should have been re-encoded differently", file)
}
} else if !bytes.Equal(dataOut, dataIn) {
// Make sure the serialized data matches the source
t.Errorf("%s: data should be the same\nwant: %#v\ngot: %#v", file, dataIn, dataOut)
}
packet, err = DecodePacketErr(dataOut)
if err != nil {
t.Errorf("%s: unexpected error: %v", file, err)
continue
}
// Make sure the re-serialized data matches our original serialization
dataOut2 := packet.Bytes()
if !bytes.Equal(dataOut, dataOut2) {
t.Errorf("%s: data should be the same\nwant: %#v\ngot: %#v", file, dataOut, dataOut2)
}
}
}
func TestSuiteReadPacket(t *testing.T) {
for _, tc := range testCases {
file := tc.File
dataIn, err := ioutil.ReadFile(file)
if err != nil {
t.Errorf("%s: %v", file, err)
continue
}
buffer := bytes.NewBuffer(dataIn)
packet, err := ReadPacket(buffer)
if err != nil {
if tc.Error == "" {
t.Errorf("%s: unexpected error during ReadPacket: %v", file, err)
} else if tc.Error != err.Error() {
t.Errorf("%s: expected error %q during ReadPacket, got %q", file, tc.Error, err)
}
continue
}
if tc.Error != "" {
t.Errorf("%s: expected error %q, got none", file, tc.Error)
continue
}
dataOut := packet.Bytes()
if tc.AbnormalEncoding || tc.IndefiniteEncoding {
// Abnormal encodings and encodings that used indefinite length should re-encode differently
if bytes.Equal(dataOut, dataIn) {
t.Errorf("%s: data should have been re-encoded differently", file)
}
} else if !bytes.Equal(dataOut, dataIn) {
// Make sure the serialized data matches the source
t.Errorf("%s: data should be the same\nwant: %#v\ngot: %#v", file, dataIn, dataOut)
}
packet, err = DecodePacketErr(dataOut)
if err != nil {
t.Errorf("%s: unexpected error: %v", file, err)
continue
}
// Make sure the re-serialized data matches our original serialization
dataOut2 := packet.Bytes()
if !bytes.Equal(dataOut, dataOut2) {
t.Errorf("%s: data should be the same\nwant: %#v\ngot: %#v", file, dataOut, dataOut2)
}
}
}
|
t.Errorf("%s: expected error %q, got none", file, tc.Error)
continue
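The is64bit helper selects the expected error string by probing whether a round-trip through int preserves MaxInt64. The same platform check can be expressed directly with strconv.IntSize (strconv would need importing); a sketch:

// Equivalent probe: strconv.IntSize is 64 on platforms whose int is 64 bits wide.
func is64bitAlt(a, b string) string {
	if strconv.IntSize == 64 {
		return a
	}
	return b
}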
|
setup.py
|
# Packages up pygw so it's pip-installable
from setuptools import setup, find_packages
with open('README.md', 'r') as fh:
long_description = fh.read()
def get_version():
|
setup(
name='pygw',
author='GeoWave Contributors',
author_email='[email protected]',
description='GeoWave bindings for Python3',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://locationtech.github.io/geowave/',
project_urls={
'Documentation': 'https://locationtech.github.io/geowave/pydocs/',
'Source': 'https://github.com/locationtech/geowave/tree/master/python/src/main/python',
},
version=get_version(),
packages=find_packages(),
classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
],
install_requires=['py4j==0.10.8.1', 'shapely==1.7'],
python_requires='>=3,<3.8' # py4j does not support python 3.8 yet
)
|
try:
from maven_version import get_maven_version
version = get_maven_version()
except ModuleNotFoundError:
# If maven version isn't found, it must be from the distribution
from pkg_resources import get_distribution
from pkg_resources import DistributionNotFound
version = get_distribution('pygw').version
return version
|
main.py
|
"""Tests for the main module."""
import unittest
from unittest.mock import Mock, patch
from yala.main import LinterRunner
class TestLinterRunner(unittest.TestCase):
"""Test the LinterRunner class."""
@patch('yala.main.Config')
def test_chosen_not_found(self, mock_config):
"""Should print an error when chosen linter is not found."""
# Linter chosen by the user
name = 'my linter'
mock_config.user_linters = [name]
_, stderr = self._path_and_run(mock_config, name)
self.assertIn('Did you install', stderr[0])
@patch('yala.main.Config')
def test_not_chosen_not_found(self, mock_config):
|
def _path_and_run(self, mock_config, name='my linter'):
cls = self._mock_linter_class(name)
mock_config.get_linter_classes.return_value = [cls]
with patch('yala.main.subprocess.run', side_effect=FileNotFoundError):
linter_cfg_tgts = cls, mock_config, []
return LinterRunner.run(linter_cfg_tgts)
@staticmethod
def _mock_linter_class(name):
linter_class = Mock()
linter = linter_class.return_value
linter.command_with_options = linter.name = name
return linter_class
|
"""Should not print an error when chosen linter is not found."""
# No linters chosen by the user
mock_config.user_linters = []
stdout, stderr = self._path_and_run(mock_config)
self.assertEqual(0, len(stdout))
self.assertEqual(0, len(stderr))
|
image.py
|
from project.fileviews.base import FileView
class
|
(FileView):
"""
Class for displaying image files.
"""
def render(self, request):
return super().render(request, 'project/file_view_image.html')
|
ImageFileView
|
test_cursor.py
|
def test_cursor_triggers_cursor_in_the_connection(open_connection):
|
def test_cursor_returns_a_cursor_in_the_handler(open_connection, mocker):
cursor_mock = mocker.Mock()
open_connection._connection_handler.cursor.return_value = cursor_mock
assert open_connection.cursor() == cursor_mock
|
open_connection.cursor()
open_connection._connection_handler.cursor.assert_called_once()
|
test_qidoc_clean.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2019 SoftBank Robotics. All rights reserved.
# Use of this source code is governed by a BSD-style license (see the COPYING file).
""" QiBuild """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import os
def
|
(qidoc_action):
""" Test Simple """
world_proj = qidoc_action.add_test_project("world")
build_dir = os.path.join(world_proj.path, "build-doc")
assert not os.path.exists(build_dir)
qidoc_action("build", "world")
assert os.path.exists(build_dir)
qidoc_action("clean", "world")
assert os.path.exists(build_dir)
qidoc_action("clean", "world", "--force")
assert not os.path.exists(build_dir)
|
test_simple
|
ExamSettingPanel.tsx
|
import { defineComponent } from 'vue';
import { createWorkerSetting } from '..';
import { useSettings } from '../../store';
import { CommonWorkSettingPanel } from './CommonWorkSettingPanel';
export const ExamSettingPanel = defineComponent({
setup () {
const settings = useSettings().zhs.exam;
return () => (
<div class="ocs-setting-panel">
<div class="ocs-setting-items">
<CommonWorkSettingPanel
settings={settings}
v-slots={{
upload: createWorkerSetting(
'自动答题',
{
selected: 'close',
options: [
{
label: '请自行检查后自行点击提交',
|
value: 'close'
}
]
},
(e: any) => (settings.upload = e.target.value)
)
}}>
</CommonWorkSettingPanel>
</div>
</div>
);
}
});
| |
test.rs
|
// Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
use snapshot::Snapshot;
use versionize::{VersionMap, Versionize, VersionizeError, VersionizeResult};
use versionize_derive::Versionize;
#[derive(Debug, PartialEq, Versionize)]
pub enum TestState {
Zero,
One(u32),
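    // Serializing `Two` at a target version < 2 falls back to `default_state_two`,
    // which substitutes a value the older layout understands.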
#[version(start = 2, default_fn = "default_state_two")]
Two(u64),
}
impl TestState {
fn default_state_two(&self, target_version: u16) -> VersionizeResult<TestState> {
match target_version {
1 => Ok(TestState::One(2)),
i => Err(VersionizeError::Serialize(format!(
"Unknown target version: {}",
i
))),
}
}
}
#[derive(Debug, PartialEq, Versionize)]
pub struct A {
a: u32,
#[version(start = 1, end = 2)]
b: Option<TestState>,
#[version(start = 2, default_fn = "default_c")]
c: String,
}
impl A {
fn default_c(_source_version: u16) -> String {
"some_string".to_owned()
}
}
#[test]
fn test_hardcoded_snapshot_deserialization() {
    // We are testing representation compatibility between versions, at the `snapshot` crate
    // level, by checking that only the version number and the newly added/removed field
    // changes between versions are reflected in the hardcoded snapshots.
#[rustfmt::skip]
let v1_hardcoded_snapshot: &[u8] = &[
        // This blob consists of the following: magic_id (8 bytes),
0x01, 0x00,
#[cfg(target_arch = "aarch64")]
0xAA,
#[cfg(target_arch = "aarch64")]
|
0x64,
#[cfg(target_arch = "x86_64")]
0x86,
0x84, 0x19, 0x10, 0x07,
// target version (2 bytes) +
0x01, 0x00,
// `a` field +
0x10, 0x00, 0x00, 0x00,
// `b` field: Option variant type (1 byte) + inner enum variant type (4 bytes)
// + inner enum value (4 bytes).
0x01, 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
];
#[rustfmt::skip]
let v2_hardcoded_snapshot: &[u8] = &[
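        // magic_id (8 bytes, arch-dependent), same layout as in the v1 blob above.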
        0x01, 0x00,
#[cfg(target_arch = "aarch64")]
0xAA,
#[cfg(target_arch = "aarch64")]
0xAA,
#[cfg(target_arch = "x86_64")]
0x64,
#[cfg(target_arch = "x86_64")]
0x86, 0x84, 0x19, 0x10, 0x07,
// Version 2 +
0x02, 0x00,
// `a` field +
0x10, 0x00, 0x00, 0x00,
// `c` field: String len (8 bytes) + actual String; the Option field is not available at v2.
0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x72, 0x61, 0x6E, 0x64, 0x6F, 0x6D,
];
let mut vm = VersionMap::new();
vm.new_version()
.set_type_version(A::type_id(), 2)
.set_type_version(TestState::type_id(), 2);
let mut snapshot_blob = v1_hardcoded_snapshot;
let mut restored_struct: A = Snapshot::load(&mut snapshot_blob, vm.clone()).unwrap();
let mut expected_struct = A {
a: 16u32,
b: Some(TestState::One(2)),
c: "some_string".to_owned(),
};
assert_eq!(restored_struct, expected_struct);
snapshot_blob = v2_hardcoded_snapshot;
restored_struct = Snapshot::load(&mut snapshot_blob, vm.clone()).unwrap();
expected_struct = A {
a: 16u32,
b: None,
c: "random".to_owned(),
};
assert_eq!(restored_struct, expected_struct);
}
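// A minimal save-side sketch, assuming the crate exposes the 2020-era API
// `Snapshot::new(version_map, target_version)` followed by `save(writer, &object)`.
// The test name `test_snapshot_roundtrip_sketch` and the exact `save` signature
// are assumptions, not taken from this file.
#[test]
fn test_snapshot_roundtrip_sketch() {
    let mut vm = VersionMap::new();
    vm.new_version()
        .set_type_version(A::type_id(), 2)
        .set_type_version(TestState::type_id(), 2);
    // At version 2 the `b` field (end = 2) is skipped, so it must round-trip
    // to its `Default` value (`None`).
    let state = A {
        a: 16u32,
        b: None,
        c: "random".to_owned(),
    };
    let mut buf = Vec::new();
    let mut snapshot = Snapshot::new(vm.clone(), 2);
    snapshot.save(&mut buf, &state).unwrap();
    let restored: A = Snapshot::load(&mut buf.as_slice(), vm).unwrap();
    assert_eq!(restored, state);
}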
|
0xAA,
#[cfg(target_arch = "x86_64")]
|
complex.rs
|
use std::{error::Error, path::Path, sync::Arc};
use lumiere::{
bvh::BVHNode, camera, image, material, object, scene::Scene, texture, Colour, Point3,
};
use rand::{rngs, Rng, SeedableRng};
fn
|
() -> Result<(), Box<dyn Error>> {
let mut rng = rngs::SmallRng::from_rng(rand::thread_rng()).unwrap();
// Image parameters
const ASPECT_RATIO: f64 = 16. / 9.;
const IMAGE_WIDTH: usize = 1024;
const IMAGE_HEIGHT: usize = (IMAGE_WIDTH as f64 / ASPECT_RATIO) as usize;
let samples_per_pixel: usize = 5000;
let max_depth = 50;
    // Pixel array as height * width * channels 8-bit values
const BUFFER_LENGTH: usize = 3 * IMAGE_WIDTH * IMAGE_HEIGHT;
let mut pixels = vec![0_u8; BUFFER_LENGTH];
// Generate the objects
// Camera
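    // look_dir is the negation of origin, so the camera points at the world origin.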
let camera_look_dir = Point3::new(-13., -2., -3.);
let camera = camera::CameraBuilder::new()
.origin(Point3::new(13., 2., 3.))
.look_dir(camera_look_dir)
.fov(20.)
.aspect_ratio(ASPECT_RATIO)
.aperture(0.1)
.focus_dist(10.)
.build();
// World
let mut world = object::HittableList::new();
// Ground
let checker = Arc::new(texture::CheckerTexture::from_colours(
0.32,
Colour::new(0.2, 0.3, 0.1),
Colour::new(0.9, 0.9, 0.9),
));
let material_ground = Arc::new(material::Lambertian::new(checker));
world.add(Arc::new(object::Sphere::new(
Point3::new(0., -1000., 0.),
1000.,
material_ground,
)));
// Random small balls
for a in -11..11 {
for b in -11..11 {
let choose_mat: f64 = rng.gen();
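            // Note: choose_mat is reused as the position jitter below, so material
            // choice and sphere offset are correlated.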
let centre = Point3::new(
a as f64 + 0.9 * choose_mat,
0.2,
b as f64 + 0.9 * choose_mat,
);
if (centre - Point3::new(4., 0.2, 0.)).length() > 0.9 {
match choose_mat {
a if a < 0.8 => {
// diffuse
let albedo = Colour::random_in_range_inclusive(&mut rng, 0.0..=1.0);
let sphere_material = Arc::new(material::Lambertian::from_colour(albedo));
let centre_1 = centre; // + Vec3::new(0., rng.gen_range(0.0..=0.5), 0.);
world.add(Arc::new(object::MovingSphere::new(
centre,
centre_1,
0.2,
sphere_material,
)));
}
a if a < 0.95 => {
// metal
let albedo = Colour::random_in_range_inclusive(&mut rng, 0.5..=1.0);
let fuzziness: f64 = rng.gen_range(0.0..0.5);
let sphere_material = Arc::new(material::Metal::new(albedo, fuzziness));
world.add(Arc::new(object::Sphere::new(centre, 0.2, sphere_material)));
}
_ => {
// glass
let sphere_material = Arc::new(material::Dielectric::new(1.5));
world.add(Arc::new(object::Sphere::new(centre, 0.2, sphere_material)));
}
}
}
}
}
let material_1 = Arc::new(material::Dielectric::new(1.5));
world.add(Arc::new(object::Sphere::new(
Point3::new(0., 1., 0.),
1.,
material_1,
)));
let material_2 = Arc::new(material::Lambertian::from_colour(Colour::new(
0.4, 0.2, 0.1,
)));
world.add(Arc::new(object::Sphere::new(
Point3::new(-4., 1., 0.),
1.,
material_2,
)));
let material_3 = Arc::new(material::Metal::new(Colour::new(0.7, 0.6, 0.5), 0.0));
world.add(Arc::new(object::Sphere::new(
Point3::new(4., 1., 0.),
1.,
material_3,
)));
// Generate BVH tree
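    // (wrapping the scene in a BVH makes per-ray intersection roughly O(log n)
    // in the number of objects instead of O(n))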
let mut bvh_root = object::HittableList::new();
bvh_root.add(Arc::new(BVHNode::new(world, &mut rng)));
// Create scene
let scene = Scene::new(
bvh_root,
camera,
max_depth,
samples_per_pixel,
IMAGE_WIDTH,
IMAGE_HEIGHT,
Colour::new(0.7, 0.8, 1.),
);
// Render the scene to a frame buffer
scene.render(&mut pixels)?;
// Write the frame buffer to a file
image::png::write_image::<&Path, IMAGE_WIDTH, IMAGE_HEIGHT>(&pixels, Path::new("image.png"))?;
eprintln!("Saved image");
Ok(())
}
|
main
|