misc.py
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Miscellaneous utility functions
"""
from __future__ import (print_function, unicode_literals, division,
                        absolute_import)
from builtins import next, str

import sys
import re

# collections.Iterator moved to collections.abc in Python 3.3 and was
# removed from collections in Python 3.10, so try the new location first.
try:
    from collections.abc import Iterator
except ImportError:  # Python 2
    from collections import Iterator

from distutils.version import LooseVersion

import numpy as np

from future.utils import raise_from
from future import standard_library
try:
    from textwrap import indent as textwrap_indent
except ImportError:
    def textwrap_indent(text, prefix):
        """ A textwrap.indent replacement for Python < 3.3 """
        if not prefix:
            return text
        splittext = text.splitlines(True)
        return prefix + prefix.join(splittext)
standard_library.install_aliases()


def human_order_sorted(l):
    """Sorts strings in human order (i.e. 'stat10' will go after 'stat2')"""

    def atoi(text):
        return int(text) if text.isdigit() else text

    def natural_keys(text):
        if isinstance(text, tuple):
            text = text[0]
        return [atoi(c) for c in re.split(r'(\d+)', text)]

    return sorted(l, key=natural_keys)


def trim(docstring, marker=None):
    if isinstance(docstring, bytes):
        docstring = str(docstring, 'utf-8')

    if not docstring:
        return ''
    # Convert tabs to spaces (following the normal Python rules)
    # and split into a list of lines:
    lines = docstring.expandtabs().splitlines()
    # Determine minimum indentation (first line doesn't count):
    indent = sys.maxsize
    for line in lines[1:]:
        stripped = line.lstrip()
        if stripped:
            indent = min(indent, len(line) - len(stripped))
    # Remove indentation (first line is special):
    trimmed = [lines[0].strip()]
    if indent < sys.maxsize:
        for line in lines[1:]:
            # replace existing REST marker with doc level marker
            stripped = line.strip()
            if marker is not None and stripped and \
               all([s == stripped[0] for s in stripped]) and \
               stripped[0] not in [':']:
                line = line.replace(stripped[0], marker)
            trimmed.append(line[indent:].rstrip())
    # Strip off trailing and leading blank lines:
    while trimmed and not trimmed[-1]:
        trimmed.pop()
    while trimmed and not trimmed[0]:
        trimmed.pop(0)
    # Return a single string:
    return '\n'.join(trimmed)


def find_indices(condition):
    "Return the indices where ravel(condition) is true"
    res, = np.nonzero(np.ravel(condition))
    return res


def is_container(item):
    """Checks if item is a container (list, tuple, dict, set)

    Parameters
    ----------
    item : object
        object to check for .__iter__

    Returns
    -------
    output : Boolean
        True if container
        False if not (eg string)
    """
    if isinstance(item, str):
        return False
    elif hasattr(item, '__iter__'):
        return True
    else:
        return False


def container_to_string(cont):
    """Convert a container to a command line string.

    Elements of the container are joined with a space between them,
    suitable for a command line parameter.

    If the container `cont` is only a sequence, like a string and not a
    container, it is returned unmodified.

    Parameters
    ----------
    cont : container
        A container object like a list, tuple, dict, or a set.

    Returns
    -------
    cont_str : string
        Container elements joined into a string.
    """
    if hasattr(cont, '__iter__') and not isinstance(cont, str):
        cont = ' '.join(cont)
    return str(cont)


# Dependency checks.  Copied this from Nipy, with some modifications
# (added app as a parameter).
def package_check(pkg_name,
                  version=None,
                  app=None,
                  checker=LooseVersion,
                  exc_failed_import=ImportError,
                  exc_failed_check=RuntimeError):
    """Check that the minimal version of the required package is installed.

    Parameters
    ----------
    pkg_name : string
        Name of the required package.
    version : string, optional
        Minimal version number for required package.
    app : string, optional
        Application that is performing the check.  For instance, the
        name of the tutorial being executed that depends on specific
        packages.  Default is *Nipype*.
    checker : object, optional
        The class that will perform the version checking.  Default is
        distutils.version.LooseVersion.
    exc_failed_import : Exception, optional
        Class of the exception to be thrown if import failed.
    exc_failed_check : Exception, optional
        Class of the exception to be thrown if version check failed.

    Examples
    --------
    package_check('numpy', '1.3')
    package_check('scipy', '0.7', 'tutorial1')
    """
    if app:
        msg = '%s requires %s' % (app, pkg_name)
    else:
        msg = 'Nipype requires %s' % pkg_name
    if version:
        msg += ' with version >= %s' % (version, )
    try:
        mod = __import__(pkg_name)
    except ImportError as e:
        raise_from(exc_failed_import(msg), e)
    if not version:
        return
    try:
        have_version = mod.__version__
    except AttributeError as e:
        raise_from(
            exc_failed_check('Cannot find version for %s' % pkg_name), e)
    if checker(have_version) < checker(version):
        raise exc_failed_check(msg)


def str2bool(v):
    if isinstance(v, bool):
        return v
    lower = v.lower()
    if lower in ("yes", "true", "t", "1"):
        return True
    elif lower in ("no", "false", "n", "f", "0"):
        return False
    else:
        raise ValueError("%s cannot be converted to bool" % v)


def flatten(S):
    if S == []:
        return S
    if isinstance(S[0], list):
        return flatten(S[0]) + flatten(S[1:])
    return S[:1] + flatten(S[1:])


def unflatten(in_list, prev_structure):
    if not isinstance(in_list, Iterator):
        in_list = iter(in_list)

    if not isinstance(prev_structure, list):
        return next(in_list)

    out = []
    for item in prev_structure:
        out.append(unflatten(in_list, item))
    return out


def normalize_mc_params(params, source):
    """
    Normalize a single row of motion parameters to the SPM format.

    SPM saves motion parameters as:
        x   Right-Left          (mm)
        y   Anterior-Posterior  (mm)
        z   Superior-Inferior   (mm)
        rx  Pitch               (rad)
        ry  Yaw                 (rad)
        rz  Roll                (rad)
    """
    if source.upper() == 'FSL':
        params = params[[3, 4, 5, 0, 1, 2]]
    elif source.upper() in ('AFNI', 'FSFAST'):
        params = params[np.asarray([4, 5, 3, 1, 2, 0]) + (len(params) > 6)]
        params[3:] = params[3:] * np.pi / 180.
    elif source.upper() == 'NIPY':
        from nipy.algorithms.registration import to_matrix44, aff2euler
        matrix = to_matrix44(params)
        params = np.zeros(6)
        params[:3] = matrix[:3, 3]
        params[-1:2:-1] = aff2euler(matrix)

    return params


def dict_diff(dold, dnew, indent=0):
    """Helper to log what actually changed from old to new values of
    dictionaries.

    typical use -- log difference for hashed_inputs
    """
    # First check inputs, since they usually are lists of tuples
    # and dicts are required.
    if isinstance(dnew, list):
        dnew = dict(dnew)
    if isinstance(dold, list):
        dold = dict(dold)

    # Compare against hashed_inputs
    # Keys: should rarely differ
    new_keys = set(dnew.keys())
    old_keys = set(dold.keys())

    diff = []
    if new_keys - old_keys:
        diff += ["  * keys not previously seen: %s" % (new_keys - old_keys)]
    if old_keys - new_keys:
        diff += ["  * keys not presently seen: %s" % (old_keys - new_keys)]

    # Add topical message
    if diff:
        diff.insert(0, "Dictionaries had differing keys:")

    diffkeys = len(diff)

    # Values in common keys would differ quite often,
    # so we need to join the messages together
    for k in new_keys.intersection(old_keys):
        same = False
        try:
            new, old = dnew[k], dold[k]
            same = new == old
            if not same:
                # Since JSON does not discriminate between lists and
                # tuples, we might need to cast them into the same type
                # as the last resort.  And lets try to be more generic
                same = old.__class__(new) == old
        except Exception:
            same = False

        if not same:
            diff += ["  * %s: %r != %r" % (k, dnew[k], dold[k])]

    if len(diff) > diffkeys:
        diff.insert(diffkeys, "Some dictionary entries had differing values:")

    return textwrap_indent('\n'.join(diff), ' ' * indent)
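For orientation, here is a small usage sketch of a few of the helpers above. It assumes the file is importable as `misc`; the inputs are invented for illustration.

# Hypothetical usage of the helpers above; assumes misc.py is on the path.
from misc import human_order_sorted, flatten, unflatten, dict_diff

# Natural sort: numeric runs compare as integers, so 'stat10' follows 'stat2'.
print(human_order_sorted(['stat10', 'stat2', 'stat1']))
# ['stat1', 'stat2', 'stat10']

# flatten/unflatten round-trip: unflatten restores the nesting of a template.
nested = [1, [2, [3, 4]], 5]
flat = flatten(nested)                     # [1, 2, 3, 4, 5]
print(unflatten(flat, nested) == nested)   # True

# dict_diff reports keys and values that changed between two dicts.
print(dict_diff({'a': 1, 'b': 2}, {'a': 1, 'b': 3, 'c': 4}, indent=2))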
courses.js
'use strict';

const express = require('express');
const router = express.Router();
let Courses = require("../models").Courses;
let Users = require("../models").Users;
let sequelize = require("../models").sequelize;
const { check, validationResult } = require('express-validator/check');
const bcryptjs = require('bcryptjs');
const atob = require('atob');

// getCredentials returns the user's credentials from the Authorization header
const getCredentials = (req) => {
  let credentials = null;
  const authHeaderVal = req.get('Authorization');
  // if the Authorization header is present and starts with 'Basic '
  if (authHeaderVal && authHeaderVal.startsWith('Basic ')) {
    // the base64-encoded credentials follow the 'Basic ' prefix
    const base64Credentials = authHeaderVal.slice(6);
    // the credentials are decoded from base64 into a 'user:pass' string
    const stringCredentials = atob(base64Credentials);
    // the username and password are split into an array
    const partsCredentials = stringCredentials.split(':');
    // the credentials are set
    credentials = {
      email: partsCredentials[0],
      pass: partsCredentials[1],
    };
  }
  return credentials;
};

// returns a list of all courses
router.get('/courses', (req, res) => {
  Courses.findAll({
    include: [
      {
        model: Users,
        // Only send required attributes
        attributes: ["firstName", "lastName", "emailAddress"]
      }
    ]
  }).then((courses) => {
    res.json({ courses });
  });
});

// returns a course by id
router.get('/courses/:id', (req, res, next) => {
  Courses.findOne({
    where: {
      id: req.params.id
    },
    include: [
      {
        model: Users,
        // Only send required attributes
        attributes: ["firstName", "lastName", "emailAddress"]
      }
    ]
  }).then((course) => {
    // if the course is found it is returned
    if (course) {
      res.json({ course });
    } else {
      next();
    }
  });
});

// posts a new course
router.post('/courses', [
  check('title')
    .exists({ checkNull: true, checkFalsy: true })
    .withMessage('Please provide a value for title'),
  check('description')
    .exists({ checkNull: true, checkFalsy: true })
    .withMessage('Please provide a value for description'),
], (req, res) => {
  // Attempt to get the validation result from the Request object.
  const courseErrors = validationResult(req);
  // user credentials are acquired from the auth header
  const credentials = getCredentials(req);
  let newCourse = req.body;

  const checkTitleAndDescription = new Promise((resolve, reject) => {
    if (!courseErrors.isEmpty()) {
      const courseErrorMessages = courseErrors.array().map(error => error.msg);
      reject(Error(courseErrorMessages));
    } else {
      console.log('Title and Description: passed');
      resolve();
    }
  });

  const checkEmailAndPasswordProvided = new Promise((resolve, reject) => {
    if (credentials.pass && credentials.email) {
      console.log('Email Address and Password are present: passed');
      resolve();
    } else {
      reject(Error('The Email Address/Password were not provided.'));
    }
  });

  function checkEmailInDatabase(user) {
    return new Promise((resolve, reject) => {
      if (user == null) {
        reject(Error('The Email Address was not found.'));
      } else {
        // validates that the user is in the database
        console.log('Email Address is present: passed');
        resolve(user);
      }
    });
  }

  function checkPassword(user) {
    return new Promise((resolve, reject) => {
      // the supplied password is compared with the hashed database password
      const authenticated = bcryptjs.compareSync(credentials.pass, user[0].password);
      if (authenticated) {
        console.log('Password is a match: passed');
        resolve(user);
      } else {
        reject(Error('The Password is invalid.'));
      }
    });
  }

  checkTitleAndDescription
    .then(() => { return checkEmailAndPasswordProvided; })
    .then(() => {
      return Users.findAll({
        where: {
          emailAddress: credentials.email
        }
      });
    })
    .then((user) => { return checkEmailInDatabase(user); })
    .then((user) => { return checkPassword(user); })
    .then((user) => {
      return Courses.build({
        title: newCourse.title,
        description: newCourse.description,
        estimatedTime: newCourse.estimatedTime,
        materialsNeeded: newCourse.materialsNeeded,
        userId: user[0].id
      }).save();
    })
    .then((course) => {
      // redirect to the newly created course
      return res.redirect(201, `/courses/${course.id}`);
    })
    .catch((err) => {
      console.warn(err);
      return res.status(401).json({ error: err.message });
    });
});

// updates a user's course
router.put('/courses/:id', [
  check('title')
    .exists({ checkNull: true, checkFalsy: true })
    .withMessage('Please provide a value for title'),
  check('description')
    .exists({ checkNull: true, checkFalsy: true })
    .withMessage('Please provide a value for description'),
], (req, res) => {
  // Attempt to get the validation result from the Request object.
  const courseErrors = validationResult(req);
  // user credentials are acquired from the auth header
  const credentials = getCredentials(req);
  let newCourse = req.body;

  const checkTitleAndDescription = new Promise((resolve, reject) => {
    if (!courseErrors.isEmpty()) {
      const courseErrorMessages = courseErrors.array().map(error => error.msg);
      reject(Error(courseErrorMessages));
    } else {
      console.log('Title and Description: passed');
      resolve();
    }
  });

  const checkEmailAndPasswordProvided = new Promise((resolve, reject) => {
    if (credentials.pass && credentials.email) {
      console.log('Email Address and Password are present: passed');
      resolve();
    } else {
      reject(Error('Email Address and Password are present: failed.'));
    }
  });

  function checkEmailInDatabase(user) {
    return new Promise((resolve, reject) => {
      if (user == null) {
        reject(Error('Email Address is present: failed'));
      } else {
        // validates that the user is in the database
        console.log('Email Address is present: passed');
        resolve(user);
      }
    });
  }

  function checkPassword(user) {
    return new Promise((resolve, reject) => {
      // the supplied password is compared with the hashed database password
      const authenticated = bcryptjs.compareSync(credentials.pass, user[0].password);
      if (authenticated) {
        console.log('Password is a match: passed');
        resolve(user);
      } else {
        reject(Error('Password is a match: failed.'));
      }
    });
  }

  function getCourseId() {
    return Courses.findByPk(req.params.id);
  }

  // checks whether course.userId matches the authenticated user's id
  function checkCourseIDandUserID(courseAndUser) {
    return new Promise((resolve, reject) => {
      if (courseAndUser[0].userId === courseAndUser[1][0].id) {
        console.log('User and Course ID match: passed');
        resolve(courseAndUser[0]);
      } else {
        reject(Error('User and Course Id match: failed'));
      }
    });
  }

  checkTitleAndDescription
    .then(() => { return checkEmailAndPasswordProvided; })
    .then(() => {
      return Users.findAll({
        where: {
          emailAddress: credentials.email
        }
      });
    })
    .then((user) => { return checkEmailInDatabase(user); })
    .then((user) => { return checkPassword(user); })
    .then((user) => {
      let courseId = getCourseId();
      return Promise.all([courseId, user]);
    })
    .then((courseAndUser) => { return checkCourseIDandUserID(courseAndUser); })
    .then((course) => {
      // wait for the update to finish before responding
      return course.update(req.body)
        .then(() => res.status(204).json({ message: '' }));
    })
    .catch((err) => {
      console.warn(err);
      return res.status(401).json({ error: err.message });
    });
});

// deletes a user's course
router.delete('/courses/:id', (req, res) => {
  // user credentials are acquired from the auth header
  const credentials = getCredentials(req);

  const checkEmailAndPasswordProvided = new Promise((resolve, reject) => {
    if (credentials.pass && credentials.email) {
      console.log('Email Address and Password are present: passed');
      resolve();
    } else {
      reject(Error('Email Address and Password are present: failed.'));
    }
  });

  function checkEmailInDatabase(user) {
    return new Promise((resolve, reject) => {
      if (user == null) {
        reject(Error('Email Address is present: failed'));
      } else {
        // validates that the user is in the database
        console.log('Email Address is present: passed');
        resolve(user);
      }
    });
  }

  function checkPassword(user) {
    return new Promise((resolve, reject) => {
      // the supplied password is compared with the hashed database password
      const authenticated = bcryptjs.compareSync(credentials.pass, user[0].password);
      if (authenticated) {
        console.log('Password is a match: passed');
        resolve(user);
      } else {
        reject(Error('Password is a match: failed.'));
      }
    });
  }

  function getCourseId() {
    return Courses.findByPk(req.params.id);
  }

  // checks whether course.userId matches the authenticated user's id
  function checkCourseIDandUserID(courseAndUser) {
    return new Promise((resolve, reject) => {
      if (courseAndUser[0].userId === courseAndUser[1][0].id) {
        console.log('User and Course ID match: passed');
        resolve(courseAndUser[0]);
      } else {
        reject(Error('User and Course Id match: failed'));
      }
    });
  }

  checkEmailAndPasswordProvided
    .then(() => {
      return Users.findAll({
        where: {
          emailAddress: credentials.email
        }
      });
    })
    .then((user) => { return checkEmailInDatabase(user); })
    .then((user) => { return checkPassword(user); })
    .then((user) => {
      let courseId = getCourseId();
      return Promise.all([courseId, user]);
    })
    .then((courseAndUser) => { return checkCourseIDandUserID(courseAndUser); })
    .then((course) => {
      // wait for the row to be removed before responding
      return course.destroy()
        .then(() => res.status(204).json({ message: '' }));
    })
    .catch((err) => {
      console.warn(err);
      return res.status(401).json({ error: err.message });
    });
});

module.exports = router;
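A quick way to exercise these routes is a small Basic-auth client. The sketch below uses Python's requests library; the base URL, port, mount point, and user credentials are placeholders for illustration, not part of the project.

# Hypothetical client for the routes above; BASE and AUTH are assumptions.
import requests

BASE = 'http://localhost:5000/api'        # assumed mount point for the router
AUTH = ('joe@smith.com', 'joepassword')   # assumed existing user

# GET /courses requires no credentials.
print(requests.get(BASE + '/courses').json())

# POST /courses sends the Basic auth header that getCredentials() parses.
resp = requests.post(BASE + '/courses', auth=AUTH, json={
    'title': 'New Course',
    'description': 'Created through the API',
})
print(resp.status_code)  # 201 on success, 401 when a validation check rejects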
complex_numbers_test.py
import math
import unittest

from complex_numbers import (
    ComplexNumber,
)

# Tests adapted from `problem-specifications//canonical-data.json`


class ComplexNumbersTest(unittest.TestCase):

    # Real part

    def test_real_part_of_a_purely_real_number(self):
        self.assertEqual(ComplexNumber(1, 0).real, 1)

    def test_real_part_of_a_purely_imaginary_number(self):
        self.assertEqual(ComplexNumber(0, 1).real, 0)

    def test_real_part_of_a_number_with_real_and_imaginary_part(self):
        self.assertEqual(ComplexNumber(1, 2).real, 1)

    # Imaginary part

    def test_imaginary_part_of_a_purely_real_number(self):
        self.assertEqual(ComplexNumber(1, 0).imaginary, 0)

    def test_imaginary_part_of_a_purely_imaginary_number(self):
        self.assertEqual(ComplexNumber(0, 1).imaginary, 1)

    def test_imaginary_part_of_a_number_with_real_and_imaginary_part(self):
        self.assertEqual(ComplexNumber(1, 2).imaginary, 2)

    def test_imaginary_unit(self):
        self.assertEqual(
            ComplexNumber(0, 1) * ComplexNumber(0, 1), ComplexNumber(-1, 0)
        )

    # Arithmetic

    # Addition

    def test_add_purely_real_numbers(self):
        self.assertEqual(ComplexNumber(1, 0) + ComplexNumber(2, 0), ComplexNumber(3, 0))

    def test_add_purely_imaginary_numbers(self):
        self.assertEqual(ComplexNumber(0, 1) + ComplexNumber(0, 2), ComplexNumber(0, 3))

    def test_add_numbers_with_real_and_imaginary_part(self):
        self.assertEqual(ComplexNumber(1, 2) + ComplexNumber(3, 4), ComplexNumber(4, 6))

    # Subtraction

    def test_subtract_purely_real_numbers(self):
        self.assertEqual(
            ComplexNumber(1, 0) - ComplexNumber(2, 0), ComplexNumber(-1, 0)
        )

    def test_subtract_purely_imaginary_numbers(self):
        self.assertEqual(
            ComplexNumber(0, 1) - ComplexNumber(0, 2), ComplexNumber(0, -1)
        )

    def test_subtract_numbers_with_real_and_imaginary_part(self):
        self.assertEqual(
            ComplexNumber(1, 2) - ComplexNumber(3, 4), ComplexNumber(-2, -2)
        )

    # Multiplication

    def test_multiply_purely_real_numbers(self):
        self.assertEqual(ComplexNumber(1, 0) * ComplexNumber(2, 0), ComplexNumber(2, 0))

    def test_multiply_purely_imaginary_numbers(self):
        self.assertEqual(
            ComplexNumber(0, 1) * ComplexNumber(0, 2), ComplexNumber(-2, 0)
        )

    def test_multiply_numbers_with_real_and_imaginary_part(self):
        self.assertEqual(
            ComplexNumber(1, 2) * ComplexNumber(3, 4), ComplexNumber(-5, 10)
        )

    # Division

    def test_divide_purely_real_numbers(self):
        self.assertAlmostEqual(
            ComplexNumber(1, 0) / ComplexNumber(2, 0), ComplexNumber(0.5, 0)
        )

    def test_divide_purely_imaginary_numbers(self):
        self.assertAlmostEqual(
            ComplexNumber(0, 1) / ComplexNumber(0, 2), ComplexNumber(0.5, 0)
        )

    def test_divide_numbers_with_real_and_imaginary_part(self):
        self.assertAlmostEqual(
            ComplexNumber(1, 2) / ComplexNumber(3, 4), ComplexNumber(0.44, 0.08)
        )

    # Absolute value

    def test_absolute_value_of_a_positive_purely_real_number(self):
        self.assertEqual(abs(ComplexNumber(5, 0)), 5)

    def test_absolute_value_of_a_negative_purely_real_number(self):
        self.assertEqual(abs(ComplexNumber(-5, 0)), 5)

    def test_absolute_value_of_a_purely_imaginary_number_with_positive_imaginary_part(
        self,
    ):
        self.assertEqual(abs(ComplexNumber(0, 5)), 5)

    def test_absolute_value_of_a_purely_imaginary_number_with_negative_imaginary_part(
        self,
    ):
        self.assertEqual(abs(ComplexNumber(0, -5)), 5)

    def test_absolute_value_of_a_number_with_real_and_imaginary_part(self):
        self.assertEqual(abs(ComplexNumber(3, 4)), 5)

    # Complex conjugate

    def test_conjugate_a_purely_real_number(self):
        self.assertEqual(ComplexNumber(5, 0).conjugate(), ComplexNumber(5, 0))

    def test_conjugate_a_purely_imaginary_number(self):
        self.assertEqual(ComplexNumber(0, 5).conjugate(), ComplexNumber(0, -5))

    def test_conjugate_a_number_with_real_and_imaginary_part(self):
        self.assertEqual(ComplexNumber(1, 1).conjugate(), ComplexNumber(1, -1))

    # Complex exponential function

    def test_euler_s_identity_formula(self):
        self.assertAlmostEqual(ComplexNumber(0, math.pi).exp(), ComplexNumber(-1, 0))

    def test_exponential_of_0(self):
        self.assertAlmostEqual(ComplexNumber(0, 0).exp(), ComplexNumber(1, 0))

    def test_exponential_of_a_purely_real_number(self):
        self.assertAlmostEqual(ComplexNumber(1, 0).exp(), ComplexNumber(math.e, 0))

    def test_exponential_of_a_number_with_real_and_imaginary_part(self):
        self.assertAlmostEqual(
            ComplexNumber(math.log(2), math.pi).exp(), ComplexNumber(-2, 0)
        )

    # Operations between real numbers and complex numbers

    def test_add_real_number_to_complex_number(self):
        self.assertEqual(ComplexNumber(1, 2) + 5, ComplexNumber(6, 2))

    def test_add_complex_number_to_real_number(self):
        self.assertEqual(5 + ComplexNumber(1, 2), ComplexNumber(6, 2))

    def test_subtract_real_number_from_complex_number(self):
        self.assertEqual(ComplexNumber(5, 7) - 4, ComplexNumber(1, 7))

    def test_subtract_complex_number_from_real_number(self):
        self.assertEqual(4 - ComplexNumber(5, 7), ComplexNumber(-1, -7))

    def test_multiply_complex_number_by_real_number(self):
        self.assertEqual(ComplexNumber(2, 5) * 5, ComplexNumber(10, 25))

    def test_multiply_real_number_by_complex_number(self):
        self.assertEqual(5 * ComplexNumber(2, 5), ComplexNumber(10, 25))

    def test_divide_complex_number_by_real_number(self):
        self.assertAlmostEqual(ComplexNumber(10, 100) / 10, ComplexNumber(1, 10))

    def test_divide_real_number_by_complex_number(self):
        self.assertAlmostEqual(5 / ComplexNumber(1, 1), ComplexNumber(2.5, -2.5))

    # Additional tests for this track

    def test_equality_of_complex_numbers(self):
        self.assertEqual(ComplexNumber(1, 2), ComplexNumber(1, 2))

    def test_inequality_of_real_part(self):
        self.assertNotEqual(ComplexNumber(1, 2), ComplexNumber(2, 2))

    def test_inequality_of_imaginary_part(self):
        self.assertNotEqual(ComplexNumber(1, 2), ComplexNumber(1, 1))


if __name__ == "__main__":
    unittest.main()
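The tests import ComplexNumber from a complex_numbers module that is not included in this dump. Below is a minimal sketch of a class that would satisfy the assertions above; the operator coverage is inferred from the tests, and the actual exercise solution may differ.

# A minimal sketch of the complex_numbers module the tests assume.
import math


class ComplexNumber:
    def __init__(self, real, imaginary):
        self.real = real
        self.imaginary = imaginary

    @staticmethod
    def _coerce(value):
        # Promote a plain real number to a ComplexNumber with zero imaginary part.
        if isinstance(value, ComplexNumber):
            return value
        return ComplexNumber(value, 0)

    def __eq__(self, other):
        other = self._coerce(other)
        return self.real == other.real and self.imaginary == other.imaginary

    def __add__(self, other):
        other = self._coerce(other)
        return ComplexNumber(self.real + other.real,
                             self.imaginary + other.imaginary)

    __radd__ = __add__  # addition is commutative

    def __sub__(self, other):
        other = self._coerce(other)
        return ComplexNumber(self.real - other.real,
                             self.imaginary - other.imaginary)

    def __rsub__(self, other):
        return self._coerce(other) - self

    def __mul__(self, other):
        # (a+bi)(c+di) = (ac - bd) + (bc + ad)i
        other = self._coerce(other)
        return ComplexNumber(
            self.real * other.real - self.imaginary * other.imaginary,
            self.imaginary * other.real + self.real * other.imaginary)

    __rmul__ = __mul__  # multiplication is commutative

    def __truediv__(self, other):
        # Multiply by the conjugate of the divisor and divide by |other|^2.
        other = self._coerce(other)
        denom = other.real ** 2 + other.imaginary ** 2
        return ComplexNumber(
            (self.real * other.real + self.imaginary * other.imaginary) / denom,
            (self.imaginary * other.real - self.real * other.imaginary) / denom)

    def __rtruediv__(self, other):
        return self._coerce(other) / self

    def __abs__(self):
        # Modulus; also lets assertAlmostEqual measure abs(first - second).
        return math.hypot(self.real, self.imaginary)

    def conjugate(self):
        return ComplexNumber(self.real, -self.imaginary)

    def exp(self):
        # e^(a+bi) = e^a * (cos b + i sin b)
        factor = math.exp(self.real)
        return ComplexNumber(factor * math.cos(self.imaginary),
                             factor * math.sin(self.imaginary))

    def __repr__(self):
        return 'ComplexNumber(%r, %r)' % (self.real, self.imaginary)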
operation.rs
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. /// Operation shape for `AbortMultipartUpload`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`abort_multipart_upload`](crate::client::Client::abort_multipart_upload). /// /// See [`crate::client::fluent_builders::AbortMultipartUpload`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct AbortMultipartUpload { _private: (), } impl AbortMultipartUpload { /// Creates a new builder-style object to manufacture [`AbortMultipartUploadInput`](crate::input::AbortMultipartUploadInput) pub fn builder() -> crate::input::abort_multipart_upload_input::Builder { crate::input::abort_multipart_upload_input::Builder::default() } /// Creates a new `AbortMultipartUpload` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for AbortMultipartUpload { type Output = std::result::Result< crate::output::AbortMultipartUploadOutput, crate::error::AbortMultipartUploadError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_abort_multipart_upload_error(response) } else { crate::operation_deser::parse_abort_multipart_upload_response(response) } } } /// Operation shape for `CompleteMultipartUpload`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`complete_multipart_upload`](crate::client::Client::complete_multipart_upload). /// /// See [`crate::client::fluent_builders::CompleteMultipartUpload`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct CompleteMultipartUpload { _private: (), } impl CompleteMultipartUpload { /// Creates a new builder-style object to manufacture [`CompleteMultipartUploadInput`](crate::input::CompleteMultipartUploadInput) pub fn builder() -> crate::input::complete_multipart_upload_input::Builder { crate::input::complete_multipart_upload_input::Builder::default() } /// Creates a new `CompleteMultipartUpload` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for CompleteMultipartUpload { type Output = std::result::Result< crate::output::CompleteMultipartUploadOutput, crate::error::CompleteMultipartUploadError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_complete_multipart_upload_error(response) } else { crate::operation_deser::parse_complete_multipart_upload_response(response) } } } /// Operation shape for `CopyObject`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`copy_object`](crate::client::Client::copy_object). /// /// See [`crate::client::fluent_builders::CopyObject`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct CopyObject { _private: (), } impl CopyObject { /// Creates a new builder-style object to manufacture [`CopyObjectInput`](crate::input::CopyObjectInput) pub fn builder() -> crate::input::copy_object_input::Builder { crate::input::copy_object_input::Builder::default() } /// Creates a new `CopyObject` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for CopyObject { type Output = std::result::Result<crate::output::CopyObjectOutput, crate::error::CopyObjectError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_copy_object_error(response) } else { crate::operation_deser::parse_copy_object_response(response) } } } /// Operation shape for `CreateBucket`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`create_bucket`](crate::client::Client::create_bucket). /// /// See [`crate::client::fluent_builders::CreateBucket`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct CreateBucket { _private: (), } impl CreateBucket { /// Creates a new builder-style object to manufacture [`CreateBucketInput`](crate::input::CreateBucketInput) pub fn builder() -> crate::input::create_bucket_input::Builder { crate::input::create_bucket_input::Builder::default() } /// Creates a new `CreateBucket` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for CreateBucket { type Output = std::result::Result<crate::output::CreateBucketOutput, crate::error::CreateBucketError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_create_bucket_error(response) } else { crate::operation_deser::parse_create_bucket_response(response) } } } /// Operation shape for `CreateMultipartUpload`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`create_multipart_upload`](crate::client::Client::create_multipart_upload). /// /// See [`crate::client::fluent_builders::CreateMultipartUpload`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct CreateMultipartUpload { _private: (), } impl CreateMultipartUpload { /// Creates a new builder-style object to manufacture [`CreateMultipartUploadInput`](crate::input::CreateMultipartUploadInput) pub fn builder() -> crate::input::create_multipart_upload_input::Builder { crate::input::create_multipart_upload_input::Builder::default() } /// Creates a new `CreateMultipartUpload` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for CreateMultipartUpload { type Output = std::result::Result< crate::output::CreateMultipartUploadOutput, crate::error::CreateMultipartUploadError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_create_multipart_upload_error(response) } else { crate::operation_deser::parse_create_multipart_upload_response(response) } } } #[cfg(test)] #[allow(unreachable_code, unused_variables)] mod create_multipart_upload_request_test { /// This test validates that the URI for CreateMultipartUpload is created correctly /// Test ID: CreateMultipartUploadUriConstruction #[tokio::test] async fn create_multipart_upload_uri_construction_request() { let config = crate::config::Config::builder().build(); let input = crate::input::CreateMultipartUploadInput::builder() .set_bucket(Some("test-bucket".to_string())) .set_key(Some("object.txt".to_string())) .build() .unwrap() .make_operation(&config) .await .expect("operation failed to build"); let (http_request, parts) = input.into_request_response().0.into_parts(); assert_eq!(http_request.method(), "POST"); assert_eq!(http_request.uri().path(), "/test-bucket/object.txt"); let expected_query_params = &["uploads", "x-id=CreateMultipartUpload"]; aws_smithy_protocol_test::assert_ok(aws_smithy_protocol_test::validate_query_string( &http_request, expected_query_params, )); } } /// Operation shape for `DeleteBucket`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_bucket`](crate::client::Client::delete_bucket). /// /// See [`crate::client::fluent_builders::DeleteBucket`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteBucket { _private: (), } impl DeleteBucket { /// Creates a new builder-style object to manufacture [`DeleteBucketInput`](crate::input::DeleteBucketInput) pub fn builder() -> crate::input::delete_bucket_input::Builder { crate::input::delete_bucket_input::Builder::default() } /// Creates a new `DeleteBucket` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteBucket { type Output = std::result::Result<crate::output::DeleteBucketOutput, crate::error::DeleteBucketError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_bucket_error(response) } else { crate::operation_deser::parse_delete_bucket_response(response) } } } /// Operation shape for `DeleteBucketAnalyticsConfiguration`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_bucket_analytics_configuration`](crate::client::Client::delete_bucket_analytics_configuration). /// /// See [`crate::client::fluent_builders::DeleteBucketAnalyticsConfiguration`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteBucketAnalyticsConfiguration { _private: (), } impl DeleteBucketAnalyticsConfiguration { /// Creates a new builder-style object to manufacture [`DeleteBucketAnalyticsConfigurationInput`](crate::input::DeleteBucketAnalyticsConfigurationInput) pub fn builder() -> crate::input::delete_bucket_analytics_configuration_input::Builder { crate::input::delete_bucket_analytics_configuration_input::Builder::default() } /// Creates a new `DeleteBucketAnalyticsConfiguration` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteBucketAnalyticsConfiguration { type Output = std::result::Result< crate::output::DeleteBucketAnalyticsConfigurationOutput, crate::error::DeleteBucketAnalyticsConfigurationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_bucket_analytics_configuration_error(response) } else { crate::operation_deser::parse_delete_bucket_analytics_configuration_response(response) } } } /// Operation shape for `DeleteBucketCors`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_bucket_cors`](crate::client::Client::delete_bucket_cors). /// /// See [`crate::client::fluent_builders::DeleteBucketCors`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteBucketCors { _private: (), } impl DeleteBucketCors { /// Creates a new builder-style object to manufacture [`DeleteBucketCorsInput`](crate::input::DeleteBucketCorsInput) pub fn builder() -> crate::input::delete_bucket_cors_input::Builder { crate::input::delete_bucket_cors_input::Builder::default() } /// Creates a new `DeleteBucketCors` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteBucketCors { type Output = std::result::Result< crate::output::DeleteBucketCorsOutput, crate::error::DeleteBucketCorsError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_bucket_cors_error(response) } else { crate::operation_deser::parse_delete_bucket_cors_response(response) } } } /// Operation shape for `DeleteBucketEncryption`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_bucket_encryption`](crate::client::Client::delete_bucket_encryption). /// /// See [`crate::client::fluent_builders::DeleteBucketEncryption`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteBucketEncryption { _private: (), } impl DeleteBucketEncryption { /// Creates a new builder-style object to manufacture [`DeleteBucketEncryptionInput`](crate::input::DeleteBucketEncryptionInput) pub fn builder() -> crate::input::delete_bucket_encryption_input::Builder { crate::input::delete_bucket_encryption_input::Builder::default() } /// Creates a new `DeleteBucketEncryption` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteBucketEncryption { type Output = std::result::Result< crate::output::DeleteBucketEncryptionOutput, crate::error::DeleteBucketEncryptionError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_bucket_encryption_error(response) } else { crate::operation_deser::parse_delete_bucket_encryption_response(response) } } } /// Operation shape for `DeleteBucketIntelligentTieringConfiguration`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_bucket_intelligent_tiering_configuration`](crate::client::Client::delete_bucket_intelligent_tiering_configuration). /// /// See [`crate::client::fluent_builders::DeleteBucketIntelligentTieringConfiguration`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteBucketIntelligentTieringConfiguration { _private: (), } impl DeleteBucketIntelligentTieringConfiguration { /// Creates a new builder-style object to manufacture [`DeleteBucketIntelligentTieringConfigurationInput`](crate::input::DeleteBucketIntelligentTieringConfigurationInput) pub fn builder() -> crate::input::delete_bucket_intelligent_tiering_configuration_input::Builder { crate::input::delete_bucket_intelligent_tiering_configuration_input::Builder::default() } /// Creates a new `DeleteBucketIntelligentTieringConfiguration` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteBucketIntelligentTieringConfiguration { type Output = std::result::Result< crate::output::DeleteBucketIntelligentTieringConfigurationOutput, crate::error::DeleteBucketIntelligentTieringConfigurationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_bucket_intelligent_tiering_configuration_error( response, ) } else { crate::operation_deser::parse_delete_bucket_intelligent_tiering_configuration_response( response, ) } } } /// Operation shape for `DeleteBucketInventoryConfiguration`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_bucket_inventory_configuration`](crate::client::Client::delete_bucket_inventory_configuration). /// /// See [`crate::client::fluent_builders::DeleteBucketInventoryConfiguration`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteBucketInventoryConfiguration { _private: (), } impl DeleteBucketInventoryConfiguration { /// Creates a new builder-style object to manufacture [`DeleteBucketInventoryConfigurationInput`](crate::input::DeleteBucketInventoryConfigurationInput) pub fn builder() -> crate::input::delete_bucket_inventory_configuration_input::Builder { crate::input::delete_bucket_inventory_configuration_input::Builder::default() } /// Creates a new `DeleteBucketInventoryConfiguration` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteBucketInventoryConfiguration { type Output = std::result::Result< crate::output::DeleteBucketInventoryConfigurationOutput, crate::error::DeleteBucketInventoryConfigurationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_bucket_inventory_configuration_error(response) } else { crate::operation_deser::parse_delete_bucket_inventory_configuration_response(response) } } } /// Operation shape for `DeleteBucketLifecycle`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_bucket_lifecycle`](crate::client::Client::delete_bucket_lifecycle). /// /// See [`crate::client::fluent_builders::DeleteBucketLifecycle`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteBucketLifecycle { _private: (), } impl DeleteBucketLifecycle { /// Creates a new builder-style object to manufacture [`DeleteBucketLifecycleInput`](crate::input::DeleteBucketLifecycleInput) pub fn builder() -> crate::input::delete_bucket_lifecycle_input::Builder { crate::input::delete_bucket_lifecycle_input::Builder::default() } /// Creates a new `DeleteBucketLifecycle` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteBucketLifecycle { type Output = std::result::Result< crate::output::DeleteBucketLifecycleOutput, crate::error::DeleteBucketLifecycleError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_bucket_lifecycle_error(response) } else { crate::operation_deser::parse_delete_bucket_lifecycle_response(response) } } } /// Operation shape for `DeleteBucketMetricsConfiguration`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_bucket_metrics_configuration`](crate::client::Client::delete_bucket_metrics_configuration). /// /// See [`crate::client::fluent_builders::DeleteBucketMetricsConfiguration`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteBucketMetricsConfiguration { _private: (), } impl DeleteBucketMetricsConfiguration { /// Creates a new builder-style object to manufacture [`DeleteBucketMetricsConfigurationInput`](crate::input::DeleteBucketMetricsConfigurationInput) pub fn builder() -> crate::input::delete_bucket_metrics_configuration_input::Builder { crate::input::delete_bucket_metrics_configuration_input::Builder::default() } /// Creates a new `DeleteBucketMetricsConfiguration` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteBucketMetricsConfiguration { type Output = std::result::Result< crate::output::DeleteBucketMetricsConfigurationOutput, crate::error::DeleteBucketMetricsConfigurationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_bucket_metrics_configuration_error(response) } else { crate::operation_deser::parse_delete_bucket_metrics_configuration_response(response) } } } /// Operation shape for `DeleteBucketOwnershipControls`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_bucket_ownership_controls`](crate::client::Client::delete_bucket_ownership_controls). /// /// See [`crate::client::fluent_builders::DeleteBucketOwnershipControls`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteBucketOwnershipControls { _private: (), } impl DeleteBucketOwnershipControls { /// Creates a new builder-style object to manufacture [`DeleteBucketOwnershipControlsInput`](crate::input::DeleteBucketOwnershipControlsInput) pub fn builder() -> crate::input::delete_bucket_ownership_controls_input::Builder { crate::input::delete_bucket_ownership_controls_input::Builder::default() } /// Creates a new `DeleteBucketOwnershipControls` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteBucketOwnershipControls { type Output = std::result::Result< crate::output::DeleteBucketOwnershipControlsOutput, crate::error::DeleteBucketOwnershipControlsError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_bucket_ownership_controls_error(response) } else { crate::operation_deser::parse_delete_bucket_ownership_controls_response(response) } } } /// Operation shape for `DeleteBucketPolicy`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_bucket_policy`](crate::client::Client::delete_bucket_policy). /// /// See [`crate::client::fluent_builders::DeleteBucketPolicy`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteBucketPolicy { _private: (), } impl DeleteBucketPolicy { /// Creates a new builder-style object to manufacture [`DeleteBucketPolicyInput`](crate::input::DeleteBucketPolicyInput) pub fn builder() -> crate::input::delete_bucket_policy_input::Builder { crate::input::delete_bucket_policy_input::Builder::default() } /// Creates a new `DeleteBucketPolicy` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteBucketPolicy { type Output = std::result::Result< crate::output::DeleteBucketPolicyOutput, crate::error::DeleteBucketPolicyError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_bucket_policy_error(response) } else { crate::operation_deser::parse_delete_bucket_policy_response(response) } } } /// Operation shape for `DeleteBucketReplication`. 
/// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_bucket_replication`](crate::client::Client::delete_bucket_replication). /// /// See [`crate::client::fluent_builders::DeleteBucketReplication`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteBucketReplication { _private: (), } impl DeleteBucketReplication { /// Creates a new builder-style object to manufacture [`DeleteBucketReplicationInput`](crate::input::DeleteBucketReplicationInput) pub fn builder() -> crate::input::delete_bucket_replication_input::Builder { crate::input::delete_bucket_replication_input::Builder::default() } /// Creates a new `DeleteBucketReplication` operation. pub fn new() -> Self
} impl aws_smithy_http::response::ParseStrictResponse for DeleteBucketReplication { type Output = std::result::Result< crate::output::DeleteBucketReplicationOutput, crate::error::DeleteBucketReplicationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_bucket_replication_error(response) } else { crate::operation_deser::parse_delete_bucket_replication_response(response) } } } /// Operation shape for `DeleteBucketTagging`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_bucket_tagging`](crate::client::Client::delete_bucket_tagging). /// /// See [`crate::client::fluent_builders::DeleteBucketTagging`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteBucketTagging { _private: (), } impl DeleteBucketTagging { /// Creates a new builder-style object to manufacture [`DeleteBucketTaggingInput`](crate::input::DeleteBucketTaggingInput) pub fn builder() -> crate::input::delete_bucket_tagging_input::Builder { crate::input::delete_bucket_tagging_input::Builder::default() } /// Creates a new `DeleteBucketTagging` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteBucketTagging { type Output = std::result::Result< crate::output::DeleteBucketTaggingOutput, crate::error::DeleteBucketTaggingError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_bucket_tagging_error(response) } else { crate::operation_deser::parse_delete_bucket_tagging_response(response) } } } /// Operation shape for `DeleteBucketWebsite`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_bucket_website`](crate::client::Client::delete_bucket_website). /// /// See [`crate::client::fluent_builders::DeleteBucketWebsite`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteBucketWebsite { _private: (), } impl DeleteBucketWebsite { /// Creates a new builder-style object to manufacture [`DeleteBucketWebsiteInput`](crate::input::DeleteBucketWebsiteInput) pub fn builder() -> crate::input::delete_bucket_website_input::Builder { crate::input::delete_bucket_website_input::Builder::default() } /// Creates a new `DeleteBucketWebsite` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteBucketWebsite { type Output = std::result::Result< crate::output::DeleteBucketWebsiteOutput, crate::error::DeleteBucketWebsiteError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_bucket_website_error(response) } else { crate::operation_deser::parse_delete_bucket_website_response(response) } } } /// Operation shape for `DeleteObject`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_object`](crate::client::Client::delete_object). /// /// See [`crate::client::fluent_builders::DeleteObject`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteObject { _private: (), } impl DeleteObject { /// Creates a new builder-style object to manufacture [`DeleteObjectInput`](crate::input::DeleteObjectInput) pub fn builder() -> crate::input::delete_object_input::Builder { crate::input::delete_object_input::Builder::default() } /// Creates a new `DeleteObject` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteObject { type Output = std::result::Result<crate::output::DeleteObjectOutput, crate::error::DeleteObjectError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_object_error(response) } else { crate::operation_deser::parse_delete_object_response(response) } } } /// Operation shape for `DeleteObjects`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_objects`](crate::client::Client::delete_objects). /// /// See [`crate::client::fluent_builders::DeleteObjects`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteObjects { _private: (), } impl DeleteObjects { /// Creates a new builder-style object to manufacture [`DeleteObjectsInput`](crate::input::DeleteObjectsInput) pub fn builder() -> crate::input::delete_objects_input::Builder { crate::input::delete_objects_input::Builder::default() } /// Creates a new `DeleteObjects` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteObjects { type Output = std::result::Result<crate::output::DeleteObjectsOutput, crate::error::DeleteObjectsError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_delete_objects_error(response) } else { crate::operation_deser::parse_delete_objects_response(response) } } } /// Operation shape for `DeleteObjectTagging`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_object_tagging`](crate::client::Client::delete_object_tagging). /// /// See [`crate::client::fluent_builders::DeleteObjectTagging`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteObjectTagging { _private: (), } impl DeleteObjectTagging { /// Creates a new builder-style object to manufacture [`DeleteObjectTaggingInput`](crate::input::DeleteObjectTaggingInput) pub fn builder() -> crate::input::delete_object_tagging_input::Builder { crate::input::delete_object_tagging_input::Builder::default() } /// Creates a new `DeleteObjectTagging` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteObjectTagging { type Output = std::result::Result< crate::output::DeleteObjectTaggingOutput, crate::error::DeleteObjectTaggingError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_object_tagging_error(response) } else { crate::operation_deser::parse_delete_object_tagging_response(response) } } } /// Operation shape for `DeletePublicAccessBlock`. 
/// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_public_access_block`](crate::client::Client::delete_public_access_block). /// /// See [`crate::client::fluent_builders::DeletePublicAccessBlock`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeletePublicAccessBlock { _private: (), } impl DeletePublicAccessBlock { /// Creates a new builder-style object to manufacture [`DeletePublicAccessBlockInput`](crate::input::DeletePublicAccessBlockInput) pub fn builder() -> crate::input::delete_public_access_block_input::Builder { crate::input::delete_public_access_block_input::Builder::default() } /// Creates a new `DeletePublicAccessBlock` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeletePublicAccessBlock { type Output = std::result::Result< crate::output::DeletePublicAccessBlockOutput, crate::error::DeletePublicAccessBlockError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 204 { crate::operation_deser::parse_delete_public_access_block_error(response) } else { crate::operation_deser::parse_delete_public_access_block_response(response) } } } /// Operation shape for `GetBucketAccelerateConfiguration`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_bucket_accelerate_configuration`](crate::client::Client::get_bucket_accelerate_configuration). /// /// See [`crate::client::fluent_builders::GetBucketAccelerateConfiguration`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetBucketAccelerateConfiguration { _private: (), } impl GetBucketAccelerateConfiguration { /// Creates a new builder-style object to manufacture [`GetBucketAccelerateConfigurationInput`](crate::input::GetBucketAccelerateConfigurationInput) pub fn builder() -> crate::input::get_bucket_accelerate_configuration_input::Builder { crate::input::get_bucket_accelerate_configuration_input::Builder::default() } /// Creates a new `GetBucketAccelerateConfiguration` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetBucketAccelerateConfiguration { type Output = std::result::Result< crate::output::GetBucketAccelerateConfigurationOutput, crate::error::GetBucketAccelerateConfigurationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_bucket_accelerate_configuration_error(response) } else { crate::operation_deser::parse_get_bucket_accelerate_configuration_response(response) } } } /// Operation shape for `GetBucketAcl`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_bucket_acl`](crate::client::Client::get_bucket_acl). /// /// See [`crate::client::fluent_builders::GetBucketAcl`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetBucketAcl { _private: (), } impl GetBucketAcl { /// Creates a new builder-style object to manufacture [`GetBucketAclInput`](crate::input::GetBucketAclInput) pub fn builder() -> crate::input::get_bucket_acl_input::Builder { crate::input::get_bucket_acl_input::Builder::default() } /// Creates a new `GetBucketAcl` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetBucketAcl { type Output = std::result::Result<crate::output::GetBucketAclOutput, crate::error::GetBucketAclError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_bucket_acl_error(response) } else { crate::operation_deser::parse_get_bucket_acl_response(response) } } } /// Operation shape for `GetBucketAnalyticsConfiguration`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_bucket_analytics_configuration`](crate::client::Client::get_bucket_analytics_configuration). /// /// See [`crate::client::fluent_builders::GetBucketAnalyticsConfiguration`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetBucketAnalyticsConfiguration { _private: (), } impl GetBucketAnalyticsConfiguration { /// Creates a new builder-style object to manufacture [`GetBucketAnalyticsConfigurationInput`](crate::input::GetBucketAnalyticsConfigurationInput) pub fn builder() -> crate::input::get_bucket_analytics_configuration_input::Builder { crate::input::get_bucket_analytics_configuration_input::Builder::default() } /// Creates a new `GetBucketAnalyticsConfiguration` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetBucketAnalyticsConfiguration { type Output = std::result::Result< crate::output::GetBucketAnalyticsConfigurationOutput, crate::error::GetBucketAnalyticsConfigurationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_bucket_analytics_configuration_error(response) } else { crate::operation_deser::parse_get_bucket_analytics_configuration_response(response) } } } /// Operation shape for `GetBucketCors`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_bucket_cors`](crate::client::Client::get_bucket_cors). /// /// See [`crate::client::fluent_builders::GetBucketCors`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetBucketCors { _private: (), } impl GetBucketCors { /// Creates a new builder-style object to manufacture [`GetBucketCorsInput`](crate::input::GetBucketCorsInput) pub fn builder() -> crate::input::get_bucket_cors_input::Builder { crate::input::get_bucket_cors_input::Builder::default() } /// Creates a new `GetBucketCors` operation. 
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetBucketCors {
    type Output =
        std::result::Result<crate::output::GetBucketCorsOutput, crate::error::GetBucketCorsError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_bucket_cors_error(response)
        } else {
            crate::operation_deser::parse_get_bucket_cors_response(response)
        }
    }
}
/// Operation shape for `GetBucketEncryption`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_bucket_encryption`](crate::client::Client::get_bucket_encryption).
///
/// See [`crate::client::fluent_builders::GetBucketEncryption`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetBucketEncryption {
    _private: (),
}
impl GetBucketEncryption {
    /// Creates a new builder-style object to manufacture [`GetBucketEncryptionInput`](crate::input::GetBucketEncryptionInput)
    pub fn builder() -> crate::input::get_bucket_encryption_input::Builder {
        crate::input::get_bucket_encryption_input::Builder::default()
    }
    /// Creates a new `GetBucketEncryption` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetBucketEncryption {
    type Output = std::result::Result<
        crate::output::GetBucketEncryptionOutput,
        crate::error::GetBucketEncryptionError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_bucket_encryption_error(response)
        } else {
            crate::operation_deser::parse_get_bucket_encryption_response(response)
        }
    }
}
/// Operation shape for `GetBucketIntelligentTieringConfiguration`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_bucket_intelligent_tiering_configuration`](crate::client::Client::get_bucket_intelligent_tiering_configuration).
///
/// See [`crate::client::fluent_builders::GetBucketIntelligentTieringConfiguration`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetBucketIntelligentTieringConfiguration {
    _private: (),
}
impl GetBucketIntelligentTieringConfiguration {
    /// Creates a new builder-style object to manufacture [`GetBucketIntelligentTieringConfigurationInput`](crate::input::GetBucketIntelligentTieringConfigurationInput)
    pub fn builder() -> crate::input::get_bucket_intelligent_tiering_configuration_input::Builder {
        crate::input::get_bucket_intelligent_tiering_configuration_input::Builder::default()
    }
    /// Creates a new `GetBucketIntelligentTieringConfiguration` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetBucketIntelligentTieringConfiguration {
    type Output = std::result::Result<
        crate::output::GetBucketIntelligentTieringConfigurationOutput,
        crate::error::GetBucketIntelligentTieringConfigurationError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_bucket_intelligent_tiering_configuration_error(
                response,
            )
        } else {
            crate::operation_deser::parse_get_bucket_intelligent_tiering_configuration_response(
                response,
            )
        }
    }
}
/// Operation shape for `GetBucketInventoryConfiguration`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_bucket_inventory_configuration`](crate::client::Client::get_bucket_inventory_configuration).
///
/// See [`crate::client::fluent_builders::GetBucketInventoryConfiguration`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetBucketInventoryConfiguration {
    _private: (),
}
impl GetBucketInventoryConfiguration {
    /// Creates a new builder-style object to manufacture [`GetBucketInventoryConfigurationInput`](crate::input::GetBucketInventoryConfigurationInput)
    pub fn builder() -> crate::input::get_bucket_inventory_configuration_input::Builder {
        crate::input::get_bucket_inventory_configuration_input::Builder::default()
    }
    /// Creates a new `GetBucketInventoryConfiguration` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetBucketInventoryConfiguration {
    type Output = std::result::Result<
        crate::output::GetBucketInventoryConfigurationOutput,
        crate::error::GetBucketInventoryConfigurationError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_bucket_inventory_configuration_error(response)
        } else {
            crate::operation_deser::parse_get_bucket_inventory_configuration_response(response)
        }
    }
}
/// Operation shape for `GetBucketLifecycleConfiguration`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_bucket_lifecycle_configuration`](crate::client::Client::get_bucket_lifecycle_configuration).
///
/// See [`crate::client::fluent_builders::GetBucketLifecycleConfiguration`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetBucketLifecycleConfiguration {
    _private: (),
}
impl GetBucketLifecycleConfiguration {
    /// Creates a new builder-style object to manufacture [`GetBucketLifecycleConfigurationInput`](crate::input::GetBucketLifecycleConfigurationInput)
    pub fn builder() -> crate::input::get_bucket_lifecycle_configuration_input::Builder {
        crate::input::get_bucket_lifecycle_configuration_input::Builder::default()
    }
    /// Creates a new `GetBucketLifecycleConfiguration` operation.
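    ///
    /// # Example
    ///
    /// A minimal sketch of building this operation's input directly via
    /// [`builder()`](Self::builder). It assumes this crate is consumed as
    /// `aws_sdk_s3` and that the generated input builder exposes a `bucket`
    /// setter and a fallible `build()`, as the other generated inputs do; the
    /// bucket name is a placeholder.
    ///
    /// ```no_run
    /// let input = aws_sdk_s3::operation::GetBucketLifecycleConfiguration::builder()
    ///     .bucket("my-bucket")
    ///     .build()
    ///     .expect("valid input");
    /// ```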
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetBucketLifecycleConfiguration {
    type Output = std::result::Result<
        crate::output::GetBucketLifecycleConfigurationOutput,
        crate::error::GetBucketLifecycleConfigurationError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_bucket_lifecycle_configuration_error(response)
        } else {
            crate::operation_deser::parse_get_bucket_lifecycle_configuration_response(response)
        }
    }
}
/// Operation shape for `GetBucketLocation`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_bucket_location`](crate::client::Client::get_bucket_location).
///
/// See [`crate::client::fluent_builders::GetBucketLocation`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetBucketLocation {
    _private: (),
}
impl GetBucketLocation {
    /// Creates a new builder-style object to manufacture [`GetBucketLocationInput`](crate::input::GetBucketLocationInput)
    pub fn builder() -> crate::input::get_bucket_location_input::Builder {
        crate::input::get_bucket_location_input::Builder::default()
    }
    /// Creates a new `GetBucketLocation` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetBucketLocation {
    type Output = std::result::Result<
        crate::output::GetBucketLocationOutput,
        crate::error::GetBucketLocationError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_bucket_location_error(response)
        } else {
            crate::operation_deser::parse_get_bucket_location_response(response)
        }
    }
}
#[cfg(test)]
#[allow(unreachable_code, unused_variables)]
mod get_bucket_location_request_test {
    /// This test case validates https://github.com/awslabs/aws-sdk-rust/issues/116
    /// Test ID: GetBucketLocation
    #[tokio::test]
    async fn get_bucket_location_response() {
        let expected_output = crate::output::GetBucketLocationOutput::builder()
            .set_location_constraint(Some(crate::model::BucketLocationConstraint::from(
                "us-west-2",
            )))
            .build();
        let http_response = http::response::Builder::new()
            .status(200)
            .body(aws_smithy_http::body::SdkBody::from("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<LocationConstraint xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">us-west-2</LocationConstraint>"))
            .unwrap();
        let mut op_response = aws_smithy_http::operation::Response::new(http_response);
        use aws_smithy_http::response::ParseHttpResponse;
        let parser = crate::operation::GetBucketLocation::new();
        let parsed = parser.parse_unloaded(&mut op_response);
        let parsed = parsed.unwrap_or_else(|| {
            let (http_response, _) = op_response.into_parts();
            let http_response =
                http_response.map(|body| bytes::Bytes::copy_from_slice(body.bytes().unwrap()));
            <crate::operation::GetBucketLocation as aws_smithy_http::response::ParseHttpResponse>::parse_loaded(&parser, &http_response)
        });
        let parsed = parsed.unwrap();
        assert_eq!(
            parsed.location_constraint, expected_output.location_constraint,
            "Unexpected value for `location_constraint`"
        );
    }
}
/// Operation shape for `GetBucketLogging`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_bucket_logging`](crate::client::Client::get_bucket_logging).
///
/// See [`crate::client::fluent_builders::GetBucketLogging`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetBucketLogging {
    _private: (),
}
impl GetBucketLogging {
    /// Creates a new builder-style object to manufacture [`GetBucketLoggingInput`](crate::input::GetBucketLoggingInput)
    pub fn builder() -> crate::input::get_bucket_logging_input::Builder {
        crate::input::get_bucket_logging_input::Builder::default()
    }
    /// Creates a new `GetBucketLogging` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetBucketLogging {
    type Output = std::result::Result<
        crate::output::GetBucketLoggingOutput,
        crate::error::GetBucketLoggingError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_bucket_logging_error(response)
        } else {
            crate::operation_deser::parse_get_bucket_logging_response(response)
        }
    }
}
/// Operation shape for `GetBucketMetricsConfiguration`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_bucket_metrics_configuration`](crate::client::Client::get_bucket_metrics_configuration).
///
/// See [`crate::client::fluent_builders::GetBucketMetricsConfiguration`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetBucketMetricsConfiguration {
    _private: (),
}
impl GetBucketMetricsConfiguration {
    /// Creates a new builder-style object to manufacture [`GetBucketMetricsConfigurationInput`](crate::input::GetBucketMetricsConfigurationInput)
    pub fn builder() -> crate::input::get_bucket_metrics_configuration_input::Builder {
        crate::input::get_bucket_metrics_configuration_input::Builder::default()
    }
    /// Creates a new `GetBucketMetricsConfiguration` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetBucketMetricsConfiguration {
    type Output = std::result::Result<
        crate::output::GetBucketMetricsConfigurationOutput,
        crate::error::GetBucketMetricsConfigurationError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_bucket_metrics_configuration_error(response)
        } else {
            crate::operation_deser::parse_get_bucket_metrics_configuration_response(response)
        }
    }
}
/// Operation shape for `GetBucketNotificationConfiguration`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_bucket_notification_configuration`](crate::client::Client::get_bucket_notification_configuration).
///
/// See [`crate::client::fluent_builders::GetBucketNotificationConfiguration`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetBucketNotificationConfiguration {
    _private: (),
}
impl GetBucketNotificationConfiguration {
    /// Creates a new builder-style object to manufacture [`GetBucketNotificationConfigurationInput`](crate::input::GetBucketNotificationConfigurationInput)
    pub fn builder() -> crate::input::get_bucket_notification_configuration_input::Builder {
        crate::input::get_bucket_notification_configuration_input::Builder::default()
    }
    /// Creates a new `GetBucketNotificationConfiguration` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetBucketNotificationConfiguration {
    type Output = std::result::Result<
        crate::output::GetBucketNotificationConfigurationOutput,
        crate::error::GetBucketNotificationConfigurationError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_bucket_notification_configuration_error(response)
        } else {
            crate::operation_deser::parse_get_bucket_notification_configuration_response(response)
        }
    }
}
/// Operation shape for `GetBucketOwnershipControls`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_bucket_ownership_controls`](crate::client::Client::get_bucket_ownership_controls).
///
/// See [`crate::client::fluent_builders::GetBucketOwnershipControls`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetBucketOwnershipControls {
    _private: (),
}
impl GetBucketOwnershipControls {
    /// Creates a new builder-style object to manufacture [`GetBucketOwnershipControlsInput`](crate::input::GetBucketOwnershipControlsInput)
    pub fn builder() -> crate::input::get_bucket_ownership_controls_input::Builder {
        crate::input::get_bucket_ownership_controls_input::Builder::default()
    }
    /// Creates a new `GetBucketOwnershipControls` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetBucketOwnershipControls {
    type Output = std::result::Result<
        crate::output::GetBucketOwnershipControlsOutput,
        crate::error::GetBucketOwnershipControlsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_bucket_ownership_controls_error(response)
        } else {
            crate::operation_deser::parse_get_bucket_ownership_controls_response(response)
        }
    }
}
/// Operation shape for `GetBucketPolicy`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_bucket_policy`](crate::client::Client::get_bucket_policy).
///
/// See [`crate::client::fluent_builders::GetBucketPolicy`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetBucketPolicy {
    _private: (),
}
impl GetBucketPolicy {
    /// Creates a new builder-style object to manufacture [`GetBucketPolicyInput`](crate::input::GetBucketPolicyInput)
    pub fn builder() -> crate::input::get_bucket_policy_input::Builder {
        crate::input::get_bucket_policy_input::Builder::default()
    }
    /// Creates a new `GetBucketPolicy` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetBucketPolicy {
    type Output = std::result::Result<
        crate::output::GetBucketPolicyOutput,
        crate::error::GetBucketPolicyError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_bucket_policy_error(response)
        } else {
            crate::operation_deser::parse_get_bucket_policy_response(response)
        }
    }
}
/// Operation shape for `GetBucketPolicyStatus`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_bucket_policy_status`](crate::client::Client::get_bucket_policy_status).
///
/// See [`crate::client::fluent_builders::GetBucketPolicyStatus`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetBucketPolicyStatus {
    _private: (),
}
impl GetBucketPolicyStatus {
    /// Creates a new builder-style object to manufacture [`GetBucketPolicyStatusInput`](crate::input::GetBucketPolicyStatusInput)
    pub fn builder() -> crate::input::get_bucket_policy_status_input::Builder {
        crate::input::get_bucket_policy_status_input::Builder::default()
    }
    /// Creates a new `GetBucketPolicyStatus` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetBucketPolicyStatus {
    type Output = std::result::Result<
        crate::output::GetBucketPolicyStatusOutput,
        crate::error::GetBucketPolicyStatusError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_bucket_policy_status_error(response)
        } else {
            crate::operation_deser::parse_get_bucket_policy_status_response(response)
        }
    }
}
/// Operation shape for `GetBucketReplication`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_bucket_replication`](crate::client::Client::get_bucket_replication).
///
/// See [`crate::client::fluent_builders::GetBucketReplication`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetBucketReplication {
    _private: (),
}
impl GetBucketReplication {
    /// Creates a new builder-style object to manufacture [`GetBucketReplicationInput`](crate::input::GetBucketReplicationInput)
    pub fn builder() -> crate::input::get_bucket_replication_input::Builder {
        crate::input::get_bucket_replication_input::Builder::default()
    }
    /// Creates a new `GetBucketReplication` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetBucketReplication {
    type Output = std::result::Result<
        crate::output::GetBucketReplicationOutput,
        crate::error::GetBucketReplicationError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_bucket_replication_error(response)
        } else {
            crate::operation_deser::parse_get_bucket_replication_response(response)
        }
    }
}
/// Operation shape for `GetBucketRequestPayment`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_bucket_request_payment`](crate::client::Client::get_bucket_request_payment).
///
/// See [`crate::client::fluent_builders::GetBucketRequestPayment`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetBucketRequestPayment {
    _private: (),
}
impl GetBucketRequestPayment {
    /// Creates a new builder-style object to manufacture [`GetBucketRequestPaymentInput`](crate::input::GetBucketRequestPaymentInput)
    pub fn builder() -> crate::input::get_bucket_request_payment_input::Builder {
        crate::input::get_bucket_request_payment_input::Builder::default()
    }
    /// Creates a new `GetBucketRequestPayment` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetBucketRequestPayment {
    type Output = std::result::Result<
        crate::output::GetBucketRequestPaymentOutput,
        crate::error::GetBucketRequestPaymentError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_bucket_request_payment_error(response)
        } else {
            crate::operation_deser::parse_get_bucket_request_payment_response(response)
        }
    }
}
/// Operation shape for `GetBucketTagging`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_bucket_tagging`](crate::client::Client::get_bucket_tagging).
///
/// See [`crate::client::fluent_builders::GetBucketTagging`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetBucketTagging {
    _private: (),
}
impl GetBucketTagging {
    /// Creates a new builder-style object to manufacture [`GetBucketTaggingInput`](crate::input::GetBucketTaggingInput)
    pub fn builder() -> crate::input::get_bucket_tagging_input::Builder {
        crate::input::get_bucket_tagging_input::Builder::default()
    }
    /// Creates a new `GetBucketTagging` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetBucketTagging {
    type Output = std::result::Result<
        crate::output::GetBucketTaggingOutput,
        crate::error::GetBucketTaggingError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_bucket_tagging_error(response)
        } else {
            crate::operation_deser::parse_get_bucket_tagging_response(response)
        }
    }
}
/// Operation shape for `GetBucketVersioning`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_bucket_versioning`](crate::client::Client::get_bucket_versioning).
///
/// See [`crate::client::fluent_builders::GetBucketVersioning`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetBucketVersioning {
    _private: (),
}
impl GetBucketVersioning {
    /// Creates a new builder-style object to manufacture [`GetBucketVersioningInput`](crate::input::GetBucketVersioningInput)
    pub fn builder() -> crate::input::get_bucket_versioning_input::Builder {
        crate::input::get_bucket_versioning_input::Builder::default()
    }
    /// Creates a new `GetBucketVersioning` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetBucketVersioning {
    type Output = std::result::Result<
        crate::output::GetBucketVersioningOutput,
        crate::error::GetBucketVersioningError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_bucket_versioning_error(response)
        } else {
            crate::operation_deser::parse_get_bucket_versioning_response(response)
        }
    }
}
/// Operation shape for `GetBucketWebsite`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_bucket_website`](crate::client::Client::get_bucket_website).
///
/// See [`crate::client::fluent_builders::GetBucketWebsite`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetBucketWebsite {
    _private: (),
}
impl GetBucketWebsite {
    /// Creates a new builder-style object to manufacture [`GetBucketWebsiteInput`](crate::input::GetBucketWebsiteInput)
    pub fn builder() -> crate::input::get_bucket_website_input::Builder {
        crate::input::get_bucket_website_input::Builder::default()
    }
    /// Creates a new `GetBucketWebsite` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetBucketWebsite {
    type Output = std::result::Result<
        crate::output::GetBucketWebsiteOutput,
        crate::error::GetBucketWebsiteError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_bucket_website_error(response)
        } else {
            crate::operation_deser::parse_get_bucket_website_response(response)
        }
    }
}
/// Operation shape for `GetObject`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_object`](crate::client::Client::get_object).
///
/// See [`crate::client::fluent_builders::GetObject`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetObject {
    _private: (),
}
impl GetObject {
    /// Creates a new builder-style object to manufacture [`GetObjectInput`](crate::input::GetObjectInput)
    pub fn builder() -> crate::input::get_object_input::Builder {
        crate::input::get_object_input::Builder::default()
    }
    /// Creates a new `GetObject` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseHttpResponse for GetObject {
    type Output =
        std::result::Result<crate::output::GetObjectOutput, crate::error::GetObjectError>;
    fn parse_unloaded(
        &self,
        response: &mut aws_smithy_http::operation::Response,
    ) -> Option<Self::Output> {
        // If this is an error, defer to the non-streaming parser.
        if !response.http().status().is_success() && response.http().status().as_u16() != 200 {
            return None;
        }
        Some(crate::operation_deser::parse_get_object(response))
    }
    fn parse_loaded(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // When streaming, we only hit this case if it's an error.
        crate::operation_deser::parse_get_object_error(response)
    }
}
/// Operation shape for `GetObjectAcl`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_object_acl`](crate::client::Client::get_object_acl).
///
/// See [`crate::client::fluent_builders::GetObjectAcl`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetObjectAcl {
    _private: (),
}
impl GetObjectAcl {
    /// Creates a new builder-style object to manufacture [`GetObjectAclInput`](crate::input::GetObjectAclInput)
    pub fn builder() -> crate::input::get_object_acl_input::Builder {
        crate::input::get_object_acl_input::Builder::default()
    }
    /// Creates a new `GetObjectAcl` operation.
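    ///
    /// # Example
    ///
    /// A minimal sketch of fetching an object ACL through the fluent client,
    /// assuming this crate is consumed as `aws_sdk_s3` with `aws_config`
    /// available; bucket and key are placeholders.
    ///
    /// ```no_run
    /// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
    /// let config = aws_config::load_from_env().await;
    /// let client = aws_sdk_s3::Client::new(&config);
    /// let acl = client
    ///     .get_object_acl()
    ///     .bucket("my-bucket")
    ///     .key("my-key")
    ///     .send()
    ///     .await?;
    /// // `grants` is optional in the generated output; default to an empty list.
    /// for grant in acl.grants.unwrap_or_default() {
    ///     println!("{:?}", grant);
    /// }
    /// # Ok(()) }
    /// ```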
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetObjectAcl {
    type Output =
        std::result::Result<crate::output::GetObjectAclOutput, crate::error::GetObjectAclError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_object_acl_error(response)
        } else {
            crate::operation_deser::parse_get_object_acl_response(response)
        }
    }
}
/// Operation shape for `GetObjectLegalHold`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_object_legal_hold`](crate::client::Client::get_object_legal_hold).
///
/// See [`crate::client::fluent_builders::GetObjectLegalHold`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetObjectLegalHold {
    _private: (),
}
impl GetObjectLegalHold {
    /// Creates a new builder-style object to manufacture [`GetObjectLegalHoldInput`](crate::input::GetObjectLegalHoldInput)
    pub fn builder() -> crate::input::get_object_legal_hold_input::Builder {
        crate::input::get_object_legal_hold_input::Builder::default()
    }
    /// Creates a new `GetObjectLegalHold` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetObjectLegalHold {
    type Output = std::result::Result<
        crate::output::GetObjectLegalHoldOutput,
        crate::error::GetObjectLegalHoldError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_object_legal_hold_error(response)
        } else {
            crate::operation_deser::parse_get_object_legal_hold_response(response)
        }
    }
}
/// Operation shape for `GetObjectLockConfiguration`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_object_lock_configuration`](crate::client::Client::get_object_lock_configuration).
///
/// See [`crate::client::fluent_builders::GetObjectLockConfiguration`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetObjectLockConfiguration {
    _private: (),
}
impl GetObjectLockConfiguration {
    /// Creates a new builder-style object to manufacture [`GetObjectLockConfigurationInput`](crate::input::GetObjectLockConfigurationInput)
    pub fn builder() -> crate::input::get_object_lock_configuration_input::Builder {
        crate::input::get_object_lock_configuration_input::Builder::default()
    }
    /// Creates a new `GetObjectLockConfiguration` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetObjectLockConfiguration {
    type Output = std::result::Result<
        crate::output::GetObjectLockConfigurationOutput,
        crate::error::GetObjectLockConfigurationError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_object_lock_configuration_error(response)
        } else {
            crate::operation_deser::parse_get_object_lock_configuration_response(response)
        }
    }
}
/// Operation shape for `GetObjectRetention`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_object_retention`](crate::client::Client::get_object_retention).
///
/// See [`crate::client::fluent_builders::GetObjectRetention`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetObjectRetention {
    _private: (),
}
impl GetObjectRetention {
    /// Creates a new builder-style object to manufacture [`GetObjectRetentionInput`](crate::input::GetObjectRetentionInput)
    pub fn builder() -> crate::input::get_object_retention_input::Builder {
        crate::input::get_object_retention_input::Builder::default()
    }
    /// Creates a new `GetObjectRetention` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetObjectRetention {
    type Output = std::result::Result<
        crate::output::GetObjectRetentionOutput,
        crate::error::GetObjectRetentionError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_object_retention_error(response)
        } else {
            crate::operation_deser::parse_get_object_retention_response(response)
        }
    }
}
/// Operation shape for `GetObjectTagging`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_object_tagging`](crate::client::Client::get_object_tagging).
///
/// See [`crate::client::fluent_builders::GetObjectTagging`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetObjectTagging {
    _private: (),
}
impl GetObjectTagging {
    /// Creates a new builder-style object to manufacture [`GetObjectTaggingInput`](crate::input::GetObjectTaggingInput)
    pub fn builder() -> crate::input::get_object_tagging_input::Builder {
        crate::input::get_object_tagging_input::Builder::default()
    }
    /// Creates a new `GetObjectTagging` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetObjectTagging {
    type Output = std::result::Result<
        crate::output::GetObjectTaggingOutput,
        crate::error::GetObjectTaggingError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_object_tagging_error(response)
        } else {
            crate::operation_deser::parse_get_object_tagging_response(response)
        }
    }
}
/// Operation shape for `GetObjectTorrent`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_object_torrent`](crate::client::Client::get_object_torrent).
///
/// See [`crate::client::fluent_builders::GetObjectTorrent`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetObjectTorrent {
    _private: (),
}
impl GetObjectTorrent {
    /// Creates a new builder-style object to manufacture [`GetObjectTorrentInput`](crate::input::GetObjectTorrentInput)
    pub fn builder() -> crate::input::get_object_torrent_input::Builder {
        crate::input::get_object_torrent_input::Builder::default()
    }
    /// Creates a new `GetObjectTorrent` operation.
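    ///
    /// # Example
    ///
    /// A minimal sketch of consuming this operation's streaming body, assuming
    /// this crate is consumed as `aws_sdk_s3` with `aws_config` available;
    /// bucket and key are placeholders.
    ///
    /// ```no_run
    /// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
    /// let config = aws_config::load_from_env().await;
    /// let client = aws_sdk_s3::Client::new(&config);
    /// let resp = client
    ///     .get_object_torrent()
    ///     .bucket("my-bucket")
    ///     .key("my-key")
    ///     .send()
    ///     .await?;
    /// // The torrent file arrives as a `ByteStream`; collect it into memory.
    /// let data = resp.body.collect().await?.into_bytes();
    /// # Ok(()) }
    /// ```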
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseHttpResponse for GetObjectTorrent {
    type Output = std::result::Result<
        crate::output::GetObjectTorrentOutput,
        crate::error::GetObjectTorrentError,
    >;
    fn parse_unloaded(
        &self,
        response: &mut aws_smithy_http::operation::Response,
    ) -> Option<Self::Output> {
        // If this is an error, defer to the non-streaming parser.
        if !response.http().status().is_success() && response.http().status().as_u16() != 200 {
            return None;
        }
        Some(crate::operation_deser::parse_get_object_torrent(response))
    }
    fn parse_loaded(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // When streaming, we only hit this case if it's an error.
        crate::operation_deser::parse_get_object_torrent_error(response)
    }
}
/// Operation shape for `GetPublicAccessBlock`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`get_public_access_block`](crate::client::Client::get_public_access_block).
///
/// See [`crate::client::fluent_builders::GetPublicAccessBlock`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetPublicAccessBlock {
    _private: (),
}
impl GetPublicAccessBlock {
    /// Creates a new builder-style object to manufacture [`GetPublicAccessBlockInput`](crate::input::GetPublicAccessBlockInput)
    pub fn builder() -> crate::input::get_public_access_block_input::Builder {
        crate::input::get_public_access_block_input::Builder::default()
    }
    /// Creates a new `GetPublicAccessBlock` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for GetPublicAccessBlock {
    type Output = std::result::Result<
        crate::output::GetPublicAccessBlockOutput,
        crate::error::GetPublicAccessBlockError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_get_public_access_block_error(response)
        } else {
            crate::operation_deser::parse_get_public_access_block_response(response)
        }
    }
}
/// Operation shape for `HeadBucket`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`head_bucket`](crate::client::Client::head_bucket).
///
/// See [`crate::client::fluent_builders::HeadBucket`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct HeadBucket {
    _private: (),
}
impl HeadBucket {
    /// Creates a new builder-style object to manufacture [`HeadBucketInput`](crate::input::HeadBucketInput)
    pub fn builder() -> crate::input::head_bucket_input::Builder {
        crate::input::head_bucket_input::Builder::default()
    }
    /// Creates a new `HeadBucket` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for HeadBucket {
    type Output =
        std::result::Result<crate::output::HeadBucketOutput, crate::error::HeadBucketError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_head_bucket_error(response)
        } else {
            crate::operation_deser::parse_head_bucket_response(response)
        }
    }
}
#[cfg(test)]
#[allow(unreachable_code, unused_variables)]
mod head_bucket_request_test {
    /// This test case validates https://github.com/awslabs/smithy-rs/issues/456
    /// Test ID: HeadObjectEmptyBody
    #[tokio::test]
    async fn head_object_empty_body_response() {
        let expected_output = crate::error::NotFound::builder().build();
        let http_response = http::response::Builder::new()
            .header("content-type", "application/xml")
            .header("date", "Thu, 03 Jun 2021 04:05:52 GMT")
            .header("server", "AmazonS3")
            .header(
                "x-amz-id-2",
                "UTniwu6QmCIjVeuK2ZfeWBOnu7SqMQOS3Vac6B/K4H2ZCawYUl+nDbhGTImuyhZ5DFiojR3Kcz4=",
            )
            .header("x-amz-request-id", "GRZ6BZ468DF52F2E")
            .status(404)
            .body(aws_smithy_http::body::SdkBody::from(""))
            .unwrap();
        let mut op_response = aws_smithy_http::operation::Response::new(http_response);
        use aws_smithy_http::response::ParseHttpResponse;
        let parser = crate::operation::HeadBucket::new();
        let parsed = parser.parse_unloaded(&mut op_response);
        let parsed = parsed.unwrap_or_else(|| {
            let (http_response, _) = op_response.into_parts();
            let http_response =
                http_response.map(|body| bytes::Bytes::copy_from_slice(body.bytes().unwrap()));
            <crate::operation::HeadBucket as aws_smithy_http::response::ParseHttpResponse>::parse_loaded(&parser, &http_response)
        });
        let parsed = parsed.expect_err("should be error response");
        if let crate::error::HeadBucketErrorKind::NotFound(actual_error) = parsed.kind {
            assert_eq!(expected_output, actual_error);
        } else {
            panic!(
                "wrong variant: Got: {:?}. Expected: {:?}",
                parsed, expected_output
            );
        }
    }
}
/// Operation shape for `HeadObject`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`head_object`](crate::client::Client::head_object).
///
/// See [`crate::client::fluent_builders::HeadObject`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct HeadObject {
    _private: (),
}
impl HeadObject {
    /// Creates a new builder-style object to manufacture [`HeadObjectInput`](crate::input::HeadObjectInput)
    pub fn builder() -> crate::input::head_object_input::Builder {
        crate::input::head_object_input::Builder::default()
    }
    /// Creates a new `HeadObject` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for HeadObject {
    type Output =
        std::result::Result<crate::output::HeadObjectOutput, crate::error::HeadObjectError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_head_object_error(response)
        } else {
            crate::operation_deser::parse_head_object_response(response)
        }
    }
}
#[cfg(test)]
#[allow(unreachable_code, unused_variables)]
mod head_object_request_test {
    /// https://github.com/awslabs/aws-sdk-rust/issues/331
    /// Test ID: HeadObjectUriEncoding
    #[tokio::test]
    async fn head_object_uri_encoding_request() {
        let config = crate::config::Config::builder().build();
        let input = crate::input::HeadObjectInput::builder()
            .set_bucket(Some("test-bucket".to_string()))
            .set_key(Some("<> `?🐱".to_string()))
            .build()
            .unwrap()
            .make_operation(&config)
            .await
            .expect("operation failed to build");
        let (http_request, parts) = input.into_request_response().0.into_parts();
        assert_eq!(http_request.method(), "HEAD");
        assert_eq!(
            http_request.uri().path(),
            "/test-bucket/%3C%3E%20%60%3F%F0%9F%90%B1"
        );
    }
    /// This test case validates https://github.com/awslabs/smithy-rs/issues/456
    /// Test ID: HeadObjectEmptyBody
    #[tokio::test]
    async fn head_object_empty_body_response() {
        let expected_output = crate::error::NotFound::builder().build();
        let http_response = http::response::Builder::new()
            .header("content-type", "application/xml")
            .header("date", "Thu, 03 Jun 2021 04:05:52 GMT")
            .header("server", "AmazonS3")
            .header(
                "x-amz-id-2",
                "UTniwu6QmCIjVeuK2ZfeWBOnu7SqMQOS3Vac6B/K4H2ZCawYUl+nDbhGTImuyhZ5DFiojR3Kcz4=",
            )
            .header("x-amz-request-id", "GRZ6BZ468DF52F2E")
            .status(404)
            .body(aws_smithy_http::body::SdkBody::from(""))
            .unwrap();
        let mut op_response = aws_smithy_http::operation::Response::new(http_response);
        use aws_smithy_http::response::ParseHttpResponse;
        let parser = crate::operation::HeadObject::new();
        let parsed = parser.parse_unloaded(&mut op_response);
        let parsed = parsed.unwrap_or_else(|| {
            let (http_response, _) = op_response.into_parts();
            let http_response =
                http_response.map(|body| bytes::Bytes::copy_from_slice(body.bytes().unwrap()));
            <crate::operation::HeadObject as aws_smithy_http::response::ParseHttpResponse>::parse_loaded(&parser, &http_response)
        });
        let parsed = parsed.expect_err("should be error response");
        if let crate::error::HeadObjectErrorKind::NotFound(actual_error) = parsed.kind {
            assert_eq!(expected_output, actual_error);
        } else {
            panic!(
                "wrong variant: Got: {:?}. Expected: {:?}",
                parsed, expected_output
            );
        }
    }
}
/// Operation shape for `ListBucketAnalyticsConfigurations`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_bucket_analytics_configurations`](crate::client::Client::list_bucket_analytics_configurations).
///
/// See [`crate::client::fluent_builders::ListBucketAnalyticsConfigurations`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListBucketAnalyticsConfigurations {
    _private: (),
}
impl ListBucketAnalyticsConfigurations {
    /// Creates a new builder-style object to manufacture [`ListBucketAnalyticsConfigurationsInput`](crate::input::ListBucketAnalyticsConfigurationsInput)
    pub fn builder() -> crate::input::list_bucket_analytics_configurations_input::Builder {
        crate::input::list_bucket_analytics_configurations_input::Builder::default()
    }
    /// Creates a new `ListBucketAnalyticsConfigurations` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListBucketAnalyticsConfigurations {
    type Output = std::result::Result<
        crate::output::ListBucketAnalyticsConfigurationsOutput,
        crate::error::ListBucketAnalyticsConfigurationsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_bucket_analytics_configurations_error(response)
        } else {
            crate::operation_deser::parse_list_bucket_analytics_configurations_response(response)
        }
    }
}
/// Operation shape for `ListBucketIntelligentTieringConfigurations`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_bucket_intelligent_tiering_configurations`](crate::client::Client::list_bucket_intelligent_tiering_configurations).
///
/// See [`crate::client::fluent_builders::ListBucketIntelligentTieringConfigurations`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListBucketIntelligentTieringConfigurations {
    _private: (),
}
impl ListBucketIntelligentTieringConfigurations {
    /// Creates a new builder-style object to manufacture [`ListBucketIntelligentTieringConfigurationsInput`](crate::input::ListBucketIntelligentTieringConfigurationsInput)
    pub fn builder() -> crate::input::list_bucket_intelligent_tiering_configurations_input::Builder
    {
        crate::input::list_bucket_intelligent_tiering_configurations_input::Builder::default()
    }
    /// Creates a new `ListBucketIntelligentTieringConfigurations` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse
    for ListBucketIntelligentTieringConfigurations
{
    type Output = std::result::Result<
        crate::output::ListBucketIntelligentTieringConfigurationsOutput,
        crate::error::ListBucketIntelligentTieringConfigurationsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_bucket_intelligent_tiering_configurations_error(
                response,
            )
        } else {
            crate::operation_deser::parse_list_bucket_intelligent_tiering_configurations_response(
                response,
            )
        }
    }
}
/// Operation shape for `ListBucketInventoryConfigurations`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_bucket_inventory_configurations`](crate::client::Client::list_bucket_inventory_configurations).
///
/// See [`crate::client::fluent_builders::ListBucketInventoryConfigurations`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListBucketInventoryConfigurations {
    _private: (),
}
impl ListBucketInventoryConfigurations {
    /// Creates a new builder-style object to manufacture [`ListBucketInventoryConfigurationsInput`](crate::input::ListBucketInventoryConfigurationsInput)
    pub fn builder() -> crate::input::list_bucket_inventory_configurations_input::Builder {
        crate::input::list_bucket_inventory_configurations_input::Builder::default()
    }
    /// Creates a new `ListBucketInventoryConfigurations` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListBucketInventoryConfigurations {
    type Output = std::result::Result<
        crate::output::ListBucketInventoryConfigurationsOutput,
        crate::error::ListBucketInventoryConfigurationsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_bucket_inventory_configurations_error(response)
        } else {
            crate::operation_deser::parse_list_bucket_inventory_configurations_response(response)
        }
    }
}
/// Operation shape for `ListBucketMetricsConfigurations`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_bucket_metrics_configurations`](crate::client::Client::list_bucket_metrics_configurations).
///
/// See [`crate::client::fluent_builders::ListBucketMetricsConfigurations`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListBucketMetricsConfigurations {
    _private: (),
}
impl ListBucketMetricsConfigurations {
    /// Creates a new builder-style object to manufacture [`ListBucketMetricsConfigurationsInput`](crate::input::ListBucketMetricsConfigurationsInput)
    pub fn builder() -> crate::input::list_bucket_metrics_configurations_input::Builder {
        crate::input::list_bucket_metrics_configurations_input::Builder::default()
    }
    /// Creates a new `ListBucketMetricsConfigurations` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListBucketMetricsConfigurations {
    type Output = std::result::Result<
        crate::output::ListBucketMetricsConfigurationsOutput,
        crate::error::ListBucketMetricsConfigurationsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_bucket_metrics_configurations_error(response)
        } else {
            crate::operation_deser::parse_list_bucket_metrics_configurations_response(response)
        }
    }
}
/// Operation shape for `ListBuckets`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_buckets`](crate::client::Client::list_buckets).
///
/// See [`crate::client::fluent_builders::ListBuckets`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListBuckets {
    _private: (),
}
impl ListBuckets {
    /// Creates a new builder-style object to manufacture [`ListBucketsInput`](crate::input::ListBucketsInput)
    pub fn builder() -> crate::input::list_buckets_input::Builder {
        crate::input::list_buckets_input::Builder::default()
    }
    /// Creates a new `ListBuckets` operation.
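    ///
    /// # Example
    ///
    /// A minimal sketch of listing buckets through the fluent client, assuming
    /// this crate is consumed as `aws_sdk_s3` with `aws_config` available.
    ///
    /// ```no_run
    /// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
    /// let config = aws_config::load_from_env().await;
    /// let client = aws_sdk_s3::Client::new(&config);
    /// let resp = client.list_buckets().send().await?;
    /// // `buckets` is optional in the generated output; default to an empty list.
    /// for bucket in resp.buckets.unwrap_or_default() {
    ///     println!("{}", bucket.name.unwrap_or_default());
    /// }
    /// # Ok(()) }
    /// ```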
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListBuckets {
    type Output =
        std::result::Result<crate::output::ListBucketsOutput, crate::error::ListBucketsError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_buckets_error(response)
        } else {
            crate::operation_deser::parse_list_buckets_response(response)
        }
    }
}
/// Operation shape for `ListMultipartUploads`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_multipart_uploads`](crate::client::Client::list_multipart_uploads).
///
/// See [`crate::client::fluent_builders::ListMultipartUploads`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListMultipartUploads {
    _private: (),
}
impl ListMultipartUploads {
    /// Creates a new builder-style object to manufacture [`ListMultipartUploadsInput`](crate::input::ListMultipartUploadsInput)
    pub fn builder() -> crate::input::list_multipart_uploads_input::Builder {
        crate::input::list_multipart_uploads_input::Builder::default()
    }
    /// Creates a new `ListMultipartUploads` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListMultipartUploads {
    type Output = std::result::Result<
        crate::output::ListMultipartUploadsOutput,
        crate::error::ListMultipartUploadsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_multipart_uploads_error(response)
        } else {
            crate::operation_deser::parse_list_multipart_uploads_response(response)
        }
    }
}
/// Operation shape for `ListObjects`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_objects`](crate::client::Client::list_objects).
///
/// See [`crate::client::fluent_builders::ListObjects`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListObjects {
    _private: (),
}
impl ListObjects {
    /// Creates a new builder-style object to manufacture [`ListObjectsInput`](crate::input::ListObjectsInput)
    pub fn builder() -> crate::input::list_objects_input::Builder {
        crate::input::list_objects_input::Builder::default()
    }
    /// Creates a new `ListObjects` operation.
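    ///
    /// # Example
    ///
    /// A minimal sketch of fetching a single `ListObjects` page through the
    /// fluent client, assuming this crate is consumed as `aws_sdk_s3` with
    /// `aws_config` available; bucket and prefix are placeholders.
    ///
    /// ```no_run
    /// # async fn example() -> Result<(), Box<dyn std::error::Error>> {
    /// let config = aws_config::load_from_env().await;
    /// let client = aws_sdk_s3::Client::new(&config);
    /// let resp = client
    ///     .list_objects()
    ///     .bucket("my-bucket")
    ///     .prefix("photos/")
    ///     .send()
    ///     .await?;
    /// for object in resp.contents.unwrap_or_default() {
    ///     println!("{}", object.key.unwrap_or_default());
    /// }
    /// // `is_truncated` signals that another page should be requested via `marker`.
    /// # Ok(()) }
    /// ```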
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for ListObjects { type Output = std::result::Result<crate::output::ListObjectsOutput, crate::error::ListObjectsError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_list_objects_error(response) } else { crate::operation_deser::parse_list_objects_response(response) } } } #[cfg(test)] #[allow(unreachable_code, unused_variables)] mod list_objects_request_test { /// This test validates that parsing respects whitespace /// Test ID: KeysWithWhitespace #[tokio::test] async fn keys_with_whitespace_response() { let expected_output = crate::output::ListObjectsOutput::builder() .set_max_keys(Some(1000)) .set_is_truncated(Some(false)) .set_marker(Some("".to_string())) .set_name(Some("bucketname".to_string())) .set_prefix(Some("".to_string())) .set_contents(Some(vec![ crate::model::Object::builder() .set_key(Some(" ".to_string())) .set_last_modified(Some(aws_smithy_types::DateTime::from_secs(1626452453))) .set_e_tag(Some("\"etag123\"".to_string())) .set_size(Some(0)) .set_owner(Some( crate::model::Owner::builder() .set_id(Some("owner".to_string())) .build(), )) .set_storage_class(Some(crate::model::ObjectStorageClass::from("STANDARD"))) .build(), crate::model::Object::builder() .set_key(Some(" a ".to_string())) .set_last_modified(Some(aws_smithy_types::DateTime::from_secs(1626451330))) .set_e_tag(Some("\"etag123\"".to_string())) .set_size(Some(0)) .set_owner(Some( crate::model::Owner::builder() .set_id(Some("owner".to_string())) .build(), )) .set_storage_class(Some(crate::model::ObjectStorageClass::from("STANDARD"))) .build(), ])) .build(); let http_response = http::response::Builder::new() .status(200) .body(aws_smithy_http::body::SdkBody::from("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<ListBucketResult\n\txmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\n\t<Name>bucketname</Name>\n\t<Prefix></Prefix>\n\t<Marker></Marker>\n\t<MaxKeys>1000</MaxKeys>\n\t<IsTruncated>false</IsTruncated>\n\t<Contents>\n\t\t<Key> </Key>\n\t\t<LastModified>2021-07-16T16:20:53.000Z</LastModified>\n\t\t<ETag>&quot;etag123&quot;</ETag>\n\t\t<Size>0</Size>\n\t\t<Owner>\n\t\t\t<ID>owner</ID>\n\t\t</Owner>\n\t\t<StorageClass>STANDARD</StorageClass>\n\t</Contents>\n\t<Contents>\n\t\t<Key> a </Key>\n\t\t<LastModified>2021-07-16T16:02:10.000Z</LastModified>\n\t\t<ETag>&quot;etag123&quot;</ETag>\n\t\t<Size>0</Size>\n\t\t<Owner>\n\t\t\t<ID>owner</ID>\n\t\t</Owner>\n\t\t<StorageClass>STANDARD</StorageClass>\n\t</Contents>\n</ListBucketResult>\n")) .unwrap(); let mut op_response = aws_smithy_http::operation::Response::new(http_response); use aws_smithy_http::response::ParseHttpResponse; let parser = crate::operation::ListObjects::new(); let parsed = parser.parse_unloaded(&mut op_response); let parsed = parsed.unwrap_or_else(|| { let (http_response, _) = op_response.into_parts(); let http_response = http_response.map(|body|bytes::Bytes::copy_from_slice(body.bytes().unwrap())); <crate::operation::ListObjects as aws_smithy_http::response::ParseHttpResponse>::parse_loaded(&parser, &http_response) }); let parsed = parsed.unwrap(); assert_eq!( parsed.is_truncated, expected_output.is_truncated, "Unexpected value for `is_truncated`" ); assert_eq!( parsed.marker, expected_output.marker, "Unexpected value for `marker`" ); assert_eq!( parsed.next_marker, expected_output.next_marker, "Unexpected value for 
`next_marker`"
        );
        assert_eq!(
            parsed.contents, expected_output.contents,
            "Unexpected value for `contents`"
        );
        assert_eq!(
            parsed.name, expected_output.name,
            "Unexpected value for `name`"
        );
        assert_eq!(
            parsed.prefix, expected_output.prefix,
            "Unexpected value for `prefix`"
        );
        assert_eq!(
            parsed.delimiter, expected_output.delimiter,
            "Unexpected value for `delimiter`"
        );
        assert_eq!(
            parsed.max_keys, expected_output.max_keys,
            "Unexpected value for `max_keys`"
        );
        assert_eq!(
            parsed.common_prefixes, expected_output.common_prefixes,
            "Unexpected value for `common_prefixes`"
        );
        assert_eq!(
            parsed.encoding_type, expected_output.encoding_type,
            "Unexpected value for `encoding_type`"
        );
    }
}
/// Operation shape for `ListObjectsV2`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_objects_v2`](crate::client::Client::list_objects_v2).
///
/// See [`crate::client::fluent_builders::ListObjectsV2`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListObjectsV2 {
    _private: (),
}
impl ListObjectsV2 {
    /// Creates a new builder-style object to manufacture [`ListObjectsV2Input`](crate::input::ListObjectsV2Input)
    pub fn builder() -> crate::input::list_objects_v2_input::Builder {
        crate::input::list_objects_v2_input::Builder::default()
    }
    /// Creates a new `ListObjectsV2` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListObjectsV2 {
    type Output =
        std::result::Result<crate::output::ListObjectsV2Output, crate::error::ListObjectsV2Error>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_objects_v2_error(response)
        } else {
            crate::operation_deser::parse_list_objects_v2_response(response)
        }
    }
}
/// Operation shape for `ListObjectVersions`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_object_versions`](crate::client::Client::list_object_versions).
///
/// See [`crate::client::fluent_builders::ListObjectVersions`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListObjectVersions {
    _private: (),
}
impl ListObjectVersions {
    /// Creates a new builder-style object to manufacture [`ListObjectVersionsInput`](crate::input::ListObjectVersionsInput)
    pub fn builder() -> crate::input::list_object_versions_input::Builder {
        crate::input::list_object_versions_input::Builder::default()
    }
    /// Creates a new `ListObjectVersions` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListObjectVersions {
    type Output = std::result::Result<
        crate::output::ListObjectVersionsOutput,
        crate::error::ListObjectVersionsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_object_versions_error(response)
        } else {
            crate::operation_deser::parse_list_object_versions_response(response)
        }
    }
}
/// Operation shape for `ListParts`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_parts`](crate::client::Client::list_parts).
///
/// See [`crate::client::fluent_builders::ListParts`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct ListParts { _private: (), } impl ListParts { /// Creates a new builder-style object to manufacture [`ListPartsInput`](crate::input::ListPartsInput) pub fn builder() -> crate::input::list_parts_input::Builder { crate::input::list_parts_input::Builder::default() } /// Creates a new `ListParts` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for ListParts { type Output = std::result::Result<crate::output::ListPartsOutput, crate::error::ListPartsError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_list_parts_error(response) } else { crate::operation_deser::parse_list_parts_response(response) } } } /// Operation shape for `PutBucketAccelerateConfiguration`. /// /// This is usually constructed for you using the fluent builder returned by /// [`put_bucket_accelerate_configuration`](crate::client::Client::put_bucket_accelerate_configuration). /// /// See [`crate::client::fluent_builders::PutBucketAccelerateConfiguration`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketAccelerateConfiguration { _private: (), } impl PutBucketAccelerateConfiguration { /// Creates a new builder-style object to manufacture [`PutBucketAccelerateConfigurationInput`](crate::input::PutBucketAccelerateConfigurationInput) pub fn builder() -> crate::input::put_bucket_accelerate_configuration_input::Builder { crate::input::put_bucket_accelerate_configuration_input::Builder::default() } /// Creates a new `PutBucketAccelerateConfiguration` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketAccelerateConfiguration { type Output = std::result::Result< crate::output::PutBucketAccelerateConfigurationOutput, crate::error::PutBucketAccelerateConfigurationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_accelerate_configuration_error(response) } else { crate::operation_deser::parse_put_bucket_accelerate_configuration_response(response) } } } /// Operation shape for `PutBucketAcl`. /// /// This is usually constructed for you using the fluent builder returned by /// [`put_bucket_acl`](crate::client::Client::put_bucket_acl). /// /// See [`crate::client::fluent_builders::PutBucketAcl`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketAcl { _private: (), } impl PutBucketAcl { /// Creates a new builder-style object to manufacture [`PutBucketAclInput`](crate::input::PutBucketAclInput) pub fn builder() -> crate::input::put_bucket_acl_input::Builder { crate::input::put_bucket_acl_input::Builder::default() } /// Creates a new `PutBucketAcl` operation.
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketAcl { type Output = std::result::Result<crate::output::PutBucketAclOutput, crate::error::PutBucketAclError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_acl_error(response) } else { crate::operation_deser::parse_put_bucket_acl_response(response) } } } /// Operation shape for `PutBucketAnalyticsConfiguration`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_bucket_analytics_configuration`](crate::client::Client::put_bucket_analytics_configuration). /// /// See [`crate::client::fluent_builders::PutBucketAnalyticsConfiguration`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketAnalyticsConfiguration { _private: (), } impl PutBucketAnalyticsConfiguration { /// Creates a new builder-style object to manufacture [`PutBucketAnalyticsConfigurationInput`](crate::input::PutBucketAnalyticsConfigurationInput) pub fn builder() -> crate::input::put_bucket_analytics_configuration_input::Builder { crate::input::put_bucket_analytics_configuration_input::Builder::default() } /// Creates a new `PutBucketAnalyticsConfiguration` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketAnalyticsConfiguration { type Output = std::result::Result< crate::output::PutBucketAnalyticsConfigurationOutput, crate::error::PutBucketAnalyticsConfigurationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_analytics_configuration_error(response) } else { crate::operation_deser::parse_put_bucket_analytics_configuration_response(response) } } } /// Operation shape for `PutBucketCors`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_bucket_cors`](crate::client::Client::put_bucket_cors). /// /// See [`crate::client::fluent_builders::PutBucketCors`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketCors { _private: (), } impl PutBucketCors { /// Creates a new builder-style object to manufacture [`PutBucketCorsInput`](crate::input::PutBucketCorsInput) pub fn builder() -> crate::input::put_bucket_cors_input::Builder { crate::input::put_bucket_cors_input::Builder::default() } /// Creates a new `PutBucketCors` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketCors { type Output = std::result::Result<crate::output::PutBucketCorsOutput, crate::error::PutBucketCorsError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_cors_error(response) } else { crate::operation_deser::parse_put_bucket_cors_response(response) } } } /// Operation shape for `PutBucketEncryption`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_bucket_encryption`](crate::client::Client::put_bucket_encryption). 
/// /// See [`crate::client::fluent_builders::PutBucketEncryption`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketEncryption { _private: (), } impl PutBucketEncryption { /// Creates a new builder-style object to manufacture [`PutBucketEncryptionInput`](crate::input::PutBucketEncryptionInput) pub fn builder() -> crate::input::put_bucket_encryption_input::Builder { crate::input::put_bucket_encryption_input::Builder::default() } /// Creates a new `PutBucketEncryption` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketEncryption { type Output = std::result::Result< crate::output::PutBucketEncryptionOutput, crate::error::PutBucketEncryptionError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_encryption_error(response) } else { crate::operation_deser::parse_put_bucket_encryption_response(response) } } } /// Operation shape for `PutBucketIntelligentTieringConfiguration`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_bucket_intelligent_tiering_configuration`](crate::client::Client::put_bucket_intelligent_tiering_configuration). /// /// See [`crate::client::fluent_builders::PutBucketIntelligentTieringConfiguration`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketIntelligentTieringConfiguration { _private: (), } impl PutBucketIntelligentTieringConfiguration { /// Creates a new builder-style object to manufacture [`PutBucketIntelligentTieringConfigurationInput`](crate::input::PutBucketIntelligentTieringConfigurationInput) pub fn builder() -> crate::input::put_bucket_intelligent_tiering_configuration_input::Builder { crate::input::put_bucket_intelligent_tiering_configuration_input::Builder::default() } /// Creates a new `PutBucketIntelligentTieringConfiguration` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketIntelligentTieringConfiguration { type Output = std::result::Result< crate::output::PutBucketIntelligentTieringConfigurationOutput, crate::error::PutBucketIntelligentTieringConfigurationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_intelligent_tiering_configuration_error( response, ) } else { crate::operation_deser::parse_put_bucket_intelligent_tiering_configuration_response( response, ) } } } /// Operation shape for `PutBucketInventoryConfiguration`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_bucket_inventory_configuration`](crate::client::Client::put_bucket_inventory_configuration). /// /// See [`crate::client::fluent_builders::PutBucketInventoryConfiguration`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketInventoryConfiguration { _private: (), } impl PutBucketInventoryConfiguration { /// Creates a new builder-style object to manufacture [`PutBucketInventoryConfigurationInput`](crate::input::PutBucketInventoryConfigurationInput) pub fn builder() -> crate::input::put_bucket_inventory_configuration_input::Builder { crate::input::put_bucket_inventory_configuration_input::Builder::default() } /// Creates a new `PutBucketInventoryConfiguration` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketInventoryConfiguration { type Output = std::result::Result< crate::output::PutBucketInventoryConfigurationOutput, crate::error::PutBucketInventoryConfigurationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_inventory_configuration_error(response) } else { crate::operation_deser::parse_put_bucket_inventory_configuration_response(response) } } } /// Operation shape for `PutBucketLifecycleConfiguration`. /// /// This is usually constructed for you using the fluent builder returned by /// [`put_bucket_lifecycle_configuration`](crate::client::Client::put_bucket_lifecycle_configuration). /// /// See [`crate::client::fluent_builders::PutBucketLifecycleConfiguration`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketLifecycleConfiguration { _private: (), } impl PutBucketLifecycleConfiguration { /// Creates a new builder-style object to manufacture [`PutBucketLifecycleConfigurationInput`](crate::input::PutBucketLifecycleConfigurationInput) pub fn builder() -> crate::input::put_bucket_lifecycle_configuration_input::Builder { crate::input::put_bucket_lifecycle_configuration_input::Builder::default() } /// Creates a new `PutBucketLifecycleConfiguration` operation.
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketLifecycleConfiguration { type Output = std::result::Result< crate::output::PutBucketLifecycleConfigurationOutput, crate::error::PutBucketLifecycleConfigurationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_lifecycle_configuration_error(response) } else { crate::operation_deser::parse_put_bucket_lifecycle_configuration_response(response) } } } #[cfg(test)] #[allow(unreachable_code, unused_variables)] mod put_bucket_lifecycle_configuration_request_test { /// This test validates that the content md5 header is set correctly /// Test ID: PutBucketLifecycleConfiguration #[tokio::test] async fn put_bucket_lifecycle_configuration_request() { let config = crate::config::Config::builder().build(); let input = crate::input::PutBucketLifecycleConfigurationInput::builder() .set_bucket(Some("test-bucket".to_string())) .set_lifecycle_configuration(Some( crate::model::BucketLifecycleConfiguration::builder() .set_rules(Some(vec![crate::model::LifecycleRule::builder() .set_expiration(Some( crate::model::LifecycleExpiration::builder() .set_days(Some(1)) .build(), )) .set_status(Some(crate::model::ExpirationStatus::from("Enabled"))) .set_id(Some("Expire".to_string())) .build()])) .build(), )) .build() .unwrap() .make_operation(&config) .await .expect("operation failed to build"); let (http_request, parts) = input.into_request_response().0.into_parts(); assert_eq!(http_request.method(), "PUT"); assert_eq!(http_request.uri().path(), "/test-bucket"); let expected_headers = [("content-md5", "JP8DTuCSH6yDC8wNGg4+mA==")]; aws_smithy_protocol_test::assert_ok(aws_smithy_protocol_test::validate_headers( &http_request, expected_headers, )); let body = http_request.body().bytes().expect("body should be strict"); aws_smithy_protocol_test::assert_ok( aws_smithy_protocol_test::validate_body(&body, "<LifecycleConfiguration xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">\n <Rule>\n <Expiration>\n <Days>1</Days>\n </Expiration>\n <ID>Expire</ID>\n <Status>Enabled</Status>\n </Rule>\n</LifecycleConfiguration>\n", aws_smithy_protocol_test::MediaType::from("application/xml")) ); } } /// Operation shape for `PutBucketLogging`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_bucket_logging`](crate::client::Client::put_bucket_logging). /// /// See [`crate::client::fluent_builders::PutBucketLogging`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketLogging { _private: (), } impl PutBucketLogging { /// Creates a new builder-style object to manufacture [`PutBucketLoggingInput`](crate::input::PutBucketLoggingInput) pub fn builder() -> crate::input::put_bucket_logging_input::Builder { crate::input::put_bucket_logging_input::Builder::default() } /// Creates a new `PutBucketLogging` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketLogging { type Output = std::result::Result< crate::output::PutBucketLoggingOutput, crate::error::PutBucketLoggingError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_logging_error(response) } else { crate::operation_deser::parse_put_bucket_logging_response(response) } } } /// Operation shape for `PutBucketMetricsConfiguration`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_bucket_metrics_configuration`](crate::client::Client::put_bucket_metrics_configuration). /// /// See [`crate::client::fluent_builders::PutBucketMetricsConfiguration`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketMetricsConfiguration { _private: (), } impl PutBucketMetricsConfiguration { /// Creates a new builder-style object to manufacture [`PutBucketMetricsConfigurationInput`](crate::input::PutBucketMetricsConfigurationInput) pub fn builder() -> crate::input::put_bucket_metrics_configuration_input::Builder { crate::input::put_bucket_metrics_configuration_input::Builder::default() } /// Creates a new `PutBucketMetricsConfiguration` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketMetricsConfiguration { type Output = std::result::Result< crate::output::PutBucketMetricsConfigurationOutput, crate::error::PutBucketMetricsConfigurationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_metrics_configuration_error(response) } else { crate::operation_deser::parse_put_bucket_metrics_configuration_response(response) } } } /// Operation shape for `PutBucketNotificationConfiguration`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_bucket_notification_configuration`](crate::client::Client::put_bucket_notification_configuration). /// /// See [`crate::client::fluent_builders::PutBucketNotificationConfiguration`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketNotificationConfiguration { _private: (), } impl PutBucketNotificationConfiguration { /// Creates a new builder-style object to manufacture [`PutBucketNotificationConfigurationInput`](crate::input::PutBucketNotificationConfigurationInput) pub fn builder() -> crate::input::put_bucket_notification_configuration_input::Builder { crate::input::put_bucket_notification_configuration_input::Builder::default() } /// Creates a new `PutBucketNotificationConfiguration` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketNotificationConfiguration { type Output = std::result::Result< crate::output::PutBucketNotificationConfigurationOutput, crate::error::PutBucketNotificationConfigurationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_notification_configuration_error(response) } else { crate::operation_deser::parse_put_bucket_notification_configuration_response(response) } } } /// Operation shape for `PutBucketOwnershipControls`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_bucket_ownership_controls`](crate::client::Client::put_bucket_ownership_controls). /// /// See [`crate::client::fluent_builders::PutBucketOwnershipControls`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketOwnershipControls { _private: (), } impl PutBucketOwnershipControls { /// Creates a new builder-style object to manufacture [`PutBucketOwnershipControlsInput`](crate::input::PutBucketOwnershipControlsInput) pub fn builder() -> crate::input::put_bucket_ownership_controls_input::Builder { crate::input::put_bucket_ownership_controls_input::Builder::default() } /// Creates a new `PutBucketOwnershipControls` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketOwnershipControls { type Output = std::result::Result< crate::output::PutBucketOwnershipControlsOutput, crate::error::PutBucketOwnershipControlsError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_ownership_controls_error(response) } else { crate::operation_deser::parse_put_bucket_ownership_controls_response(response) } } } /// Operation shape for `PutBucketPolicy`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_bucket_policy`](crate::client::Client::put_bucket_policy). /// /// See [`crate::client::fluent_builders::PutBucketPolicy`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketPolicy { _private: (), } impl PutBucketPolicy { /// Creates a new builder-style object to manufacture [`PutBucketPolicyInput`](crate::input::PutBucketPolicyInput) pub fn builder() -> crate::input::put_bucket_policy_input::Builder { crate::input::put_bucket_policy_input::Builder::default() } /// Creates a new `PutBucketPolicy` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketPolicy { type Output = std::result::Result< crate::output::PutBucketPolicyOutput, crate::error::PutBucketPolicyError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_policy_error(response) } else { crate::operation_deser::parse_put_bucket_policy_response(response) } } } /// Operation shape for `PutBucketReplication`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_bucket_replication`](crate::client::Client::put_bucket_replication). 
/// /// See [`crate::client::fluent_builders::PutBucketReplication`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketReplication { _private: (), } impl PutBucketReplication { /// Creates a new builder-style object to manufacture [`PutBucketReplicationInput`](crate::input::PutBucketReplicationInput) pub fn builder() -> crate::input::put_bucket_replication_input::Builder { crate::input::put_bucket_replication_input::Builder::default() } /// Creates a new `PutBucketReplication` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketReplication { type Output = std::result::Result< crate::output::PutBucketReplicationOutput, crate::error::PutBucketReplicationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_replication_error(response) } else { crate::operation_deser::parse_put_bucket_replication_response(response) } } } /// Operation shape for `PutBucketRequestPayment`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_bucket_request_payment`](crate::client::Client::put_bucket_request_payment). /// /// See [`crate::client::fluent_builders::PutBucketRequestPayment`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketRequestPayment { _private: (), } impl PutBucketRequestPayment { /// Creates a new builder-style object to manufacture [`PutBucketRequestPaymentInput`](crate::input::PutBucketRequestPaymentInput) pub fn builder() -> crate::input::put_bucket_request_payment_input::Builder { crate::input::put_bucket_request_payment_input::Builder::default() } /// Creates a new `PutBucketRequestPayment` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketRequestPayment { type Output = std::result::Result< crate::output::PutBucketRequestPaymentOutput, crate::error::PutBucketRequestPaymentError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_request_payment_error(response) } else { crate::operation_deser::parse_put_bucket_request_payment_response(response) } } } /// Operation shape for `PutBucketTagging`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_bucket_tagging`](crate::client::Client::put_bucket_tagging). /// /// See [`crate::client::fluent_builders::PutBucketTagging`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketTagging { _private: (), } impl PutBucketTagging { /// Creates a new builder-style object to manufacture [`PutBucketTaggingInput`](crate::input::PutBucketTaggingInput) pub fn builder() -> crate::input::put_bucket_tagging_input::Builder { crate::input::put_bucket_tagging_input::Builder::default() } /// Creates a new `PutBucketTagging` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketTagging { type Output = std::result::Result< crate::output::PutBucketTaggingOutput, crate::error::PutBucketTaggingError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_tagging_error(response) } else { crate::operation_deser::parse_put_bucket_tagging_response(response) } } } /// Operation shape for `PutBucketVersioning`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_bucket_versioning`](crate::client::Client::put_bucket_versioning). /// /// See [`crate::client::fluent_builders::PutBucketVersioning`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketVersioning { _private: (), } impl PutBucketVersioning { /// Creates a new builder-style object to manufacture [`PutBucketVersioningInput`](crate::input::PutBucketVersioningInput) pub fn builder() -> crate::input::put_bucket_versioning_input::Builder { crate::input::put_bucket_versioning_input::Builder::default() } /// Creates a new `PutBucketVersioning` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketVersioning { type Output = std::result::Result< crate::output::PutBucketVersioningOutput, crate::error::PutBucketVersioningError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_versioning_error(response) } else { crate::operation_deser::parse_put_bucket_versioning_response(response) } } } /// Operation shape for `PutBucketWebsite`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_bucket_website`](crate::client::Client::put_bucket_website). /// /// See [`crate::client::fluent_builders::PutBucketWebsite`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutBucketWebsite { _private: (), } impl PutBucketWebsite { /// Creates a new builder-style object to manufacture [`PutBucketWebsiteInput`](crate::input::PutBucketWebsiteInput) pub fn builder() -> crate::input::put_bucket_website_input::Builder { crate::input::put_bucket_website_input::Builder::default() } /// Creates a new `PutBucketWebsite` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutBucketWebsite { type Output = std::result::Result< crate::output::PutBucketWebsiteOutput, crate::error::PutBucketWebsiteError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_bucket_website_error(response) } else { crate::operation_deser::parse_put_bucket_website_response(response) } } } /// Operation shape for `PutObject`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_object`](crate::client::Client::put_object). /// /// See [`crate::client::fluent_builders::PutObject`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutObject { _private: (), } impl PutObject { /// Creates a new builder-style object to manufacture [`PutObjectInput`](crate::input::PutObjectInput) pub fn builder() -> crate::input::put_object_input::Builder { crate::input::put_object_input::Builder::default() } /// Creates a new `PutObject` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutObject { type Output = std::result::Result<crate::output::PutObjectOutput, crate::error::PutObjectError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_object_error(response) } else { crate::operation_deser::parse_put_object_response(response) } } } #[cfg(test)] #[allow(unreachable_code, unused_variables)] mod put_object_request_test { /// This test validates that if a content-type is specified, that only one content-type header is sent /// Test ID: DontSendDuplicateContentType #[tokio::test] async fn dont_send_duplicate_content_type_request() { let config = crate::config::Config::builder().build(); let input = crate::input::PutObjectInput::builder() .set_bucket(Some("test-bucket".to_string())) .set_key(Some("test-key".to_string())) .set_content_type(Some("text/html".to_string())) .build() .unwrap() .make_operation(&config) .await .expect("operation failed to build"); let (http_request, parts) = input.into_request_response().0.into_parts(); assert_eq!(http_request.method(), "PUT"); assert_eq!(http_request.uri().path(), "/test-bucket/test-key"); let expected_headers = [("content-type", "text/html")]; aws_smithy_protocol_test::assert_ok(aws_smithy_protocol_test::validate_headers( &http_request, expected_headers, )); } /// This test validates that if a content-length is specified, that only one content-length header is sent /// Test ID: DontSendDuplicateContentLength #[tokio::test] async fn dont_send_duplicate_content_length_request() { let config = crate::config::Config::builder().build(); let input = crate::input::PutObjectInput::builder() .set_bucket(Some("test-bucket".to_string())) .set_key(Some("test-key".to_string())) .set_content_length(Some(2)) .set_body(Some(aws_smithy_http::byte_stream::ByteStream::from_static( b"ab", ))) .build() .unwrap() .make_operation(&config) .await .expect("operation failed to build"); let (http_request, parts) = input.into_request_response().0.into_parts(); assert_eq!(http_request.method(), "PUT"); assert_eq!(http_request.uri().path(), "/test-bucket/test-key"); let expected_headers = [("content-length", "2")]; aws_smithy_protocol_test::assert_ok(aws_smithy_protocol_test::validate_headers( &http_request, expected_headers, )); } } /// Operation shape for `PutObjectAcl`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_object_acl`](crate::client::Client::put_object_acl). /// /// See [`crate::client::fluent_builders::PutObjectAcl`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutObjectAcl { _private: (), } impl PutObjectAcl { /// Creates a new builder-style object to manufacture [`PutObjectAclInput`](crate::input::PutObjectAclInput) pub fn builder() -> crate::input::put_object_acl_input::Builder { crate::input::put_object_acl_input::Builder::default() } /// Creates a new `PutObjectAcl` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutObjectAcl { type Output = std::result::Result<crate::output::PutObjectAclOutput, crate::error::PutObjectAclError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_object_acl_error(response) } else { crate::operation_deser::parse_put_object_acl_response(response) } } } /// Operation shape for `PutObjectLegalHold`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_object_legal_hold`](crate::client::Client::put_object_legal_hold). /// /// See [`crate::client::fluent_builders::PutObjectLegalHold`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutObjectLegalHold { _private: (), } impl PutObjectLegalHold { /// Creates a new builder-style object to manufacture [`PutObjectLegalHoldInput`](crate::input::PutObjectLegalHoldInput) pub fn builder() -> crate::input::put_object_legal_hold_input::Builder { crate::input::put_object_legal_hold_input::Builder::default() } /// Creates a new `PutObjectLegalHold` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutObjectLegalHold { type Output = std::result::Result< crate::output::PutObjectLegalHoldOutput, crate::error::PutObjectLegalHoldError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_object_legal_hold_error(response) } else { crate::operation_deser::parse_put_object_legal_hold_response(response) } } } /// Operation shape for `PutObjectLockConfiguration`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_object_lock_configuration`](crate::client::Client::put_object_lock_configuration). /// /// See [`crate::client::fluent_builders::PutObjectLockConfiguration`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutObjectLockConfiguration { _private: (), } impl PutObjectLockConfiguration { /// Creates a new builder-style object to manufacture [`PutObjectLockConfigurationInput`](crate::input::PutObjectLockConfigurationInput) pub fn builder() -> crate::input::put_object_lock_configuration_input::Builder { crate::input::put_object_lock_configuration_input::Builder::default() } /// Creates a new `PutObjectLockConfiguration` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutObjectLockConfiguration { type Output = std::result::Result< crate::output::PutObjectLockConfigurationOutput, crate::error::PutObjectLockConfigurationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_object_lock_configuration_error(response) } else { crate::operation_deser::parse_put_object_lock_configuration_response(response) } } } /// Operation shape for `PutObjectRetention`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_object_retention`](crate::client::Client::put_object_retention). 
/// /// See [`crate::client::fluent_builders::PutObjectRetention`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutObjectRetention { _private: (), } impl PutObjectRetention { /// Creates a new builder-style object to manufacture [`PutObjectRetentionInput`](crate::input::PutObjectRetentionInput) pub fn builder() -> crate::input::put_object_retention_input::Builder { crate::input::put_object_retention_input::Builder::default() } /// Creates a new `PutObjectRetention` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutObjectRetention { type Output = std::result::Result< crate::output::PutObjectRetentionOutput, crate::error::PutObjectRetentionError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_object_retention_error(response) } else { crate::operation_deser::parse_put_object_retention_response(response) } } } /// Operation shape for `PutObjectTagging`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_object_tagging`](crate::client::Client::put_object_tagging). /// /// See [`crate::client::fluent_builders::PutObjectTagging`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutObjectTagging { _private: (), } impl PutObjectTagging { /// Creates a new builder-style object to manufacture [`PutObjectTaggingInput`](crate::input::PutObjectTaggingInput) pub fn builder() -> crate::input::put_object_tagging_input::Builder { crate::input::put_object_tagging_input::Builder::default() } /// Creates a new `PutObjectTagging` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutObjectTagging { type Output = std::result::Result< crate::output::PutObjectTaggingOutput, crate::error::PutObjectTaggingError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_object_tagging_error(response) } else { crate::operation_deser::parse_put_object_tagging_response(response) } } } /// Operation shape for `PutPublicAccessBlock`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`put_public_access_block`](crate::client::Client::put_public_access_block). /// /// See [`crate::client::fluent_builders::PutPublicAccessBlock`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct PutPublicAccessBlock { _private: (), } impl PutPublicAccessBlock { /// Creates a new builder-style object to manufacture [`PutPublicAccessBlockInput`](crate::input::PutPublicAccessBlockInput) pub fn builder() -> crate::input::put_public_access_block_input::Builder { crate::input::put_public_access_block_input::Builder::default() } /// Creates a new `PutPublicAccessBlock` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for PutPublicAccessBlock { type Output = std::result::Result< crate::output::PutPublicAccessBlockOutput, crate::error::PutPublicAccessBlockError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_put_public_access_block_error(response) } else { crate::operation_deser::parse_put_public_access_block_response(response) } } } /// Operation shape for `RestoreObject`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`restore_object`](crate::client::Client::restore_object). /// /// See [`crate::client::fluent_builders::RestoreObject`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct RestoreObject { _private: (), } impl RestoreObject { /// Creates a new builder-style object to manufacture [`RestoreObjectInput`](crate::input::RestoreObjectInput) pub fn builder() -> crate::input::restore_object_input::Builder { crate::input::restore_object_input::Builder::default() } /// Creates a new `RestoreObject` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for RestoreObject { type Output = std::result::Result<crate::output::RestoreObjectOutput, crate::error::RestoreObjectError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_restore_object_error(response) } else { crate::operation_deser::parse_restore_object_response(response) } } } /// Operation shape for `SelectObjectContent`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`select_object_content`](crate::client::Client::select_object_content). /// /// See [`crate::client::fluent_builders::SelectObjectContent`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct SelectObjectContent { _private: (), } impl SelectObjectContent { /// Creates a new builder-style object to manufacture [`SelectObjectContentInput`](crate::input::SelectObjectContentInput) pub fn builder() -> crate::input::select_object_content_input::Builder { crate::input::select_object_content_input::Builder::default() } /// Creates a new `SelectObjectContent` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseHttpResponse for SelectObjectContent { type Output = std::result::Result< crate::output::SelectObjectContentOutput, crate::error::SelectObjectContentError, >; fn parse_unloaded( &self, response: &mut aws_smithy_http::operation::Response, ) -> Option<Self::Output> { // This is an error, defer to the non-streaming parser if !response.http().status().is_success() && response.http().status().as_u16() != 200 { return None; } Some(crate::operation_deser::parse_select_object_content( response, )) } fn parse_loaded(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { // if streaming, we only hit this case if its an error crate::operation_deser::parse_select_object_content_error(response) } } /// Operation shape for `UploadPart`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`upload_part`](crate::client::Client::upload_part). 
/// /// See [`crate::client::fluent_builders::UploadPart`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct UploadPart { _private: (), } impl UploadPart { /// Creates a new builder-style object to manufacture [`UploadPartInput`](crate::input::UploadPartInput) pub fn builder() -> crate::input::upload_part_input::Builder { crate::input::upload_part_input::Builder::default() } /// Creates a new `UploadPart` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for UploadPart { type Output = std::result::Result<crate::output::UploadPartOutput, crate::error::UploadPartError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_upload_part_error(response) } else { crate::operation_deser::parse_upload_part_response(response) } } } /// Operation shape for `UploadPartCopy`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`upload_part_copy`](crate::client::Client::upload_part_copy). /// /// See [`crate::client::fluent_builders::UploadPartCopy`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct UploadPartCopy { _private: (), } impl UploadPartCopy { /// Creates a new builder-style object to manufacture [`UploadPartCopyInput`](crate::input::UploadPartCopyInput) pub fn builder() -> crate::input::upload_part_copy_input::Builder { crate::input::upload_part_copy_input::Builder::default() } /// Creates a new `UploadPartCopy` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for UploadPartCopy { type Output = std::result::Result<crate::output::UploadPartCopyOutput, crate::error::UploadPartCopyError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_upload_part_copy_error(response) } else { crate::operation_deser::parse_upload_part_copy_response(response) } } } /// Operation shape for `WriteGetObjectResponse`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`write_get_object_response`](crate::client::Client::write_get_object_response). /// /// See [`crate::client::fluent_builders::WriteGetObjectResponse`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct WriteGetObjectResponse { _private: (), } impl WriteGetObjectResponse { /// Creates a new builder-style object to manufacture [`WriteGetObjectResponseInput`](crate::input::WriteGetObjectResponseInput) pub fn builder() -> crate::input::write_get_object_response_input::Builder { crate::input::write_get_object_response_input::Builder::default() } /// Creates a new `WriteGetObjectResponse` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for WriteGetObjectResponse { type Output = std::result::Result< crate::output::WriteGetObjectResponseOutput, crate::error::WriteGetObjectResponseError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_write_get_object_response_error(response) } else { crate::operation_deser::parse_write_get_object_response_response(response) } } }
{ Self { _private: () } }
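The `ParseStrictResponse` impls above all share one branch: non-success statuses are routed to the generated error deserializer, everything else to the response deserializer. Below is a minimal sketch (not from the original file) of driving one of them by hand, assuming the generated `crate::operation` and `crate::output` modules are in scope; the XML body is a hypothetical, truncated `ListBucketResult`.

use aws_smithy_http::response::ParseStrictResponse;

fn parse_list_objects_by_hand() {
    // Hypothetical, truncated ListBucketResult body for illustration only.
    let response = http::Response::builder()
        .status(200)
        .body(bytes::Bytes::from_static(
            b"<?xml version=\"1.0\" encoding=\"UTF-8\"?><ListBucketResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\"><Name>bucketname</Name><MaxKeys>1000</MaxKeys><IsTruncated>false</IsTruncated></ListBucketResult>",
        ))
        .unwrap();
    let parser = crate::operation::ListObjects::new();
    // A 2xx status reaches parse_list_objects_response; any other status
    // would be handed to parse_list_objects_error instead.
    let output = parser.parse(&response).expect("response should deserialize");
    assert_eq!(output.name.as_deref(), Some("bucketname"));
}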
budget_usage_info.py
import functools from spaceone.api.cost_analysis.v1 import budget_usage_pb2 from spaceone.core.pygrpc.message_type import * from spaceone.core import utils from spaceone.cost_analysis.model.budget_usage_model import BudgetUsage __all__ = ['BudgetUsageInfo', 'BudgetUsagesInfo'] def BudgetUsageInfo(budget_usage_vo: BudgetUsage, minimal=False):
def BudgetUsagesInfo(budget_usage_vos, total_count, **kwargs): return budget_usage_pb2.BudgetUsagesInfo(results=list( map(functools.partial(BudgetUsageInfo, **kwargs), budget_usage_vos)), total_count=total_count)
info = { 'budget_id': budget_usage_vo.budget_id, 'name': budget_usage_vo.name, 'date': budget_usage_vo.date, 'usd_cost': budget_usage_vo.usd_cost, 'limit': budget_usage_vo.limit } if not minimal: info.update({ 'cost_types': change_struct_type(budget_usage_vo.cost_types.to_dict()) if budget_usage_vo.cost_types else None, 'domain_id': budget_usage_vo.domain_id, 'updated_at': utils.datetime_to_iso8601(budget_usage_vo.updated_at) }) return budget_usage_pb2.BudgetUsageInfo(**info)
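A small usage sketch for the two converters above; `budget_usage_vos` and `total_count` are hypothetical stand-ins for a model query result, and `minimal=True` is forwarded through `**kwargs` to `BudgetUsageInfo` so each item keeps only its always-present fields.

def serialize_budget_usage_page(budget_usage_vos, total_count):
    # Hypothetical call site: the vos would normally come from a
    # BudgetUsage query; total_count from the same query's count.
    return BudgetUsagesInfo(budget_usage_vos, total_count, minimal=True)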
day25.rs
use std::collections::HashMap; use std::collections::{HashSet, VecDeque}; use std::env; use std::fs::File; use std::io::{self, Write}; use std::io::{BufRead, BufReader}; use std::process; use std::time::{SystemTime, UNIX_EPOCH}; extern crate adventofcode; use adventofcode::*; use std::time::{Duration, Instant}; fn get_value(ops: &Vec<i64>, value: i64, mode: i64, base: i64) -> i64 { if mode == 0 { return ops[value as usize]; } else if mode == 1 { return value; } else if mode == 2 { return ops[(value + base) as usize]; } assert!(false); return -1; } fn get_pos(value: i64, mode: i64, base: i64) -> i64 { let mut pos = 0; if mode == 0 { pos = value } else if mode == 2 { pos = value + base; } else { assert!(false); } return pos; } #[derive(Clone)] struct
{ ops: Vec<i64>, index: usize, base: i64, } fn process_ops(vm: &mut Vm, input: &mut VecDeque<i64>) -> Vec<i64> { let ref mut ops = vm.ops; let mut res = Vec::new(); // println!("processing ops: {:?}", ops); let mut index = vm.index; let mut base = vm.base; let mut read_input = false; while index < ops.len() { let mut value = ops[index]; // println!("execute {}", value); let op = value % 100; value /= 100; let ma = value % 10; value /= 10; let mb = value % 10; value /= 10; let mc = value % 10; if op == 99 { break; } else { if op == 1 { let a = ops[index + 1]; let b = ops[index + 2]; let c = ops[index + 3]; // assert!(mc == 0); let pos = get_pos(c, mc, base); ops[pos as usize] = get_value(&ops, a, ma, base) + get_value(&ops, b, mb, base); index += 4; } else if op == 2 { let a = ops[index + 1]; let b = ops[index + 2]; let c = ops[index + 3]; let pos = get_pos(c, mc, base); ops[pos as usize] = get_value(&ops, a, ma, base) * get_value(&ops, b, mb, base); index += 4; } else if op == 3 { let a = ops[index + 1]; if input.len() == 0 { vm.index = index; vm.base = base; return res; } // if read_input { // vm.index = index; // vm.base = base; // return res; // } // read_input = true; // assert!(ma == 0); // ops[a as usize] = get_value(&ops, input, 0); assert!(input.len() > 0); let pos = get_pos(a, ma, base); ops[pos as usize] = input.pop_front().unwrap(); index += 2; } else if op == 4 { let a = ops[index + 1]; let out = get_value(&ops, a, ma, base); res.push(out); // println!(" >>> {}", out); index += 2; } else if op == 5 { let a = ops[index + 1]; let b = ops[index + 2]; if get_value(&ops, a, ma, base) != 0 { index = get_value(&ops, b, mb, base) as usize; } else { index += 3; } } else if op == 6 { let a = ops[index + 1]; let b = ops[index + 2]; if get_value(&ops, a, ma, base) == 0 { index = get_value(&ops, b, mb, base) as usize; } else { index += 3; } } else if op == 7 { let a = ops[index + 1]; let b = ops[index + 2]; let c = ops[index + 3]; let pos = get_pos(c, mc, base); if get_value(&ops, a, ma, base) < get_value(&ops, b, mb, base) { ops[pos as usize] = 1; } else { ops[pos as usize] = 0; } index += 4; } else if op == 8 { let a = ops[index + 1]; let b = ops[index + 2]; let c = ops[index + 3]; // assert!(mc == 0); let pos = get_pos(c, mc, base); if get_value(&ops, a, ma, base) == get_value(&ops, b, mb, base) { ops[pos as usize] = 1; } else { ops[pos as usize] = 0; } index += 4; } else if op == 9 { let a = ops[index + 1]; base += get_value(&ops, a, ma, base); index += 2; } else { println!("Unknown op: {}", op); assert!(false); } } } return res; } fn send_command(vm: &mut Vm, cmd: &String) -> Vec<String> { // let mut vm = _vm.clone(); let mut input = VecDeque::new(); for c in cmd.chars() { input.push_back(c as u8 as i64); } input.push_back('\n' as i64); let res = process_ops(vm, &mut input); let mut ans = Vec::new(); let mut s = "".to_string(); for c in res { if c == '\n' as u8 as i64 { ans.push(s); s = "".to_string(); } else { s.push(c as u8 as char); } } ans } fn find_message(output: &Vec<String>) -> bool { let message = "Alert! 
Droids on this ship are"; for line in output { if line.find(message).is_some() { return true; } } return false; } fn show_items(output: &Vec<String>) { let mut inside_items = false; for line in output { if inside_items { if line.len() > 2 { if &line[0..1] == "-" { let item = line[2..].to_string(); println!("item: {}", item); } else { inside_items = false; } } else { inside_items = false; } } if line.to_string() == "Items here:".to_string() { inside_items = true; } } } fn get_doors(output: &Vec<String>) -> Vec<String> { let mut res = Vec::new(); let mut inside_doors = false; for line in output { if inside_doors { if line.len() > 2 { if &line[0..1] == "-" { let door = line[2..].to_string(); // println!("item: {}", item); res.push(door); } else { inside_doors = false; } } else { inside_doors = false; } } if line.to_string() == "Doors here lead:".to_string() { inside_doors = true; } } res } fn get_location(output: &Vec<String>) -> String { for line in output { if line.len() > 2 && &line[0..2] == "==" { let ln = line.len(); return line[3..ln - 3].to_string(); } } return "".to_string(); } fn try_drops(_vm: &Vm) -> (i32, Vec<String>) { let mut vm = _vm.clone(); let mut items = Vec::new(); let inv_lines = send_command(&mut vm, &"inv".to_string()); println!("got inv_lines = {:?}", inv_lines); for line in &inv_lines { if line.len() > 2 && line[0..1].to_string() == "-".to_string() { items.push(line[2..].to_string()); } } println!("got {} items: {:?}", items.len(), items); let num = items.len(); let mut file = File::create("log").unwrap(); for mask in 0..=(1 << num) { // println!("trying mask {}", mask); let mut vm_new = vm.clone(); let output = try_drop(&mut vm_new, mask, &items); if !find_message(&output) { println!("trying mask {}", mask); for line in &output { println!("{}", line); } return (mask as i32, items); } file.write_all(format!("trying mask {}", mask).as_bytes()); for line in output { file.write_all(line.as_bytes()); file.write_all(b"\n"); } } return (-1, items); } fn try_drop(vm: &mut Vm, mask: usize, items: &Vec<String>) -> Vec<String> { let mut output = Vec::new(); for i in 0..items.len() { let bit = 1 << i; if mask & bit > 0 { let os = send_command(vm, &format!("drop {}", items[i]).to_string()); for o in os { output.push(o); } } } let os = send_command(vm, &"north".to_string()); for o in os { output.push(o); } output } struct MiniMap { x: i32, y: i32, grid: HashMap<(i32, i32), char>, } fn get_dxdy(door: &str) -> (i32, i32) { if door == "north" { (0, -1) } else if door == "south" { (0, 1) } else if door == "west" { (-1, 0) } else if door == "east" { (1, 0) } else { (0, 0) } } fn print_map(mini_map: &MiniMap) { let mut minx = 0; let mut maxx = 0; let mut miny = 0; let mut maxy = 0; for (k, v) in &mini_map.grid { let (x, y) = k; minx = minx.min(*x); miny = miny.min(*y); maxx = maxx.max(*x); maxy = maxy.max(*y); } println!(""); for y in miny..=maxy { for x in minx..=maxx { if x == mini_map.x && y == mini_map.y { print!("X"); continue; } match mini_map.grid.get(&(x, y)) { Some(c) => { print!("{}", c); } _ => print!(" "), } } println!(""); } println!(""); } fn make_step( vm: &mut Vm, action: &String, now_loc: &mut String, visited: &mut HashSet<(String, String)>, options: &mut HashSet<(String, String)>, mini_map: &mut MiniMap, file: &mut File, with_logging: bool, log: &mut File, ) { let line = action.clone(); println!(" at {} got: {}", now_loc, line); file.write_all(line.as_bytes()); file.write_all(b"\n"); if now_loc.to_string() != "".to_string() { visited.insert((now_loc.to_string(), 
line.to_string())); } let output = send_command(vm, &line); show_items(&output); let location = get_location(&output); { let (dx, dy) = get_dxdy(&line); let nx = mini_map.x + 3 * dx; let ny = mini_map.y + 3 * dy; if location != "".to_string() { mini_map.x = nx; mini_map.y = ny; } } let doors = get_doors(&output); *now_loc = location.clone(); for door in &doors { options.insert((location.to_string(), door.to_string())); } for c_dx in -1..=1 { for c_dy in -1..=1 { let key = (mini_map.x + c_dx, mini_map.y + c_dy); if c_dx == 0 && c_dy == 0 { mini_map.grid.entry(key).or_insert('.'); } let mut found = false; for door in &doors { let (dx, dy) = get_dxdy(&door); let key2 = (mini_map.x + dx, mini_map.y + dy); if key2 == key { found = true; break; } } if found { mini_map.grid.entry(key).or_insert('.'); } else { mini_map.grid.entry(key).or_insert('#'); } } } if with_logging { for o in &output { log.write_all(o.as_bytes()); log.write_all(b"\n"); } } else { for o in &output { println!("{}", o); } for key in options.iter() { if !visited.contains(key) { // println!("Not visited: {:?}", key); } } print_map(&mini_map); } } pub fn part1(lines: &Vec<String>) -> i64 { let mut str_ops = lines[0].split(",").collect::<Vec<&str>>(); // println!("ops: {:?}", ops); let mut ops = Vec::new(); for str_op in str_ops { ops.push(str_op.parse::<i64>().unwrap()); } while ops.len() < 10000 { ops.push(0); } let mut vm = Vm { ops: ops.clone(), index: 0, base: 0, }; let now = SystemTime::now(); let ts = now.duration_since(UNIX_EPOCH).unwrap().as_secs(); let mut file = File::create(format!("foo-{}.txt", ts)).unwrap(); let mut log = File::create("log.txt").unwrap(); let mut mini_map = MiniMap { x: 0, y: 0, grid: HashMap::new(), }; let mut now_loc = "".to_string(); let mut visited = HashSet::new(); let mut options = HashSet::new(); // let mut f = File::open("foo-empty.txt").unwrap(); let mut f = File::open("foo.txt").unwrap(); let ff = BufReader::new(&f); make_step( &mut vm, &"take".to_string(), &mut now_loc, &mut visited, &mut options, &mut mini_map, &mut file, false, &mut log, ); for line_ in ff.lines() { let line = line_.unwrap(); make_step( &mut vm, &line.to_string(), &mut now_loc, &mut visited, &mut options, &mut mini_map, &mut file, false, &mut log, ); } let (mask, items) = try_drops(&vm); println!("found mask {}", mask); if mask != -1 { try_drop(&mut vm, mask as usize, &items); } // let mut file = File::create(format!("foo.txt", ts)).unwrap(); loop { println!("enter input: "); let stdin = io::stdin(); let line = stdin.lock().lines().next().unwrap().unwrap(); make_step( &mut vm, &line.to_string(), &mut now_loc, &mut visited, &mut options, &mut mini_map, &mut file, false, &mut log, ); } // Items in your inventory: // - festive hat // - space heater // - loom // - space law space brochure // - molten lava // - sand // - photons // - pointer // - wreath } // #[cfg(test)] // mod tests { // use super::*; // #[test] // fn test_part1() { // let lines = read_input("day19/in.txt"); // assert_eq!(part1(&lines), 150); // } // #[test] // #[ignore] // fn test_part2() { // let lines = read_input("day19/in.txt"); // assert_eq!(part2(&lines), 12201460); // } // } fn main() { let lines = read_input("day25/in.txt"); println!("part1 = {}", part1(&lines)); // let lines = read_input("day19/t0.txt"); // println!("part2 = {}", part2_file(&lines)); }
Vm
mb6_16b_word2.rs
#[doc = "Reader of register MB6_16B_WORD2"] pub type R = crate::R<u32, super::MB6_16B_WORD2>; #[doc = "Writer for register MB6_16B_WORD2"] pub type W = crate::W<u32, super::MB6_16B_WORD2>; #[doc = "Register MB6_16B_WORD2 `reset()`'s with value 0"] impl crate::ResetValue for super::MB6_16B_WORD2 { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `DATA_BYTE_11`"] pub type DATA_BYTE_11_R = crate::R<u8, u8>; #[doc = "Write proxy for field `DATA_BYTE_11`"] pub struct DATA_BYTE_11_W<'a> { w: &'a mut W, } impl<'a> DATA_BYTE_11_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0xff) | ((value as u32) & 0xff); self.w } } #[doc = "Reader of field `DATA_BYTE_10`"] pub type DATA_BYTE_10_R = crate::R<u8, u8>; #[doc = "Write proxy for field `DATA_BYTE_10`"] pub struct
<'a> {
    w: &'a mut W,
}
impl<'a> DATA_BYTE_10_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0xff << 8)) | (((value as u32) & 0xff) << 8);
        self.w
    }
}
#[doc = "Reader of field `DATA_BYTE_9`"]
pub type DATA_BYTE_9_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DATA_BYTE_9`"]
pub struct DATA_BYTE_9_W<'a> {
    w: &'a mut W,
}
impl<'a> DATA_BYTE_9_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0xff << 16)) | (((value as u32) & 0xff) << 16);
        self.w
    }
}
#[doc = "Reader of field `DATA_BYTE_8`"]
pub type DATA_BYTE_8_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `DATA_BYTE_8`"]
pub struct DATA_BYTE_8_W<'a> {
    w: &'a mut W,
}
impl<'a> DATA_BYTE_8_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0xff << 24)) | (((value as u32) & 0xff) << 24);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:7 - Data byte 0 of Rx/Tx frame."]
    #[inline(always)]
    pub fn data_byte_11(&self) -> DATA_BYTE_11_R {
        DATA_BYTE_11_R::new((self.bits & 0xff) as u8)
    }
    #[doc = "Bits 8:15 - Data byte 1 of Rx/Tx frame."]
    #[inline(always)]
    pub fn data_byte_10(&self) -> DATA_BYTE_10_R {
        DATA_BYTE_10_R::new(((self.bits >> 8) & 0xff) as u8)
    }
    #[doc = "Bits 16:23 - Data byte 2 of Rx/Tx frame."]
    #[inline(always)]
    pub fn data_byte_9(&self) -> DATA_BYTE_9_R {
        DATA_BYTE_9_R::new(((self.bits >> 16) & 0xff) as u8)
    }
    #[doc = "Bits 24:31 - Data byte 3 of Rx/Tx frame."]
    #[inline(always)]
    pub fn data_byte_8(&self) -> DATA_BYTE_8_R {
        DATA_BYTE_8_R::new(((self.bits >> 24) & 0xff) as u8)
    }
}
impl W {
    #[doc = "Bits 0:7 - Data byte 0 of Rx/Tx frame."]
    #[inline(always)]
    pub fn data_byte_11(&mut self) -> DATA_BYTE_11_W {
        DATA_BYTE_11_W { w: self }
    }
    #[doc = "Bits 8:15 - Data byte 1 of Rx/Tx frame."]
    #[inline(always)]
    pub fn data_byte_10(&mut self) -> DATA_BYTE_10_W {
        DATA_BYTE_10_W { w: self }
    }
    #[doc = "Bits 16:23 - Data byte 2 of Rx/Tx frame."]
    #[inline(always)]
    pub fn data_byte_9(&mut self) -> DATA_BYTE_9_W {
        DATA_BYTE_9_W { w: self }
    }
    #[doc = "Bits 24:31 - Data byte 3 of Rx/Tx frame."]
    #[inline(always)]
    pub fn data_byte_8(&mut self) -> DATA_BYTE_8_W {
        DATA_BYTE_8_W { w: self }
    }
}
DATA_BYTE_10_W
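These reader/writer proxies follow the usual svd2rust pattern: `read()` yields an `R` and `write()` takes a closure over `W`. A minimal usage sketch; the peripheral handle `can0` and the register path are hypothetical, only the field methods come from the generated code above:

// Hypothetical handle to the peripheral that owns MB6_16B_WORD2.
// `bits()` is unsafe because it writes raw bits into the field.
can0.mb6_16b_word2.write(|w| unsafe {
    w.data_byte_8().bits(0xde).data_byte_9().bits(0xad)
});
let byte11: u8 = can0.mb6_16b_word2.read().data_byte_11().bits();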
filterpolicy_global_binding.go
/*
 * Copyright (c) 2021 Citrix Systems, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package filter

/**
 * Binding class showing the global that can be bound to filterpolicy.
 */
type Filterpolicyglobalbinding struct {
	/**
	 * The entity name to which policy is bound
	 */
	Boundto string `json:"boundto,omitempty"`
	Priority uint32 `json:"priority,omitempty"`
	Activepolicy uint32 `json:"activepolicy,omitempty"`
	/**
	 * Name of the filter policy to be displayed. If a name is not provided, information about all the filter policies is shown.
	 */
	Name string `json:"name,omitempty"`
}
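Because every field carries a `json` tag, the binding round-trips through `encoding/json`. A small standalone sketch; the payload values are made up, and the local type mirrors (a trimmed copy of) the binding above:

package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// Local stand-in mirroring the binding type above (fields trimmed).
type Filterpolicyglobalbinding struct {
	Boundto  string `json:"boundto,omitempty"`
	Priority uint32 `json:"priority,omitempty"`
}

func main() {
	// Decode a NITRO-style response body into the binding type.
	data := []byte(`{"boundto":"vs_example","priority":100}`)
	var b Filterpolicyglobalbinding
	if err := json.Unmarshal(data, &b); err != nil {
		log.Fatal(err)
	}
	fmt.Println(b.Boundto, b.Priority) // vs_example 100
}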
deletenotification.component.ts
import { Component, Input, Inject } from '@angular/core';
import { FormBuilder } from '@angular/forms';
import { MatDialogRef, MAT_DIALOG_DATA } from '@angular/material';
import { HttpClient, HttpHeaders, HttpErrorResponse } from '@angular/common/http';
// import { Observable, of } from 'rxjs';
// import { map, catchError, tap, switchMap } from 'rxjs/operators';
import { ActivatedRoute } from '@angular/router';

interface DialogData {
  id: string;
  nameid: string;
}

@Component({
  selector: 'ngx-deletenotification',
  templateUrl: './deletenotification.component.html',
  styleUrls: ['./deletenotification.component.scss'],
})
export class DeleteNNotificationComponent {
  items;
  dataSource: any[];
  myDefaultValue: String = "a";

  constructor(
    // private formBuilder: FormBuilder,
    private http: HttpClient,
    private route: ActivatedRoute,
    public dialogRef: MatDialogRef<DeleteNNotificationComponent>,
    @Inject(MAT_DIALOG_DATA) public data: DialogData) { }

  onSubmit() {
    this.http.delete<any>('http://192.169.118.5:3000/deletenotification/' + this.data.id, {})
      .subscribe((delet) => {
        this.dialogRef.close();
      });
  }
}
  checkoutForm;
  imageFile: File
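For context, such a dialog component is typically opened from a parent component via MatDialog. A hedged sketch; the caller component, its selector, and the ids are illustrative, not taken from this repo:

import { Component } from '@angular/core';
import { MatDialog } from '@angular/material';

// Hypothetical parent component: opens the dialog with the payload the
// component above reads via MAT_DIALOG_DATA.
@Component({ selector: 'ngx-notification-list', template: '' })
export class NotificationListComponent {
  constructor(private dialog: MatDialog) { }

  openDelete(id: string, nameid: string) {
    const ref = this.dialog.open(DeleteNNotificationComponent, {
      data: { id, nameid },
    });
    ref.afterClosed().subscribe(() => {
      // e.g., reload the notification table here
    });
  }
}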
cse.rs
#[doc = "Reader of register CSE"] pub type R = crate::R<u32, super::CSE>; #[doc = "Reader of field `CSR`"] pub type CSR_R = crate::R<u16, u16>; impl R { #[doc = "Bits 0:9 - Carrier Sense Error"] #[inline(always)] pub fn
(&self) -> CSR_R {
        CSR_R::new((self.bits & 0x03ff) as u16)
    }
}
csr
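For completeness, reading this field through the generated API looks as follows; `emac` is a hypothetical handle to the peripheral that owns CSE, and only `read().csr().bits()` comes from the code above:

// Bits 0:9 hold the carrier-sense error count; higher bits read as zero.
let cse_count: u16 = emac.cse.read().csr().bits();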
enums.rs
use std::fmt;

use reflect::EnumDescriptor;
use reflect::EnumValueDescriptor;
use reflect::ProtobufValue;

/// Trait implemented by all protobuf enum types.
pub trait ProtobufEnum: Eq + Sized + Copy + 'static + ProtobufValue + fmt::Debug + Default {
    /// Get enum `i32` value.
    fn value(&self) -> i32;

    /// Try to create an enum from `i32` value.
    /// Return `None` if value is unknown.
    fn from_i32(v: i32) -> Option<Self>;

    /// Get all enum values for enum type.
    fn values() -> &'static [Self] {
        panic!();
    }

    /// Get enum value descriptor.
    fn descriptor(&self) -> &'static EnumValueDescriptor {
        self.enum_descriptor().value_by_number(self.value()).unwrap()
    }

    /// Get enum descriptor.
    fn enum_descriptor(&self) -> &'static EnumDescriptor {
        Self::enum_descriptor_static()
    }
    /// Get enum descriptor by type.
    fn enum_descriptor_static() -> &'static EnumDescriptor {
        panic!();
    }
}
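As a usage sketch, suppose the code generator emitted an enum `Color` implementing this trait (the type and its variants are hypothetical; the methods are the trait's own, and generated types override `values()` instead of hitting the `panic!()` default):

// Decode a wire value, falling back to the Default variant on None.
let c = Color::from_i32(2).unwrap_or_default();
assert_eq!(c.value(), 2);
// Enumerate all variants via the generated `values()` override.
for v in Color::values() {
    println!("{:?} = {}", v, v.value());
}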
croatiaLow.js
/**
 * @license
 * Copyright (c) 2018 amCharts (Antanas Marcelionis, Martynas Majeris)
 *
 * This software is provided under multiple licenses. Please see below for
 * links to appropriate usage.
 *
 * Free amCharts linkware license. Details and conditions:
 * https://github.com/amcharts/amcharts4/blob/master/LICENSE
 *
 * One of the amCharts commercial licenses. Details and pricing:
 * https://www.amcharts.com/online-store/
 * https://www.amcharts.com/online-store/licenses-explained/
 *
 * If in doubt, contact amCharts at [email protected]
 *
 * PLEASE DO NOT REMOVE THIS COPYRIGHT NOTICE.
 * @hidden
 */
am4internal_webpackJsonp(["ee28"],{vcyM:function(e,o,t){"use strict";Object.defineProperty(o,"__esModule",{value:!0});window.am4geodata_croatiaLow={type:"FeatureCollection",features:[{type:"Feature",geometry:{type:"Polygon",coordinates:[[[16.2553,46.0679],[16.2955,46.0125],[16.3356,46.0268],[16.4237,46.0054],[16.4638,45.9766],[16.5397,45.9543],[16.593,45.9157],[16.6595,45.9239],[16.6966,45.9178],[16.7025,45.8653],[16.6855,45.8386],[16.6002,45.8147],[16.5767,45.7999],[16.5062,45.7951],[16.5213,45.7618],[16.5021,45.7285],[16.5715,45.7075],[16.6239,45.6663],[16.5967,45.6207],[16.5505,45.6127],[16.4794,45.5605],[16.462,45.587],[16.4208,45.5953],[16.3982,45.6262],[16.3554,45.6187],[16.3019,45.6286],[16.2442,45.5942],[16.2104,45.6],[16.1406,45.6326],[16.0887,45.5951],[16.0659,45.5638],[16.1038,45.537],[16.1034,45.4647],[16.0786,45.4503],[16.0423,45.4848],[15.9871,45.4715],[15.9314,45.4739],[15.9224,45.5148],[15.8407,45.548],[15.7951,45.5211],[15.7018,45.6],[15.67,45.5985],[15.6184,45.6212],[15.5362,45.5732],[15.5113,45.605],[15.5167,45.6372],[15.4759,45.658],[15.445,45.6903],[15.3592,45.7363],[15.3472,45.773],[15.4126,45.7956],[15.4746,45.7982],[15.4924,45.8321],[15.5742,45.8527],[15.6303,45.8325],[15.7068,45.8443],[15.6819,45.9027],[15.7092,45.9223],[15.7097,45.9703],[15.7635,45.9707],[15.8181,45.9848],[15.9039,45.955],[15.946,45.9275],[15.9491,45.9051],[15.8366,45.8535],[15.8198,45.8309],[15.837,45.7903],[15.8168,45.7729],[15.8692,45.7487],[15.8622,45.7283],[15.7832,45.7339],[15.7759,45.6862],[15.8651,45.6565],[15.901,45.6316],[15.954,45.621],[15.9884,45.6724],[15.9972,45.7125],[16.0325,45.7434],[16.0854,45.7625],[16.1456,45.8042],[16.1991,45.7935],[16.204,45.8267],[16.2337,45.8374],[16.2031,45.8869],[16.1787,45.8947],[16.1771,45.9346],[16.1547,45.9669],[16.1559,45.9877],[16.2166,46.0203],[16.2198,46.0501],[16.2553,46.0679]]]},properties:{name:"Zagreb 
County",id:"HR-01",CNTRY:"Croatia",TYPE:"County"},id:"HR-01"},{type:"Feature",geometry:{type:"Polygon",coordinates:[[[15.8645,46.2733],[15.8788,46.2313],[15.9393,46.2014],[16.0215,46.1863],[16.0874,46.1962],[16.1481,46.1802],[16.1677,46.2003],[16.2494,46.188],[16.2545,46.1529],[16.2425,46.0938],[16.2553,46.0679],[16.2198,46.0501],[16.2166,46.0203],[16.1559,45.9877],[16.1547,45.9669],[16.1162,45.9675],[16.0459,45.9357],[15.9491,45.9051],[15.946,45.9275],[15.9039,45.955],[15.8181,45.9848],[15.7635,45.9707],[15.7097,45.9703],[15.7164,46.0591],[15.6491,46.0869],[15.6064,46.1151],[15.6064,46.1629],[15.6471,46.1875],[15.6506,46.2173],[15.6817,46.2279],[15.7879,46.2168],[15.8256,46.259],[15.8645,46.2733]]]},properties:{name:"Krapina-Zagorje",id:"HR-02",CNTRY:"Croatia",TYPE:"County"},id:"HR-02"},{type:"Feature",geometry:{type:"Polygon",coordinates:[[[15.8403,45.2254],[15.7962,45.2534],[15.8275,45.2899],[15.7873,45.3465],[15.7928,45.3628],[15.8468,45.3837],[15.8354,45.4277],[15.8649,45.4864],[15.9314,45.4739],[15.9871,45.4715],[16.0423,45.4848],[16.0786,45.4503],[16.1034,45.4647],[16.1038,45.537],[16.0659,45.5638],[16.0887,45.5951],[16.1406,45.6326],[16.2104,45.6],[16.2442,45.5942],[16.3019,45.6286],[16.3554,45.6187],[16.3982,45.6262],[16.4208,45.5953],[16.462,45.587],[16.4794,45.5605],[16.5505,45.6127],[16.5967,45.6207],[16.6239,45.6663],[16.6623,45.6711],[16.7025,45.6569],[16.7365,45.6265],[16.8004,45.623],[16.8189,45.5709],[16.8732,45.5299],[16.8555,45.4947],[16.9383,45.4826],[16.9598,45.4573],[16.9994,45.4576],[17.0536,45.4005],[17.1179,45.3799],[17.1389,45.3517],[17.1816,45.3566],[17.1871,45.3334],[17.1616,45.2952],[17.0706,45.2307],[17.113,45.207],[17.1459,45.1621],[16.9939,45.2213],[16.9347,45.2757],[16.9136,45.2539],[16.8365,45.215],[16.8214,45.1841],[16.7327,45.2063],[16.6962,45.1957],[16.6026,45.2294],[16.5331,45.2263],[16.4933,45.2085],[16.4634,45.1408],[16.3971,45.1117],[16.4051,45.0899],[16.3588,45.0305],[16.3561,45.002],[16.3116,44.9984],[16.2286,45.0093],[16.1487,45.0775],[16.0828,45.0996],[16.0166,45.2082],[15.9712,45.224],[15.9344,45.2044],[15.8403,45.2254]]]},properties:{name:"Sisak-Moslavina",id:"HR-03",CNTRY:"Croatia",TYPE:"County"},id:"HR-03"},{type:"Feature",geometry:{type:"Polygon",coordinates:[[[15.3472,45.773],[15.3592,45.7363],[15.445,45.6903],[15.4759,45.658],[15.5167,45.6372],[15.5113,45.605],[15.5362,45.5732],[15.6184,45.6212],[15.67,45.5985],[15.7018,45.6],[15.7951,45.5211],[15.8407,45.548],[15.9224,45.5148],[15.9314,45.4739],[15.8649,45.4864],[15.8354,45.4277],[15.8468,45.3837],[15.7928,45.3628],[15.7873,45.3465],[15.8275,45.2899],[15.7962,45.2534],[15.8403,45.2254],[15.7614,45.1665],[15.7823,45.1134],[15.7459,45.0632],[15.7846,45.0045],[15.7864,44.9725],[15.7528,44.9681],[15.7387,44.9286],[15.7117,44.9132],[15.6604,44.9378],[15.6098,44.9232],[15.5666,44.9558],[15.5184,44.9149],[15.4427,44.9262],[15.366,45.0065],[15.303,45.0224],[15.288,45.0418],[15.2372,45.0614],[15.1963,45.1068],[15.1383,45.074],[15.0914,45.0849],[15.0676,45.108],[15.0113,45.1244],[14.9705,45.1675],[14.9936,45.3111],[15.0813,45.2948],[15.1784,45.3097],[15.1741,45.3276],[15.2285,45.3946],[15.2244,45.4266],[15.2738,45.4657],[15.3322,45.4528],[15.3882,45.4875],[15.3032,45.5366],[15.3005,45.5834],[15.2789,45.6041],[15.3043,45.6313],[15.3995,45.6533],[15.3598,45.7164],[15.3004,45.6899],[15.2644,45.7299],[15.3472,45.773]]]},properties:{name:"Karlovac",id:"HR-04",CNTRY:"Croatia",TYPE:"County"},id:"HR-04"},{type:"Feature",geometry:{type:"Polygon",coordinates:[[[16.2904,46.3728],[16.3309,46.3581],[16.3619,46.
3226],[16.4103,46.3131],[16.5392,46.3119],[16.5573,46.3291],[16.6012,46.3059],[16.6781,46.3198],[16.6991,46.3062],[16.7656,46.2996],[16.7615,46.2543],[16.6837,46.1977],[16.6077,46.2042],[16.5389,46.1785],[16.5446,46.1626],[16.4314,46.1582],[16.3768,46.1205],[16.3356,46.0268],[16.2955,46.0125],[16.2553,46.0679],[16.2425,46.0938],[16.2545,46.1529],[16.2494,46.188],[16.1677,46.2003],[16.1481,46.1802],[16.0874,46.1962],[16.0215,46.1863],[15.9393,46.2014],[15.8788,46.2313],[15.8645,46.2733],[15.9003,46.2922],[15.9463,46.292],[16.0744,46.3393],[16.0828,46.381],[16.1453,46.4063],[16.1943,46.3761],[16.2596,46.3867],[16.2904,46.3728]]]},properties:{name:"Varaždin",id:"HR-05",CNTRY:"Croatia",TYPE:"County"},id:"HR-05"},{type:"Feature",geometry:{type:"Polygon",coordinates:[[[17.2959,45.9839],[17.2133,45.9821],[17.1938,45.9536],[17.1314,45.8976],[17.1233,45.8775],[17.0666,45.9031],[17.0504,45.9343],[17.0017,45.9328],[16.9871,45.9665],[16.9403,45.9846],[16.8913,45.9819],[16.9008,46.0186],[16.8698,46.0559],[16.823,46.0476],[16.7722,46.0534],[16.7186,46.0307],[16.7023,45.9947],[16.6966,45.9178],[16.6595,45.9239],[16.593,45.9157],[16.5397,45.9543],[16.4638,45.9766],[16.4237,46.0054],[16.3356,46.0268],[16.3768,46.1205],[16.4314,46.1582],[16.5446,46.1626],[16.5389,46.1785],[16.6077,46.2042],[16.6837,46.1977],[16.7615,46.2543],[16.7656,46.2996],[16.8025,46.3093],[16.8308,46.2864],[16.8545,46.3532],[16.8781,46.3344],[16.8844,46.2755],[16.9751,46.2241],[17.0105,46.2203],[17.1028,46.1874],[17.182,46.1506],[17.1766,46.108],[17.2183,46.0816],[17.2703,46.0327],[17.258,46.0133],[17.2959,45.9839]]]},properties:{name:"Koprivnica-Križevci",id:"HR-06",CNTRY:"Croatia",TYPE:"County"},id:"HR-06"},{type:"Feature",geometry:{type:"Polygon",coordinates:[[[16.6966,45.9178],[16.7023,45.9947],[16.7186,46.0307],[16.7722,46.0534],[16.823,46.0476],[16.8698,46.0559],[16.9008,46.0186],[16.8913,45.9819],[16.9403,45.9846],[16.9871,45.9665],[17.0017,45.9328],[17.0504,45.9343],[17.0666,45.9031],[17.1233,45.8775],[17.184,45.8562],[17.1991,45.8318],[17.2758,45.8045],[17.2867,45.7718],[17.344,45.7615],[17.3888,45.6958],[17.4423,45.7209],[17.4984,45.6913],[17.4492,45.6295],[17.4442,45.5767],[17.4401,45.5284],[17.4101,45.5106],[17.3599,45.5257],[17.2871,45.5114],[17.228,45.5126],[17.1853,45.5381],[17.1635,45.5205],[17.1238,45.5285],[17.0831,45.5099],[17.0566,45.5261],[17.0203,45.5109],[16.9611,45.5302],[16.9336,45.5157],[16.9383,45.4826],[16.8555,45.4947],[16.8732,45.5299],[16.8189,45.5709],[16.8004,45.623],[16.7365,45.6265],[16.7025,45.6569],[16.6623,45.6711],[16.6239,45.6663],[16.5715,45.7075],[16.5021,45.7285],[16.5213,45.7618],[16.5062,45.7951],[16.5767,45.7999],[16.6002,45.8147],[16.6855,45.8386],[16.7025,45.8653],[16.6966,45.9178]]]},properties:{name:"Bjelovar-Bilogora",id:"HR-07",CNTRY:"Croatia",TYPE:"County"},id:"HR-07"},{type:"Feature",geometry:{type:"MultiPolygon",coordinates:[[[[14.5654,45.2423],[14.6375,45.1618],[14.6685,45.1477],[14.6632,45.092],[14.8207,44.9798],[14.8083,44.9637],[14.7542,44.9698],[14.7348,44.9357],[14.6526,44.9644],[14.6081,45.0275],[14.562,45.0129],[14.4897,45.0264],[14.4364,45.0682],[14.4358,45.0922],[14.5277,45.1241],[14.5468,45.1671],[14.5357,45.1996],[14.5654,45.2423]]],[[[15.2244,45.4266],[15.2285,45.3946],[15.1741,45.3276],[15.1784,45.3097],[15.0813,45.2948],[14.9936,45.3111],[14.9705,45.1675],[15.0113,45.1244],[15.0259,45.0877],[15.0056,45.0645],[14.8916,45.0544],[14.8822,45.0327],[14.8441,45.0962],[14.8152,45.1148],[14.7141,45.1505],[14.6546,45.2022],[14.6124,45.2185],[14.5432,45.2787],[14.4591,45.3213],
[14.3294,45.3565],[14.2829,45.3059],[14.227,45.1524],[14.2025,45.1638],[14.2095,45.2111],[14.194,45.2561],[14.2003,45.301],[14.2207,45.3175],[14.1909,45.3856],[14.2092,45.3977],[14.1328,45.4319],[14.1159,45.4821],[14.2033,45.4869],[14.2433,45.5103],[14.3439,45.487],[14.4295,45.5081],[14.4858,45.5389],[14.5041,45.5888],[14.5596,45.6405],[14.5728,45.6728],[14.6163,45.6636],[14.598,45.6273],[14.6542,45.5921],[14.6814,45.5894],[14.7199,45.5349],[14.7981,45.5032],[14.8181,45.4608],[14.9089,45.4777],[14.9326,45.5297],[14.9837,45.4991],[15.0282,45.4857],[15.0563,45.4941],[15.0883,45.4664],[15.1603,45.4245],[15.2244,45.4266]]],[[[14.3426,44.7114],[14.3947,44.6921],[14.4029,44.6593],[14.3996,44.6183],[14.435,44.5657],[14.3722,44.5614],[14.396,44.6034],[14.3538,44.6449],[14.3426,44.7114]]],[[[14.728,44.8513],[14.7414,44.8185],[14.8304,44.7574],[14.8616,44.7174],[14.8387,44.7014],[14.7812,44.7491],[14.7104,44.7834],[14.6799,44.8441],[14.728,44.8513]]],[[[14.3275,45.1719],[14.359,45.1626],[14.3549,45.1149],[14.401,45.0115],[14.435,44.9796],[14.4711,44.9825],[14.461,44.9043],[14.4475,44.8762],[14.4803,44.7453],[14.4646,44.7272],[14.4789,44.6942],[14.5144,44.6629],[14.5293,44.6178],[14.5001,44.6022],[14.4598,44.6242],[14.4396,44.6561],[14.4172,44.675],[14.3926,44.6937],[14.3917,44.737],[14.3716,44.7552],[14.3679,44.8028],[14.3373,44.8145],[14.3345,44.8542],[14.3056,44.9129],[14.3162,44.9525],[14.3636,44.9036],[14.4032,44.9122],[14.3793,44.9906],[14.3523,45.0025],[14.3498,45.053],[14.3066,45.07],[14.2731,45.1211],[14.3019,45.1634],[14.3275,45.1719]]]]},properties:{name:"Primorje-Gorski Kotar",id:"HR-08",CNTRY:"Croatia",TYPE:"County"},id:"HR-08"},{type:"Feature",geometry:{type:"MultiPolygon",coordinates:[[[[15.0113,45.1244],[15.0676,45.108],[15.0914,45.0849],[15.1383,45.074],[15.1963,45.1068],[15.2372,45.0614],[15.288,45.0418],[15.303,45.0224],[15.366,45.0065],[15.4427,44.9262],[15.5184,44.9149],[15.5666,44.9558],[15.6098,44.9232],[15.6604,44.9378],[15.7117,44.9132],[15.7387,44.9286],[15.7507,44.8701],[15.7843,44.8465],[15.7149,44.817],[15.7768,44.748],[15.8169,44.7218],[15.8976,44.747],[15.9568,44.7119],[15.9761,44.6729],[16.0315,44.6538],[16.0401,44.5911],[16.0309,44.5541],[16.1101,44.5205],[16.1445,44.4846],[16.1382,44.4537],[16.0585,44.4229],[16.0395,44.436],[16.0416,44.4568],[16.0098,44.4859],[15.9475,44.4966],[15.8684,44.4486],[15.7905,44.4224],[15.7973,44.3817],[15.7723,44.3603],[15.8271,44.3336],[15.823,44.2914],[15.7919,44.267],[15.7066,44.2651],[15.6505,44.2841],[15.5706,44.2938],[15.547,44.3293],[15.5063,44.3515],[15.4015,44.3807],[15.3241,44.4124],[15.2893,44.3621],[15.278,44.3647],[15.1852,44.4467],[15.1357,44.4722],[15.1006,44.5135],[15.0287,44.5609],[14.9902,44.5742],[14.9625,44.6308],[14.9297,44.6533],[14.8975,44.6969],[14.8993,44.7441],[14.8811,44.7925],[14.8855,44.8604],[14.9276,44.9517],[14.8822,45.0327],[14.8916,45.0544],[15.0056,45.0645],[15.0259,45.0877],[15.0113,45.1244]]],[[[14.8034,44.6446],[14.873,44.6059],[14.9113,44.6118],[14.9596,44.578],[14.9863,44.5343],[15.0313,44.5262],[15.0231,44.5024],[14.9364,44.5344],[15.0444,44.4424],[15.0432,44.4801],[15.1395,44.4121],[15.1762,44.3983],[15.2476,44.3502],[15.2568,44.3232],[15.2135,44.3119],[15.1359,44.3332],[15.0878,44.362],[15.1159,44.3791],[15.0373,44.4101],[14.9655,44.4648],[14.9094,44.4838],[14.9178,44.5137],[14.8835,44.5572],[14.7903,44.6287],[14.8034,44.6446]]]]},properties:{name:"Lika-Senj",id:"HR-09",CNTRY:"Croatia",TYPE:"County"},id:"HR-09"},{type:"Feature",geometry:{type:"Polygon",coordinates:[[[17.9154,45.7872],[17.9367,45.7
309],[18.0012,45.7144],[18.0376,45.6109],[17.9942,45.5712],[17.937,45.4989],[17.8818,45.4839],[17.8648,45.4883],[17.8366,45.4767],[17.7797,45.4978],[17.6303,45.515],[17.5935,45.5489],[17.5303,45.5762],[17.4987,45.5593],[17.4442,45.5767],[17.4492,45.6295],[17.4984,45.6913],[17.4423,45.7209],[17.3888,45.6958],[17.344,45.7615],[17.2867,45.7718],[17.2758,45.8045],[17.1991,45.8318],[17.184,45.8562],[17.1233,45.8775],[17.1314,45.8976],[17.1938,45.9536],[17.2133,45.9821],[17.2959,45.9839],[17.3363,45.9802],[17.39,45.9305],[17.4345,45.9495],[17.5274,45.9335],[17.566,45.9361],[17.6217,45.9027],[17.6518,45.8465],[17.7786,45.8154],[17.8499,45.7804],[17.9154,45.7872]]]},properties:{name:"Virovitica-Podravina",id:"HR-10",CNTRY:"Croatia",TYPE:"County"},id:"HR-10"},{type:"Feature",geometry:{type:"Polygon",coordinates:[[[17.8818,45.4839],[17.8839,45.4578],[17.9374,45.4474],[17.9794,45.419],[18.037,45.3958],[18.0856,45.3981],[18.1079,45.3784],[18.1031,45.3345],[18.1196,45.3006],[18.0779,45.268],[17.9886,45.285],[17.8984,45.2835],[17.8791,45.2458],[17.8398,45.2107],[17.7538,45.1871],[17.7265,45.2171],[17.7066,45.2682],[17.6138,45.2692],[17.5106,45.3133],[17.4946,45.3443],[17.4095,45.3917],[17.3491,45.3872],[17.3244,45.3678],[17.2487,45.393],[17.1816,45.3566],[17.1389,45.3517],[17.1179,45.3799],[17.0536,45.4005],[16.9994,45.4576],[16.9598,45.4573],[16.9383,45.4826],[16.9336,45.5157],[16.9611,45.5302],[17.0203,45.5109],[17.0566,45.5261],[17.0831,45.5099],[17.1238,45.5285],[17.1635,45.5205],[17.1853,45.5381],[17.228,45.5126],[17.2871,45.5114],[17.3599,45.5257],[17.4101,45.5106],[17.4401,45.5284],[17.4442,45.5767],[17.4987,45.5593],[17.5303,45.5762],[17.5935,45.5489],[17.6303,45.515],[17.7797,45.4978],[17.8366,45.4767],[17.8648,45.4883],[17.8818,45.4839]]]},properties:{name:"Požega-Slavonia",id:"HR-11",CNTRY:"Croatia",TYPE:"County"},id:"HR-11"},{type:"Feature",geometry:{type:"Polygon",coordinates:[[[17.1816,45.3566],[17.2487,45.393],[17.3244,45.3678],[17.3491,45.3872],[17.4095,45.3917],[17.4946,45.3443],[17.5106,45.3133],[17.6138,45.2692],[17.7066,45.2682],[17.7265,45.2171],[17.7538,45.1871],[17.8398,45.2107],[17.8791,45.2458],[17.8984,45.2835],[17.9886,45.285],[18.0779,45.268],[18.1911,45.2404],[18.219,45.2207],[18.2697,45.2265],[18.3005,45.2437],[18.3921,45.2427],[18.4374,45.2182],[18.5162,45.2092],[18.5483,45.1925],[18.4955,45.1251],[18.5343,45.0929],[18.5035,45.0582],[18.4704,45.0653],[18.4223,45.1094],[18.3196,45.1026],[18.2555,45.14],[18.2117,45.1239],[18.2214,45.0977],[18.1825,45.0766],[18.121,45.0806],[18.0718,45.1078],[18.0704,45.1418],[17.987,45.1456],[17.9379,45.0816],[17.849,45.0446],[17.7857,45.0824],[17.7564,45.0865],[17.67,45.1346],[17.5983,45.1066],[17.5136,45.1085],[17.4932,45.1247],[17.3628,45.1381],[17.3099,45.1782],[17.2742,45.1885],[17.2446,45.1453],[17.1777,45.1468],[17.1459,45.1621],[17.113,45.207],[17.0706,45.2307],[17.1616,45.2952],[17.1871,45.3334],[17.1816,45.3566]]]},properties:{name:"Brod-Posavina",id:"HR-12",CNTRY:"Croatia",TYPE:"County"},id:"HR-12"},{type:"Feature",geometry:{type:"MultiPolygon",coordinates:[[[[15.0364,44.3241],[15.095,44.2993],[15.051,44.2848],[15.0364,44.3241]]],[[[16.0585,44.4229],[16.1382,44.4537],[16.1638,44.4052],[16.1204,44.3834],[16.2041,44.3622],[16.2097,44.2859],[16.2292,44.2167],[16.1498,44.2031],[16.137,44.1359],[16.0916,44.1134],[15.9962,44.1451],[15.9577,44.1778],[15.9164,44.1276],[15.8196,44.0622],[15.8541,44.0457],[15.8467,43.9973],[15.7939,43.9734],[15.8199,43.9464],[15.7535,43.8841],[15.7427,43.8541],[15.6789,43.8769],[15.6362,43.8623],[15.5836,43.
8882],[15.5576,43.8717],[15.5227,43.8991],[15.4555,43.9237],[15.3442,44.0286],[15.284,44.0616],[15.2656,44.0942],[15.2249,44.112],[15.2095,44.1467],[15.1431,44.1957],[15.1688,44.2289],[15.2145,44.2577],[15.2091,44.2932],[15.2847,44.2467],[15.2821,44.2813],[15.3288,44.2932],[15.4019,44.2596],[15.4444,44.2638],[15.505,44.2515],[15.5138,44.2104],[15.4746,44.1992],[15.5185,44.1705],[15.574,44.1802],[15.5294,44.225],[15.5373,44.2606],[15.4533,44.2798],[15.4187,44.3119],[15.3407,44.3369],[15.2893,44.3621],[15.3241,44.4124],[15.4015,44.3807],[15.5063,44.3515],[15.547,44.3293],[15.5706,44.2938],[15.6505,44.2841],[15.7066,44.2651],[15.7919,44.267],[15.823,44.2914],[15.8271,44.3336],[15.7723,44.3603],[15.7973,44.3817],[15.7905,44.4224],[15.8684,44.4486],[15.9475,44.4966],[16.0098,44.4859],[16.0416,44.4568],[16.0395,44.436],[16.0585,44.4229]]],[[[14.7972,44.4171],[14.7959,44.3836],[14.8228,44.3544],[14.7967,44.3417],[14.7618,44.4161],[14.7972,44.4171]]],[[[14.8072,44.2626],[14.8668,44.2306],[14.83,44.2],[14.7882,44.257],[14.8072,44.2626]]],[[[14.8639,44.1705],[14.911,44.1422],[14.9479,44.102],[15.0242,44.064],[15.0608,44.0014],[15.0981,43.9909],[15.1736,43.9256],[15.2236,43.9046],[15.1816,43.8836],[15.1035,43.927],[14.9929,44.0553],[14.9384,44.0855],[14.8568,44.151],[14.8639,44.1705]]],[[[15.269,44.015],[15.3251,43.9943],[15.3901,43.9377],[15.4269,43.8897],[15.3085,43.9548],[15.2657,43.9998],[15.269,44.015]]],[[[15.0989,44.1372],[15.1872,44.0933],[15.1992,44.032],[15.066,44.1313],[15.0989,44.1372]]]]},properties:{name:"Zadar",id:"HR-13",CNTRY:"Croatia",TYPE:"County"},id:"HR-13"},{type:"Feature",geometry:{type:"Polygon",coordinates:[[[18.9984,45.4399],[18.9489,45.4365],[18.8823,45.4835],[18.7966,45.4645],[18.8161,45.4209],[18.7702,45.4028],[18.751,45.4151],[18.6465,45.3907],[18.6463,45.3553],[18.6268,45.3176],[18.57,45.2967],[18.5143,45.2883],[18.5142,45.2435],[18.5564,45.2256],[18.5483,45.1925],[18.5162,45.2092],[18.4374,45.2182],[18.3921,45.2427],[18.3005,45.2437],[18.2697,45.2265],[18.219,45.2207],[18.1911,45.2404],[18.0779,45.268],[18.1196,45.3006],[18.1031,45.3345],[18.1079,45.3784],[18.0856,45.3981],[18.037,45.3958],[17.9794,45.419],[17.9374,45.4474],[17.8839,45.4578],[17.8818,45.4839],[17.937,45.4989],[17.9942,45.5712],[18.0376,45.6109],[18.0012,45.7144],[17.9367,45.7309],[17.9154,45.7872],[17.9984,45.7943],[18.0827,45.765],[18.1229,45.7892],[18.2303,45.779],[18.2445,45.762],[18.3343,45.7533],[18.3648,45.7732],[18.4074,45.7405],[18.5745,45.8003],[18.5744,45.8145],[18.6545,45.8749],[18.6719,45.9106],[18.7096,45.9124],[18.8589,45.857],[18.853,45.8021],[18.8873,45.7751],[18.9105,45.7062],[18.9681,45.6771],[18.9405,45.6238],[18.8971,45.5648],[18.938,45.5379],[18.9909,45.542],[19.0676,45.5317],[19.1017,45.5144],[19.0836,45.487],[18.9995,45.4936],[18.9984,45.4399]]]},properties:{name:"Osijek-Baranja",id:"HR-14",CNTRY:"Croatia",TYPE:"County"},id:"HR-14"},{type:"Feature",geometry:{type:"MultiPolygon",coordinates:[[[[15.6072,43.827],[15.6059,43.815],[15.6453,43.7978],[15.6266,43.7832],[15.5797,43.8111],[15.6072,43.827]]],[[[16.5365,43.9622],[16.4941,43.9639],[16.4164,43.9148],[16.3542,43.9206],[16.3382,43.8831],[16.4366,43.8157],[16.3467,43.765],[16.3393,43.7298],[16.3592,43.7109],[16.3298,43.6869],[16.2532,43.691],[16.2348,43.6722],[16.1899,43.6749],[16.1781,43.6517],[16.1351,43.6508],[16.1047,43.6155],[16.0468,43.5876],[16.0868,43.5607],[16.0933,43.5345],[16.0491,43.5266],[16.0188,43.5013],[15.975,43.5101],[15.9809,43.5368],[15.9348,43.5453],[15.9328,43.5862],[15.9136,43.6273],[15.9478,43.6591],[15.883
4,43.6949],[15.8558,43.7203],[15.7842,43.7576],[15.6954,43.7653],[15.6621,43.7995],[15.6401,43.8008],[15.6256,43.8439],[15.5582,43.8697],[15.5836,43.8882],[15.6362,43.8623],[15.6789,43.8769],[15.7427,43.8541],[15.7535,43.8841],[15.8199,43.9464],[15.7939,43.9734],[15.8467,43.9973],[15.8541,44.0457],[15.8196,44.0622],[15.9164,44.1276],[15.9577,44.1778],[15.9962,44.1451],[16.0916,44.1134],[16.137,44.1359],[16.1498,44.2031],[16.2292,44.2167],[16.2661,44.1746],[16.3202,44.1418],[16.3647,44.0849],[16.4224,44.0858],[16.4378,44.033],[16.4969,44.0296],[16.5365,43.9622]]]]},properties:{name:"Šibenik-Knin",id:"HR-15",CNTRY:"Croatia",TYPE:"County"},id:"HR-15"},{type:"Feature",geometry:{type:"Polygon",coordinates:[[[18.5483,45.1925],[18.5564,45.2256],[18.5142,45.2435],[18.5143,45.2883],[18.57,45.2967],[18.6268,45.3176],[18.6463,45.3553],[18.6465,45.3907],[18.751,45.4151],[18.7702,45.4028],[18.8161,45.4209],[18.7966,45.4645],[18.8823,45.4835],[18.9489,45.4365],[18.9984,45.4399],[19.0225,45.4258],[18.9748,45.3767],[19.025,45.3448],[19.0934,45.3343],[19.1127,45.2953],[19.252,45.2439],[19.3999,45.2328],[19.3986,45.1721],[19.3581,45.168],[19.3189,45.1999],[19.2651,45.1713],[19.18,45.2019],[19.1868,45.1715],[19.1462,45.1294],[19.0924,45.1282],[19.1165,45.0662],[19.0983,45.037],[19.1054,45.0083],[19.0545,44.9773],[19.1555,44.9553],[19.0805,44.9105],[19.0117,44.9035],[19.0256,44.8569],[18.9716,44.8512],[18.8528,44.8557],[18.765,44.9073],[18.8065,44.9432],[18.7585,45.001],[18.7158,45.0375],[18.6068,45.0651],[18.5712,45.0944],[18.5343,45.0929],[18.4955,45.1251],[18.5483,45.1925]]]},properties:{name:"Vukovar-Srijem",id:"HR-16",CNTRY:"Croatia",TYPE:"County"},id:"HR-16"},{type:"Feature",geometry:{type:"MultiPolygon",coordinates:[[[[16.5654,43.2318],[16.583,43.2058],[16.6676,43.1974],[16.728,43.1565],[16.8804,43.1621],[17.108,43.1335],[17.1163,43.1138],[16.9393,43.112],[16.7764,43.1277],[16.6665,43.1216],[16.5691,43.142],[16.5443,43.1404],[16.4816,43.1633],[16.4421,43.1641],[16.4202,43.1939],[16.4744,43.2108],[16.5284,43.2035],[16.5654,43.2318]]],[[[16.2454,43.4144],[16.3295,43.3956],[16.3787,43.3343],[16.319,43.3491],[16.2317,43.3863],[16.2454,43.4144]]],[[[16.4466,43.3911],[16.5508,43.3901],[16.7996,43.3559],[16.8912,43.3202],[16.8832,43.2824],[16.7932,43.2622],[16.7562,43.2653],[16.637,43.2556],[16.5314,43.27],[16.4112,43.3165],[16.4355,43.3304],[16.4466,43.3911]]],[[[16.0188,43.5013],[16.0491,43.5266],[16.0933,43.5345],[16.0868,43.5607],[16.0468,43.5876],[16.1047,43.6155],[16.1351,43.6508],[16.1781,43.6517],[16.1899,43.6749],[16.2348,43.6722],[16.2532,43.691],[16.3298,43.6869],[16.3592,43.7109],[16.3393,43.7298],[16.3467,43.765],[16.4366,43.8157],[16.3382,43.8831],[16.3542,43.9206],[16.4164,43.9148],[16.4941,43.9639],[16.5365,43.9622],[16.5844,43.9388],[16.6177,43.9048],[16.6991,43.8627],[16.7076,43.8212],[16.7501,43.7779],[16.794,43.7707],[16.8215,43.7358],[16.9003,43.6855],[17.0378,43.5556],[17.0681,43.5571],[17.1427,43.5049],[17.2238,43.5072],[17.2774,43.4698],[17.2784,43.4303],[17.2531,43.3901],[17.3294,43.2862],[17.3375,43.2546],[17.4157,43.2326],[17.4474,43.1791],[17.4129,43.1555],[17.3621,43.1647],[17.3766,43.1142],[17.3508,43.093],[17.2604,43.1515],[17.1752,43.1742],[17.08,43.2336],[17.0527,43.2704],[17.0116,43.2938],[16.948,43.3601],[16.8897,43.4012],[16.7925,43.4038],[16.7259,43.4139],[16.6919,43.4442],[16.6442,43.439],[16.5812,43.4643],[16.5273,43.4978],[16.427,43.5005],[16.4694,43.5369],[16.3523,43.552],[16.2942,43.5261],[16.1845,43.5078],[16.159,43.4829],[16.094,43.473],[16.0188,43.5013]]],[[[16.1859
,43.0819],[16.2561,43.0741],[16.2552,43.0528],[16.2131,43.022],[16.1139,43.0076],[16.0838,43.0489],[16.0987,43.0752],[16.1859,43.0819]]]]},properties:{name:"Split-Dalmatia",id:"HR-17",CNTRY:"Croatia",TYPE:"County"},id:"HR-17"},{type:"Feature",geometry:{type:"Polygon",coordinates:[[[14.1159,45.4821],[14.1328,45.4319],[14.2092,45.3977],[14.1909,45.3856],[14.2207,45.3175],[14.2003,45.301],[14.194,45.2561],[14.2095,45.2111],[14.2025,45.1638],[14.227,45.1524],[14.2252,45.1372],[14.1598,45.0591],[14.1561,45.0282],[14.1729,44.9864],[14.1499,44.9579],[14.1084,44.9707],[14.051,44.9384],[14.0284,44.9055],[13.9819,44.8933],[14.0024,44.8638],[13.98,44.8406],[14.0039,44.8037],[13.9501,44.8036],[13.8571,44.8157],[13.8289,44.8375],[13.8044,44.9298],[13.7651,44.9859],[13.7376,44.9859],[13.6911,45.051],[13.6395,45.0715],[13.5986,45.1414],[13.6128,45.1589],[13.5823,45.1986],[13.5968,45.2547],[13.5751,45.2647],[13.5974,45.3164],[13.5411,45.3296],[13.5445,45.3781],[13.5175,45.4156],[13.5113,45.4725],[13.5286,45.5004],[13.6077,45.465],[13.6767,45.4423],[13.7519,45.4705],[13.7968,45.4576],[13.8091,45.4326],[13.8728,45.4242],[13.9049,45.4514],[13.9825,45.4564],[14.0013,45.5204],[14.0695,45.4851],[14.1159,45.4821]]]},properties:{name:"Istria",id:"HR-18",CNTRY:"Croatia",TYPE:"County"},id:"HR-18"},{type:"Feature",geometry:{type:"MultiPolygon",coordinates:[[[[17.3729,42.8011],[17.3808,42.7869],[17.4508,42.7796],[17.5981,42.7455],[17.6699,42.7366],[17.7487,42.7097],[17.739,42.6893],[17.6478,42.7171],[17.5112,42.7329],[17.4596,42.754],[17.3895,42.7569],[17.332,42.7753],[17.3729,42.8011]]],[[[17.0111,43.0465],[17.0731,43.0244],[17.1709,43.026],[17.3033,43.0031],[17.391,42.9665],[17.446,42.952],[17.4523,42.9284],[17.4927,42.9098],[17.5291,42.9245],[17.5865,42.8995],[17.6535,42.8867],[17.6952,42.9245],[17.7294,42.9202],[17.7917,42.8898],[17.8163,42.9184],[17.8515,42.8968],[17.8536,42.8663],[17.9098,42.8092],[17.9942,42.7764],[18.1042,42.7135],[18.1031,42.6892],[18.1834,42.66],[18.2538,42.5979],[18.3253,42.6223],[18.3689,42.6181],[18.4377,42.5662],[18.4555,42.5087],[18.4469,42.48],[18.5149,42.4225],[18.3555,42.4993],[18.2902,42.5241],[18.2361,42.5585],[18.2306,42.6],[18.1267,42.6405],[18.0614,42.6513],[18.0727,42.6694],[17.9684,42.7127],[17.9041,42.7463],[17.8481,42.7885],[17.8449,42.8037],[17.7649,42.8046],[17.7364,42.7915],[17.4495,42.8678],[17.4538,42.8925],[17.421,42.9111],[17.3651,42.9137],[17.3382,42.9313],[17.247,42.9614],[17.2198,42.9814],[17.1363,42.9741],[17.0711,42.9846],[17.0182,43.0068],[17.0111,43.0465]]],[[[16.8698,42.7712],[16.9359,42.77],[16.9058,42.7275],[16.8453,42.7317],[16.8263,42.7664],[16.8698,42.7712]]],[[[17.3508,43.093],[17.3766,43.1142],[17.3621,43.1647],[17.4129,43.1555],[17.4474,43.1791],[17.4795,43.1608],[17.6419,43.0881],[17.6668,43.0376],[17.7139,42.9717],[17.6155,42.9407],[17.5388,42.9411],[17.4346,43.0152],[17.3508,43.093]]],[[[16.6977,42.9922],[16.7847,42.9664],[16.8525,42.9573],[16.9113,42.9713],[17.0506,42.981],[17.1128,42.9593],[17.1386,42.964],[17.1662,42.9353],[17.1217,42.9073],[17.049,42.9243],[16.9753,42.9274],[16.8963,42.8961],[16.8534,42.9059],[16.7749,42.9037],[16.6409,42.9362],[16.7044,42.9677],[16.6977,42.9922]]]]},properties:{name:"Dubrovnik-Neretva",id:"HR-19",CNTRY:"Croatia",TYPE:"County"},id:"HR-19"},{type:"Feature",geometry:{type:"Polygon",coordinates:[[[16.8545,46.3532],[16.8308,46.2864],[16.8025,46.3093],[16.7656,46.2996],[16.6991,46.3062],[16.6781,46.3198],[16.6012,46.3059],[16.5573,46.3291],[16.5392,46.3119],[16.4103,46.3131],[16.3619,46.3226],[16.3309,46.3581],[16.29
04,46.3728],[16.3077,46.3983],[16.2711,46.4295],[16.2506,46.4911],[16.3448,46.5389],[16.4301,46.5295],[16.5806,46.4658],[16.6666,46.4561],[16.7185,46.3928],[16.7604,46.3796],[16.7987,46.3857],[16.8545,46.3532]]]},properties:{name:"Medimurje",id:"HR-20",CNTRY:"Croatia",TYPE:"County"},id:"HR-20"},{type:"Feature",geometry:{type:"Polygon",coordinates:[[[16.1547,45.9669],[16.1771,45.9346],[16.1787,45.8947],[16.2031,45.8869],[16.2337,45.8374],[16.204,45.8267],[16.1991,45.7935],[16.1456,45.8042],[16.0854,45.7625],[16.0325,45.7434],[15.9972,45.7125],[15.9884,45.6724],[15.954,45.621],[15.901,45.6316],[15.8651,45.6565],[15.7759,45.6862],[15.7832,45.7339],[15.8622,45.7283],[15.8692,45.7487],[15.8168,45.7729],[15.837,45.7903],[15.8198,45.8309],[15.8366,45.8535],[15.9491,45.9051],[16.0459,45.9357],[16.1162,45.9675],[16.1547,45.9669]]]},properties:{name:"Grad Zagreb",id:"HR-21",CNTRY:"Croatia",TYPE:"City"},id:"HR-21"}]}}},["vcyM"]); //# sourceMappingURL=croatiaLow.js.map
batchnorm_layer.py
#!/usr/bin/env python3
# Copyright 2019 Christian Henning
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
- **title**          :utils/batchnorm_layer.py
- **author**         :ch
- **contact**        :[email protected]
- **created**        :09/02/2019
- **version**        :1.0
- **python_version** :3.6.8

Implementation of a hypernet compatible batchnorm layer.

The joint use of batch-normalization and hypernetworks is not straightforward,
mainly due to the statistics accumulated by the batch-norm operation which
expect the weights of the main network to only change slowly. If a
hypernetwork replaces the whole set of weights, the statistics previously
estimated by the batch-norm layer might be completely off.

To circumvent this problem, we provide multiple solutions:

- In a continual learning setting with one set of weights per task, we can
  simply estimate and store statistics per task (hence, the batch-norm
  operation has to be conditioned on the task).
- The statistics are distilled into the hypernetwork. This would require the
  addition of an extra loss term.
- The statistics can be treated as parameters that are outputted by the
  hypernetwork. In this case, nothing enforces that these "statistics" behave
  similarly to statistics that would result from a running estimate (hence,
  the resulting operation might have nothing in common with batch-norm).
- Always use the statistics estimated on the current batch.

Note, we also provide the option of turning off the statistics, in which case
the statistics will be set to zero mean and unit variance. This is helpful
when interpreting batch-normalization as a general form of gain modulation
(i.e., just applying a shift and scale to neural activities).
"""
from warnings import warn

import torch
import torch.nn as nn
import torch.nn.functional as F

class BatchNormLayer(nn.Module):
    r"""Hypernetwork-compatible batch-normalization layer.

    Note, batch normalization performs the following operation

    .. math::

        y = \frac{x - \mathrm{E}[x]}{\sqrt{\mathrm{Var}[x] + \epsilon}} * \
            \gamma + \beta

    This class allows to deviate from this standard implementation in order to
    provide the flexibility required when using hypernetworks. Therefore, we
    slightly change the notation to

    .. math::

        y = \frac{x - m_{\text{stats}}^{(t)}}{\sqrt{v_{\text{stats}}^{(t)} + \
            \epsilon}} * \gamma^{(t)} + \beta^{(t)}

    We use this notation to highlight that the running statistics
    :math:`m_{\text{stats}}^{(t)}` and :math:`v_{\text{stats}}^{(t)}` are not
    necessarily estimates resulting from mean and variance computation but
    might be learned parameters (e.g., the outputs of a hypernetwork).

    We additionally use the superscript :math:`(t)` to denote that the gain
    :math:`\gamma`, offset :math:`\beta` and statistics may be dynamically
    selected based on some external context information.

    This class provides the possibility to checkpoint statistics
    :math:`m_{\text{stats}}^{(t)}` and :math:`v_{\text{stats}}^{(t)}`, but
    **not** gains and offsets.

    .. note::
        If context-dependent gains :math:`\gamma^{(t)}` and offsets
        :math:`\beta^{(t)}` are required, then they have to be maintained
        externally, e.g., via a task-conditioned hypernetwork (see
        `this paper`_ for an example) and passed to the :meth:`forward`
        method.

        .. _this paper: https://arxiv.org/abs/1906.00695

    Attributes:
        weights: A list of all internal weights of this layer. If all weights
            are assumed to be generated externally, then this attribute will
            be ``None``.
        param_shapes: A list of list of integers. Each list represents the
            shape of a parameter tensor. Note, this attribute is independent
            of the attribute :attr:`weights`, it always comprises the shapes
            of all weight tensors as if the network would be stand-alone
            (i.e., no weights being passed to the :meth:`forward` method).
            Note, unless ``learnable_stats`` is enabled, the layer statistics
            are not considered here.
        hyper_shapes: A list of list of integers. Each list represents the
            shape of a weight tensor that can be passed to the :meth:`forward`
            method. If all weights are maintained internally, then this
            attribute will be ``None``.
            Specifically, this attribute is controlled by the argument
            ``affine``. If ``affine`` is ``True``, this attribute will be
            ``None``. Otherwise this attribute contains the shape of
            :math:`\gamma` and :math:`\beta`.
        num_stats: The number :math:`T` of internally managed statistics
            :math:`\{(m_{\text{stats}}^{(1)}, v_{\text{stats}}^{(1)}), \dots, \
            (m_{\text{stats}}^{(T)}, v_{\text{stats}}^{(T)}) \}`. This number
            is incremented every time the method :meth:`checkpoint_stats` is
            called.
    """
    def __init__(self, num_features, momentum=0.1, affine=True,
                 track_running_stats=True, frozen_stats=False,
                 learnable_stats=False):
        r"""
        Args:
            num_features: See argument ``num_features``, for instance, of
                class :class:`torch.nn.BatchNorm1d`.
            momentum: See argument ``momentum`` of class
                :class:`torch.nn.BatchNorm1d`.
            affine: See argument ``affine`` of class
                :class:`torch.nn.BatchNorm1d`. If set to :code:`False`, the
                input activity will simply be "whitened" according to the
                applied layer statistics (except if gain :math:`\gamma` and
                offset :math:`\beta` are passed to the :meth:`forward`
                method).
                Note, if ``learnable_stats`` is :code:`False`, then setting
                ``affine`` to :code:`False` results in no learnable weights
                for this layer (running stats might still be updated, but not
                via gradient descent).
                Note, even if this option is ``False``, one may still pass a
                gain :math:`\gamma` and offset :math:`\beta` to the
                :meth:`forward` method.
            track_running_stats: See argument ``track_running_stats`` of class
                :class:`torch.nn.BatchNorm1d`.
            frozen_stats: If ``True``, the layer statistics are frozen at
                their initial values of :math:`\gamma = 1` and
                :math:`\beta = 0`, i.e., layer activity will not be whitened.
                Note, this option requires ``track_running_stats`` to be set
                to ``False``.
            learnable_stats: If ``True``, the layer statistics are initialized
                as learnable parameters (:code:`requires_grad=True`).
                Note, these extra parameters will be maintained internally and
                not added to the :attr:`weights`. Statistics can always be
                maintained externally and passed to the :meth:`forward`
                method.
                Note, this option requires ``track_running_stats`` to be set
                to ``False``.
        """
        super(BatchNormLayer, self).__init__()

        if learnable_stats:
            # FIXME We need our custom stats computation for this.
            # The running stats updated by `torch.nn.functional.batch_norm` do
            # not allow backpropagation.
            # See here on how they are computed:
            # https://github.com/pytorch/pytorch/blob/96fe2b4ecbbd02143d95f467655a2d697282ac32/aten/src/ATen/native/Normalization.cpp#L137
            raise NotImplementedError('Option "learnable_stats" has not been ' +
                                      'implemented yet!')

        if momentum is None:
            # If one wants to implement this, then please note that the
            # attribute `num_batches_tracked` has to be added. Also, note the
            # extra code for computing the momentum value in the forward method
            # of class `_BatchNorm`:
            # https://pytorch.org/docs/stable/_modules/torch/nn/modules/batchnorm.html#_BatchNorm
            raise NotImplementedError('This reimplementation of the PyTorch ' +
                                      'batchnorm layer does not support ' +
                                      'setting "momentum" to None.')

        if learnable_stats and track_running_stats:
            raise ValueError('Option "track_running_stats" must be set to ' +
                             'False when enabling "learnable_stats".')

        if frozen_stats and track_running_stats:
            raise ValueError('Option "track_running_stats" must be set to ' +
                             'False when enabling "frozen_stats".')

        self._num_features = num_features
        self._momentum = momentum
        self._affine = affine
        self._track_running_stats = track_running_stats
        self._frozen_stats = frozen_stats
        self._learnable_stats = learnable_stats

        self.register_buffer('_num_stats', torch.tensor(0, dtype=torch.long))

        self._weights = nn.ParameterList()
        self._param_shapes = [[num_features], [num_features]]

        if affine:
            # Gamma
            self.register_parameter('scale', nn.Parameter( \
                torch.Tensor(num_features), requires_grad=True))
            # Beta
            self.register_parameter('bias', nn.Parameter( \
                torch.Tensor(num_features), requires_grad=True))

            self._weights.append(self.scale)
            self._weights.append(self.bias)

            nn.init.ones_(self.scale)
            nn.init.zeros_(self.bias)
        elif not learnable_stats:
            self._weights = None

        if learnable_stats:
            # Don't forget to add the new params to `self._weights`.
            # Don't forget to add shapes to `self._param_shapes`.
            raise NotImplementedError()
        elif track_running_stats or frozen_stats:
            # Note, in case of frozen stats, we just don't update the stats
            # initialized here later on.
            self.checkpoint_stats()
        else:
            mname, vname = self._stats_names(0)
            self.register_buffer(mname, None)
            self.register_buffer(vname, None)

    @property
    def weights(self):
        """Getter for read-only attribute :attr:`weights`.

        Returns:
            A :class:`torch.nn.ParameterList` or ``None``, if no parameters
            are internally maintained.
        """
        return self._weights

    @property
    def param_shapes(self):
        """Getter for read-only attribute :attr:`param_shapes`.

        Returns:
            A list of lists of integers.
        """
        return self._param_shapes

    @property
    def hyper_shapes(self):
        """Getter for read-only attribute :attr:`hyper_shapes`.

        Returns:
            A list of lists of integers.
        """
        # FIXME not implemented attribute. Do we even need the attribute, given
        # that all components are individually passed to the forward method?
        raise NotImplementedError('Not implemented yet!')
        return self._hyper_shapes

    @property
    def num_stats(self):
        """Getter for read-only attribute :attr:`num_stats`.

        Returns:
            (int)
        """
        return self._num_stats

    def forward(self, inputs, running_mean=None, running_var=None, weight=None,
                bias=None, stats_id=None):
        r"""Apply batch normalization to given layer activations.

        Based on the state of this module (attribute :attr:`training`), the
        configuration of this layer and the parameters currently passed, the
        behavior of this function will be different.

        The core of this method still relies on the function
        :func:`torch.nn.functional.batch_norm`. In the following we list the
        different behaviors of this method based on the context.

        **In training mode:**

        We first consider the case that this module is in training mode, i.e.,
        :meth:`torch.nn.Module.train` has been called.

        Usually, during training, the running statistics are not used when
        computing the output, instead the statistics computed on the current
        batch are used (denoted by *use batch stats* in the table below).
        However, the batch statistics are typically updated during training
        (denoted by *update running stats* in the table below).

        The above described scenario would correspond to passing batch
        statistics to the function :func:`torch.nn.functional.batch_norm` and
        setting the parameter ``training`` to ``True``.

        +----------------------+---------------------+-------------------------+
        | **training mode**    | **use batch stats** | **update running stats**|
        +----------------------+---------------------+-------------------------+
        | given stats          | Yes                 | Yes                     |
        +----------------------+---------------------+-------------------------+
        | track running stats  | Yes                 | Yes                     |
        +----------------------+---------------------+-------------------------+
        | frozen stats         | No                  | No                      |
        +----------------------+---------------------+-------------------------+
        | learnable stats      | Yes                 | Yes [1]_                |
        +----------------------+---------------------+-------------------------+
        |no track running stats| Yes                 | No                      |
        +----------------------+---------------------+-------------------------+

        The meaning of each row in this table is as follows:

        - **given stats**: External stats are provided via the parameters
          ``running_mean`` and ``running_var``.
        - **track running stats**: If ``track_running_stats`` was set to
          ``True`` in the constructor and no stats were given.
        - **frozen stats**: If ``frozen_stats`` was set to ``True`` in the
          constructor and no stats were given.
        - **learnable stats**: If ``learnable_stats`` was set to ``True`` in
          the constructor and no stats were given.
        - **no track running stats**: If none of the above options apply, then
          the statistics will always be computed from the current batch (also
          in eval mode).

        .. note::
            If provided, running stats specified via ``running_mean`` and
            ``running_var`` always have priority.

        .. [1] We use a custom implementation to update the running
            statistics, that is compatible with backpropagation.

        **In evaluation mode:**

        We now consider the case that this module is in evaluation mode, i.e.,
        :meth:`torch.nn.Module.eval` has been called.

        Here is the same table as above just for the evaluation mode.

        +----------------------+---------------------+-------------------------+
        | **evaluation mode**  | **use batch stats** | **update running stats**|
        +----------------------+---------------------+-------------------------+
        | track running stats  | No                  | No                      |
        +----------------------+---------------------+-------------------------+
        | frozen stats         | No                  | No                      |
        +----------------------+---------------------+-------------------------+
        | learnable stats      | No                  | No                      |
        +----------------------+---------------------+-------------------------+
        | given stats          | No                  | No                      |
        +----------------------+---------------------+-------------------------+
        |no track running stats| Yes                 | No                      |
        +----------------------+---------------------+-------------------------+

        Args:
            inputs: The inputs to the batchnorm layer.
            running_mean (optional): Running mean stats
                :math:`m_{\text{stats}}`. This option has priority, i.e., any
                internally maintained statistics are ignored if given.

                .. note::
                    If specified, then ``running_var`` also has to be
                    specified.
            running_var (optional): Similar to option ``running_mean``, but
                for the running variance stats :math:`v_{\text{stats}}`

                .. note::
                    If specified, then ``running_mean`` also has to be
                    specified.
            weight (optional): The gain factors :math:`\gamma`. If given, any
                internal gains are ignored. If option ``affine`` was set to
                ``False`` in the constructor and this option remains ``None``,
                then no gains are multiplied to the "whitened" inputs.
            bias (optional): The behavior of this option is similar to option
                ``weight``, except that this option represents the offsets
                :math:`\beta`.
            stats_id: This argument is optional except if multiple running
                stats checkpoints exist (i.e., attribute :attr:`num_stats` is
                greater than 1) and no running stats have been provided to
                this method.

                .. note::
                    This argument is ignored if running stats have been
                    passed.

        Returns:
            The layer activation ``inputs`` after batch-norm has been applied.
        """
        assert (running_mean is None and running_var is None or \
                running_mean is not None and running_var is not None)

        if not self._affine:
            if weight is None or bias is None:
                raise ValueError('Layer was generated in non-affine mode. ' +
                                 'Therefore, arguments "weight" and "bias" ' +
                                 'may not be None.')

        # No gains given but we have internal gains.
        # Otherwise, if no gains are given we leave `weight` as None.
        if weight is None and self._affine:
            weight = self.scale
        if bias is None and self._affine:
            bias = self.bias

        stats_given = running_mean is not None

        if (running_mean is None or running_var is None):
            if stats_id is None and self.num_stats > 1:
                raise ValueError('Parameter "stats_id" is not defined but ' +
                                 'multiple running stats are available.')
            elif self._track_running_stats:
                if stats_id is None:
                    stats_id = 0
                assert (stats_id < self.num_stats)

                rm, rv = self.get_stats(stats_id)

                if running_mean is None:
                    running_mean = rm
                if running_var is None:
                    running_var = rv
        elif stats_id is not None:
            warn('Parameter "stats_id" is ignored since running stats have ' +
                 'been provided.')

        momentum = self._momentum

        if stats_given or self._track_running_stats:
            return F.batch_norm(inputs, running_mean, running_var,
                                weight=weight, bias=bias,
                                training=self.training, momentum=momentum)

        if self._learnable_stats:
            raise NotImplementedError()

        if self._frozen_stats:
            return F.batch_norm(inputs, running_mean, running_var,
                                weight=weight, bias=bias, training=False)

            # TODO implement scale and shift here. Note, that `running_mean`
            # and `running_var` are always 0 and 1, resp. Therefore, the call
            # to `F.batch_norm` is a waste of computation.
            # ret = inputs
            # if weight is not None:
            #     # Multiply `ret` with `weight` such that dimensions are
            #     # respected.
            #     pass
            # if bias is not None:
            #     # Add `bias` to modified `ret` such that dimensions are
            # return ret
        else:
            assert (not self._track_running_stats)

            # Always compute statistics based on current batch.
            return F.batch_norm(inputs, None, None, weight=weight, bias=bias,
                                training=True, momentum=momentum)

    def checkpoint_stats(self, device=None):
        """Buffers for a new set of running stats will be registered.

        Calling this function will also increment the attribute
        :attr:`num_stats`.

        Args:
            device (optional): If not provided, the newly created statistics
                will either be moved to the device of the most recent
                statistics or to CPU if no prior statistics exist.
        """
        assert (self._track_running_stats or \
                self._frozen_stats and self._num_stats == 0)

        if device is None:
            if self.num_stats > 0:
                mname_old, _ = self._stats_names(self._num_stats - 1)
                device = getattr(self, mname_old).device

        if self._learnable_stats:
            raise NotImplementedError()

        mname, vname = self._stats_names(self._num_stats)
        self._num_stats += 1

        self.register_buffer(mname, torch.zeros(self._num_features,
                                                device=device))
        self.register_buffer(vname, torch.ones(self._num_features,
                                               device=device))

    def get_stats(self, stats_id=None):
        """Get a set of running statistics (means and variances).

        Args:
            stats_id (optional): ID of stats. If not provided, the most recent
                stats are returned.

        Returns:
            (tuple): Tuple containing:

            - **running_mean**
            - **running_var**
        """
        if stats_id is None:
            stats_id = self.num_stats - 1

        assert (stats_id < self.num_stats)

        mname, vname = self._stats_names(stats_id)

        running_mean = getattr(self, mname)
        running_var = getattr(self, vname)

        return running_mean, running_var

    def _stats_names(self, stats_id):
        """Get the buffer names for mean and variance statistics depending
        on the ``stats_id``, i.e., the ID of the stats checkpoint.

        Args:
            stats_id: ID of stats.

        Returns:
            (tuple): Tuple containing:

            - **mean_name**
            - **var_name**
        """
        mean_name = 'mean_%d' % stats_id
        var_name = 'var_%d' % stats_id

        return mean_name, var_name

if __name__ == '__main__':
    pass
            #     # respected.
            #     pass
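To make the intended continual-learning workflow concrete, here is a minimal usage sketch of the layer defined above (module path and shapes are assumptions): train on task 0, checkpoint the statistics, then evaluate any task by its `stats_id`.

import torch
# Assumes BatchNormLayer from above is importable, e.g.:
# from utils.batchnorm_layer import BatchNormLayer

bn = BatchNormLayer(16, track_running_stats=True)

bn.train()
y = bn(torch.randn(8, 16))      # uses batch stats, updates checkpoint 0

bn.checkpoint_stats()           # open a fresh stats set for the next task
# ... train task 1 with stats_id=1; its running stats go to checkpoint 1 ...

bn.eval()
y0 = bn(torch.randn(8, 16), stats_id=0)  # whiten with task-0 statistics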
test_pynative_hccl_allreduce.py
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""test hccl allreduce performance with 8p"""

import os
from multiprocessing import Process, Queue
import pytest
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor
from mindspore import dtype as mstype
from mindspore.ops import operations as P
import mindspore.communication.management as D
from mindspore import context
from mindspore.context import ParallelMode

MINDSPORE_HCCL_CONFIG_PATH = "/home/workspace/mindspore_config/hccl/rank_table_8p.json"

np.random.seed(1)
os.environ['GLOG_v'] = str(2)

class AllReduceNet(nn.Cell):
    def __init__(self):
        super(AllReduceNet, self).__init__()
        self.mul = P.Mul()
        self.all_reduce = P.AllReduce()
        self.add = P.Add()

    def construct(self, x):
def train_allreduce_8p(q, device_id, device_num):
    os.system("mkdir " + str(device_id))
    os.chdir(str(device_id))
    context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend",
                        device_id=device_id)
    os.environ['MINDSPORE_HCCL_CONFIG_PATH'] = MINDSPORE_HCCL_CONFIG_PATH
    os.environ['RANK_ID'] = str(device_id)
    os.environ['RANK_SIZE'] = str(device_num)
    D.init()
    context.reset_auto_parallel_context()
    context.set_auto_parallel_context(parallel_mode=ParallelMode.DATA_PARALLEL,
                                      gradients_mean=False,
                                      device_num=device_num)

    net = AllReduceNet()
    input_x = np.ones([3, 4]).astype(np.float32)
    output = net(Tensor(input_x, mstype.float32))
    q.put(output)

@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_single
def test_pynative_hccl_allreduce_8p():
    device_num = 8
    process = []
    q = Queue()
    for i in range(device_num):
        device_id = i
        process.append(Process(target=train_allreduce_8p,
                               args=(q, device_id, device_num)))

    for i in range(device_num):
        process[i].start()

    print("Waiting for all subprocesses done...")

    for i in range(device_num):
        process[i].join()

    # check result
    for i in range(device_num):
        expect_output = [[256, 256, 256, 256], [256, 256, 256, 256],
                         [256, 256, 256, 256]]
        assert not q.empty()
        output = Tensor(q.get())
        assert np.allclose(output.asnumpy(), expect_output)

    for i in range(device_num):
        os.system("rm -rf " + str(i))

    print("End training...")
        x = self.mul(x, 2)
        y1 = Tensor(np.array([[2, 2, 2, 2], [2, 2, 2, 2],
                              [2, 2, 2, 2]])).astype(np.float32)
        z = self.add(x, y1)
        z = self.all_reduce(z)
        y2 = Tensor(np.array([[-16, -16, -16, -16], [-16, -16, -16, -16],
                              [-16, -16, -16, -16]])).astype(np.float32)
        out = self.add(z, y2)
        out = self.all_reduce(out)
        out = self.mul(out, 2)
        return out
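The expected value of 256 in the test above follows directly from the construct graph; a worked per-element check (all tensors are constant and 8 ranks participate in each AllReduce sum):

# Per element, starting from x = 1 on each of the 8 devices:
#   x * 2                 ->   2
#   + y1 (= 2)            ->   4
#   AllReduce (sum of 8)  ->  32
#   + y2 (= -16)          ->  16
#   AllReduce (sum of 8)  -> 128
#   * 2                   -> 256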
lxc_template_unit_test.go
package docker import ( "bufio" "fmt" "io/ioutil" "math/rand" "os" "strings" "testing" "time" ) func TestLXCConfig(t *testing.T) { root, err := ioutil.TempDir("", "TestLXCConfig") if err != nil { t.Fatal(err) } defer os.RemoveAll(root) // Memory is allocated randomly for testing rand.Seed(time.Now().UTC().UnixNano()) memMin := 33554432 memMax := 536870912 mem := memMin + rand.Intn(memMax-memMin) // CPU shares as well cpuMin := 100 cpuMax := 10000 cpu := cpuMin + rand.Intn(cpuMax-cpuMin) container := &Container{ root: root, Config: &Config{ Memory: int64(mem), CpuShares: int64(cpu), NetworkDisabled: true, }, hostConfig: &HostConfig{ Privileged: false, }, } if err := container.generateLXCConfig(); err != nil { t.Fatal(err) } grepFile(t, container.lxcConfigPath(), fmt.Sprintf("lxc.cgroup.memory.limit_in_bytes = %d", mem)) grepFile(t, container.lxcConfigPath(), fmt.Sprintf("lxc.cgroup.memory.memsw.limit_in_bytes = %d", mem*2)) } func TestCustomLxcConfig(t *testing.T) { root, err := ioutil.TempDir("", "TestCustomLxcConfig") if err != nil { t.Fatal(err) } defer os.RemoveAll(root) container := &Container{ root: root, Config: &Config{ Hostname: "foobar", NetworkDisabled: true, }, hostConfig: &HostConfig{ Privileged: false, LxcConf: []KeyValuePair{ { Key: "lxc.utsname", Value: "docker", }, { Key: "lxc.cgroup.cpuset.cpus", Value: "0,1", }, }, }, }
	if err := container.generateLXCConfig(); err != nil {
t.Fatal(err) } grepFile(t, container.lxcConfigPath(), "lxc.utsname = docker") grepFile(t, container.lxcConfigPath(), "lxc.cgroup.cpuset.cpus = 0,1") } func grepFile(t *testing.T, path string, pattern string) { f, err := os.Open(path) if err != nil { t.Fatal(err) } defer f.Close() r := bufio.NewReader(f) var ( line string ) err = nil for err == nil { line, err = r.ReadString('\n') if strings.Contains(line, pattern) == true { return } } t.Fatalf("grepFile: pattern \"%s\" not found in \"%s\"", pattern, path) } func TestEscapeFstabSpaces(t *testing.T) { var testInputs = map[string]string{ " ": "\\040", "": "", "/double space": "/double\\040\\040space", "/some long test string": "/some\\040long\\040test\\040string", "/var/lib/docker": "/var/lib/docker", " leading": "\\040leading", "trailing ": "trailing\\040", } for in, exp := range testInputs { if out := escapeFstabSpaces(in); exp != out { t.Logf("Expected %s got %s", exp, out) t.Fail() } } }
test_examples.py
# Copyright The PyTorch Lightning team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import subprocess import sys from pathlib import Path from typing import List, Optional, Tuple import pytest root = Path(__file__).parent.parent.parent def call_script(filepath: str, args: Optional[List[str]] = None, timeout: Optional[int] = 60 * 5) -> Tuple[int, str, str]: if args is None: args = [] args = [str(a) for a in args] command = [sys.executable, filepath] + args print(" ".join(command)) p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) try: stdout, stderr = p.communicate(timeout=timeout) except subprocess.TimeoutExpired: p.kill() stdout, stderr = p.communicate() stdout = stdout.decode("utf-8") stderr = stderr.decode("utf-8") return p.returncode, stdout, stderr def run_test(filepath): code, stdout, stderr = call_script(filepath) assert not code print(f"{filepath} STDOUT: {stdout}") print(f"{filepath} STDERR: {stderr}") @pytest.mark.parametrize( "step,file", [ ("finetuning", "image_classification.py"), ("finetuning", "tabular_classification.py"), ("predict", "classify_image.py"), ("predict", "classify_tabular.py"), # "classify_text.py" TODO: takes too long ] ) def test_finetune_example(tmpdir, step, file):
    with tmpdir.as_cwd():
        run_test(str(root / "flash_examples" / step / file))
def test_generic_example(tmpdir): with tmpdir.as_cwd(): run_test(str(root / "flash_examples" / "generic_task.py"))
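# Minimal usage sketch for `call_script` (reusing the generic task script
# referenced above): it returns the subprocess exit code plus decoded output.
#
#     code, out, err = call_script(str(root / "flash_examples" / "generic_task.py"))
#     assert code == 0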
validation.rs
use state::*; use result::ValidationResult; pub trait Validator<M> { fn validate(&mut self, model: &M) -> ValidationResult<bool>; } pub trait Rule<T, S> { fn validate(&self, input:&T, state: &mut S) -> ValidationResult<()>; } impl <T, S, F> Rule<T, S> for F where F: Fn(&T, &mut S) -> ValidationResult<()> { fn validate(&self, input:&T, state: &mut S) -> ValidationResult<()> { (*self)(input, state) } } pub struct ValidationSchema<M> { pub state: ValidationState, pub rules: Vec<Box<Rule<M, ValidationState>>> } impl <M> ValidationSchema<M> { pub fn new() -> Self { ValidationSchema { state: ValidationState::new(), rules: vec![], } } pub fn rule(&mut self, r: Box<Rule<M, ValidationState>>) { self.rules.push(r) } } impl <M> Validator<M> for ValidationSchema<M> { fn validate(&mut self, model: &M) -> ValidationResult<bool> { for rule in self.rules.iter() { if let Err(err) = rule.validate(model, &mut self.state) { self.state.valid = false; self.state.errors.push(err); } } Ok(self.state.valid) } } #[cfg(test)] mod tests { use super::*; use super::super::result::*; use super::super::state::*; #[allow(dead_code)] struct TestStruct { num: i32, text: String, } impl TestStruct { pub fn new<T>(num: i32, text: T) -> TestStruct where T: Into<String> { TestStruct { num: num,
                text: text.into(),
            }
        }
    }

#[test] pub fn test_null_rule() { let mut v = ValidationSchema::<TestStruct>::new(); v.rule(Box::new(|_m: &TestStruct, _vs: &mut ValidationState| { Ok(()) })); let a = TestStruct::new(123, "hello"); assert_eq!(v.validate(&a).unwrap_or(false), true); } #[test] pub fn test_accept_rule() { let mut v = ValidationSchema::<TestStruct>::new(); v.rule(Box::new(|_m: &TestStruct, vs: &mut ValidationState| { vs.accept("field name"); Ok(()) })); let a = TestStruct::new(123, "hello"); assert_eq!(v.validate(&a).unwrap_or(false), true); } #[test] pub fn test_reject_rule() { let mut v = ValidationSchema::<TestStruct>::new(); v.rule(Box::new(|_m: &TestStruct, vs: &mut ValidationState| { vs.reject("field name", ValidationError::InvalidValue("test error".to_owned())); Ok(()) })); let a = TestStruct::new(123, "hello"); assert_eq!(v.validate(&a).unwrap_or(true), false); } #[test] pub fn test_err_rule() { let mut v = ValidationSchema::<TestStruct>::new(); v.rule(Box::new(|_m: &TestStruct, _vs: &mut ValidationState| -> ValidationResult<()> { Err(ValidationError::ApplicationError("test error".to_owned())) })); let a = TestStruct::new(123, "hello"); assert_eq!(v.validate(&a).unwrap_or(true), false); assert_eq!(v.state.errors.len(), 1); assert_eq!(format!("{}", v.state.errors.get(0).unwrap()), "Application error: test error"); } }
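// Minimal sketch of a field-level rule built from a closure (the field name
// and error message are illustrative; the API calls mirror the tests above):
//
//     let mut v = ValidationSchema::<TestStruct>::new();
//     v.rule(Box::new(|m: &TestStruct, vs: &mut ValidationState| {
//         if m.num < 0 {
//             vs.reject("num", ValidationError::InvalidValue("negative".to_owned()));
//         }
//         Ok(())
//     }));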
percentUtils.js
export function getWidthAsPercentOfTotalWidth(percent, totalWidth) {
  return parseInt(totalWidth * (percent / 100), 10);
}
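// Example: getWidthAsPercentOfTotalWidth(25, 400) === 100; fractional results
// are truncated toward zero by parseInt.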
lib.rs
//! Quantized Density Fields represent an information space with a user-specified number of dimensions.
// #![feature(test)] extern crate petgraph; extern crate rayon; extern crate uuid; // extern crate test; pub mod error; pub mod id; pub mod lod; pub mod qdf; pub use error::*; pub use id::*; pub use lod::*; pub use qdf::*;
form-item.tsx
import React, { FC, useContext, useCallback, useState } from 'react' import classNames from 'classnames' import { NativeProps } from '../../utils/native-props' import { Field, FormInstance } from 'rc-field-form' import type { FieldProps } from 'rc-field-form/lib/Field' import FieldContext from 'rc-field-form/lib/FieldContext' import type { Meta, InternalNamePath } from 'rc-field-form/lib/interface' import { devWarning } from '../../utils/dev-log' import { FormContext, NoStyleItemContext } from './context' import { toArray } from './utils' import List, { ListItemProps } from '../list' import type { FormLayout } from './index' import Popover from '../popover' import { QuestionCircleOutline } from 'antd-mobile-icons' import { useConfig } from '../config-provider' import { undefinedFallback } from '../../utils/undefined-fallback' const NAME_SPLIT = '__SPLIT__' type RenderChildren<Values = any> = ( form: FormInstance<Values> ) => React.ReactNode type ChildrenType<Values = any> = RenderChildren<Values> | React.ReactNode type RcFieldProps = Omit<FieldProps, 'children'> const classPrefix = `adm-form-item` export type FormItemProps = Pick< RcFieldProps, | 'dependencies' | 'valuePropName' | 'name' | 'rules' | 'messageVariables' | 'trigger' | 'validateTrigger' | 'shouldUpdate' | 'initialValue' > & Pick< ListItemProps, 'style' | 'onClick' | 'extra' | 'clickable' | 'arrow' | 'description' > & { label?: React.ReactNode help?: React.ReactNode hasFeedback?: boolean required?: boolean noStyle?: boolean disabled?: boolean hidden?: boolean layout?: FormLayout childElementPosition?: 'normal' | 'right' children?: ChildrenType } & NativeProps interface MemoInputProps { value: any update: number children: React.ReactNode } const MemoInput = React.memo( ({ children }: MemoInputProps) => children as JSX.Element, (prev, next) => prev.value === next.value && prev.update === next.update ) type FormItemLayoutProps = Pick< FormItemProps, | 'className' | 'style' | 'required' | 'hasFeedback' | 'disabled' | 'label' | 'help' | 'onClick' | 'hidden' | 'layout' | 'extra' | 'clickable' | 'arrow' | 'description' | 'childElementPosition' > & { htmlFor?: string errors: string[] warnings: string[] children: React.ReactNode } const FormItemLayout: React.FC<FormItemLayoutProps> = props => { const { className, style, extra, label, help, required, disabled, children, htmlFor, hidden, arrow, childElementPosition = 'normal', } = props const context = useContext(FormContext) const { locale } = useConfig() const hasFeedback = props.hasFeedback !== undefined ? props.hasFeedback : context.hasFeedback const layout = props.layout || context.layout const requiredMark = (() => { const { requiredMarkStyle } = context switch (requiredMarkStyle) { case 'asterisk': return ( required && ( <span className={`${classPrefix}-required-asterisk`}>*</span> ) ) case 'text-required': return ( required && ( <span className={`${classPrefix}-required-text`}> ({locale.Form.required}) </span> ) ) case 'text-optional': return ( !required && ( <span className={`${classPrefix}-required-text`}> ({locale.Form.optional}) </span> ) ) default: return null } })() const labelElement = label ? 
( <label className={`${classPrefix}-label`} htmlFor={htmlFor}> {label} {requiredMark} {help && ( <Popover content={help} mode='dark' trigger='click'> <span className={`${classPrefix}-label-help`} onClick={e => { e.preventDefault() }} > <QuestionCircleOutline /> </span> </Popover> )} </label> ) : null const description = ( <> {props.description} {hasFeedback && ( <> {props.errors.map((error, index) => ( <div key={`error-${index}`} className={`${classPrefix}-feedback-error`} > {error} </div> ))} {props.warnings.map((warning, index) => ( <div key={`warning-${index}`} className={`${classPrefix}-feedback-warning`} > {warning} </div> ))} </> )} </> ) return ( <List.Item style={style} title={layout === 'vertical' && labelElement} prefix={layout === 'horizontal' && labelElement} extra={extra} description={description} className={classNames( classPrefix, className, `${classPrefix}-${layout}`, { [`${classPrefix}-hidden`]: hidden, [`${classPrefix}-has-error`]: props.errors.length, } )} disabled={disabled} onClick={props.onClick} clickable={props.clickable} arrow={arrow} > <div className={classNames( `${classPrefix}-child`, `${classPrefix}-child-position-${childElementPosition}` )} > <div className={classNames(`${classPrefix}-child-inner`)}> {children} </div> </div> </List.Item> ) } export const FormItem: FC<FormItemProps> = props => { const { // 样式相关 className, style, // FormItem 相关 label, help, extra, hasFeedback, name, required, noStyle, hidden, layout, childElementPosition, description, // Field 相关 disabled, rules, children, messageVariables, trigger = 'onChange', validateTrigger = trigger, onClick, shouldUpdate, dependencies, clickable, arrow, ...fieldProps } = props const { name: formName } = useContext(FormContext) const { validateTrigger: contextValidateTrigger } = useContext(FieldContext) const mergedValidateTrigger = undefinedFallback( validateTrigger, contextValidateTrigger, trigger ) const updateRef = React.useRef(0) updateRef.current += 1 const [subMetas, setSubMetas] = useState<Record<string, Meta>>({}) const onSubMetaChange = useCallback( (subMeta: Meta & { destroy?: boolean }, namePath: InternalNamePath) => { setSubMetas(prevSubMetas => { const nextSubMetas = { ...prevSubMetas } const nameKey = namePath.join(NAME_SPLIT) if (subMeta.destroy) { delete nextSubMetas[nameKey] } else { nextSubMetas[nameKey] = subMeta } return nextSubMetas }) }, [setSubMetas] ) function renderLayout(
    baseChildren: React.ReactNode,
    fieldId?: string,
    meta?: Meta,
    isRequired?: boolean
  ) {
    if (noStyle && !hidden) {
      return baseChildren
    }

    const curErrors = meta?.errors ?? []
    const errors = Object.keys(subMetas).reduce(
      (subErrors: string[], key: string) => {
        const errors = subMetas[key]?.errors ?? []
        if (errors.length) {
          subErrors = [...subErrors, ...errors]
        }
        return subErrors
      },
      curErrors
    )
    const curWarnings = meta?.warnings ?? []
    const warnings = Object.keys(subMetas).reduce(
      (subWarnings: string[], key: string) => {
        const warnings = subMetas[key]?.warnings ?? []
        if (warnings.length) {
          subWarnings = [...subWarnings, ...warnings]
        }
        return subWarnings
      },
      curWarnings
    )

    return (
      <FormItemLayout
        className={className}
        style={style}
        label={label}
        extra={extra}
        help={help}
        description={description}
        required={isRequired}
        disabled={disabled}
        hasFeedback={hasFeedback}
        htmlFor={fieldId}
        errors={errors}
        warnings={warnings}
        onClick={onClick}
        hidden={hidden}
        layout={layout}
        childElementPosition={childElementPosition}
        clickable={clickable}
        arrow={arrow}
      >
        <NoStyleItemContext.Provider value={onSubMetaChange}>
          {baseChildren}
        </NoStyleItemContext.Provider>
      </FormItemLayout>
    )
  }

  const isRenderProps = typeof children === 'function'

  if (!name && !isRenderProps && !props.dependencies) {
    return renderLayout(children) as JSX.Element
  }

  let Variables: Record<string, string> = {}
  if (typeof label === 'string') {
    Variables.label = label
  }
  if (messageVariables) {
    Variables = { ...Variables, ...messageVariables }
  }

  const notifyParentMetaChange = useContext(NoStyleItemContext)
  const onMetaChange = (meta: Meta & { destroy?: boolean }) => {
    if (noStyle && notifyParentMetaChange) {
      const namePath = meta.name
      notifyParentMetaChange(meta, namePath)
    }
  }

  return (
    <Field
      {...fieldProps}
      name={name}
      shouldUpdate={shouldUpdate}
      dependencies={dependencies}
      rules={rules}
      trigger={trigger}
      validateTrigger={mergedValidateTrigger}
      onMetaChange={onMetaChange}
      messageVariables={Variables}
    >
      {(control, meta, context) => {
        let childNode: React.ReactNode = null

        const isRequired =
          required !== undefined
            ? required
            : rules &&
              rules.some(
                rule => !!(rule && typeof rule === 'object' && rule.required)
              )

        const nameList = toArray(name).length && meta ? meta.name : []
        const fieldId = (
          nameList.length > 0 && formName ? [formName, ...nameList] : nameList
        ).join('_')

        if (shouldUpdate && dependencies) {
          devWarning(
            'Form.Item',
            "`shouldUpdate` and `dependencies` shouldn't be used together."
          )
        }

        if (isRenderProps) {
          if ((shouldUpdate || dependencies) && !name) {
            childNode = (children as RenderChildren)(context)
          } else {
            if (!(shouldUpdate || dependencies)) {
              devWarning(
                'Form.Item',
                '`children` of render props only work with `shouldUpdate` or `dependencies`.'
              )
            }
            if (name) {
              devWarning(
                'Form.Item',
                "Do not use `name` with `children` of render props since it's not a field."
              )
            }
          }
          // not render props
        } else if (dependencies && !name) {
          devWarning(
            'Form.Item',
            'Must set `name` or use render props when `dependencies` is set.'
          )
        } else if (React.isValidElement(children)) {
          if (children.props.defaultValue) {
            devWarning(
              'Form.Item',
              '`defaultValue` will not work on controlled Field. You should use `initialValues` of Form instead.'
) } const childProps = { ...children.props, ...control } if (!childProps.id) { childProps.id = fieldId } // We should keep user origin event handler const triggers = new Set<string>([ ...toArray(trigger), ...toArray(mergedValidateTrigger), ]) triggers.forEach(eventName => { childProps[eventName] = (...args: any[]) => { control[eventName]?.(...args) children.props[eventName]?.(...args) } }) childNode = ( <MemoInput value={control[props.valuePropName || 'value']} update={updateRef.current} > {React.cloneElement(children, childProps)} </MemoInput> ) } else { if (name) { devWarning( 'Form.Item', '`name` is only used for validate React element. If you are using Form.Item as layout display, please remove `name` instead.' ) } childNode = children } return renderLayout(childNode, fieldId, meta, isRequired) }} </Field> ) }
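// Minimal usage sketch (assumes this component is rendered inside the
// matching Form provider from this library; the field name, rule, and child
// input below are illustrative only):
//
//   <FormItem name='username' label='Username' rules={[{ required: true }]}>
//     <input />
//   </FormItem>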
Churn.py
# @Author: dileep
# @Last Modified by: dileep

from collections import OrderedDict
import os
from typing import Tuple, Iterable, Sequence, Dict, Union
import numpy as np
import pandas as pd
from sklearn.preprocessing import LabelEncoder, OneHotEncoder
from sklearn.model_selection import train_test_split
from . import datapath
from ..preprocessing import Encoder
from ..sampling import hold_out

#TODO: Make this a subclass of torch.utils.data.Dataset
class Churn:
    """
    Class for loading the `churn` dataset to predict whether customer `exited` or not
    Parameters:
    ----------
    features : Iterable[str]
        List of features to be used in training and testing.
        NOTE: Do not include the dependent variable
        Options: {RowNumber,CustomerId,Surname,CreditScore,Geography,Gender,
                  Age,Tenure,Balance,NumOfProducts,HasCrCard,IsActiveMember,
                  EstimatedSalary}
    Attributes:
    ----------
    raw_data : pd.Series
        Raw data returned in the form of a pandas dataframe
    train_data : Tuple[np.ndarray, np.ndarray]
        Tuple of (features, targets) where each is a numpy ndarray
    test_data : Tuple[np.ndarray, np.ndarray]
        Tuple of (features, targets) where each is a numpy ndarray
    """
    _feature_dict = {
        'multi-category': {'Geography'},
        'binary-category': {'Gender', 'HasCrCard', 'IsActiveMember', 'Exited'},
        'int': {'CreditScore', 'Age', 'Tenure', 'NumOfProducts'},
        'float': {'Balance', 'EstimatedSalary'}
    }

    def __init__(self, features: Union[Iterable[str], str] = 'all') -> None:
        churn_path = os.path.join(datapath(), 'churn/Churn_Modeling.csv')
        self.raw_data = pd.read_csv(churn_path, index_col=0)
        if features == 'all':
            features = self.all_features
        assert self._validate_features(features), "Invalid features given"
        self._features = features + ['Exited']

    def __call__(self):
        raw_train, raw_test = hold_out(self.raw_data[self._features])
        feat_meta = self._get_feat_meta(self._features)
        data_encoder = Encoder(feat_meta)
        return data_encoder.encode(raw_train, raw_test, 'Exited')

    @property
    def all_features(self) -> Iterable[str]:
        """
        Returns all the possible features that can be used
        Returns:
        -------
        Iterable[str]
            A list of all possible features
        """
        features = list(self.raw_data.columns)
        return [f for f in features
                if f not in {'Exited', 'RowNumber', 'CustomerId', 'Surname'}]

    def _validate_features(self, features: Iterable[str]) -> bool:
        """
        Returns whether the input set of features is valid
        Parameters:
        ----------
        features : Iterable[str]
            Features input to the class
        Returns:
        -------
        bool
            True/False based on validity
        """
        all_features = set()
        for f_set in self._feature_dict.values():
            all_features.update(f_set)
        return not any(filter(lambda f: f not in all_features, features))

    def _get_feat_meta(self, features: Iterable[str]) -> Dict[str, str]:
        """
        Returns the type for each feature
        Parameters:
        ----------
        features : Iterable[str]
            A list of features that are to be used for classification
        Returns:
        -------
        Dict[str, str]
            Dictionary of features and their corresponding types
        """
        invert_fdict = {frozenset(v): k for k, v in self._feature_dict.items()}
        feat_meta: Dict[str, str] = OrderedDict()
        for feat in features:
            for feat_group, data_type in invert_fdict.items():
                if feat in feat_group:
                    feat_meta[feat] = data_type
                    break  # each feature belongs to exactly one group
        return feat_meta

    def encode_features(self, features: Iterable[str]) -> Tuple[np.ndarray, np.ndarray]:
        # Union of both categorical groups (`|`); a plain `or` would return
        # only the first (non-empty) set and skip multi-category features.
        cat_features = (self._feature_dict['binary-category'] |
                        self._feature_dict['multi-category'])
        for feat in features:
            if feat in cat_features:
                self.pp
def split_data(self, features: Iterable[str]) -> Sequence[np.ndarray]: """ Splits the raw data into training and testing using the features as a filter Parameters: ---------- features : Iterable[str] Features that are to be used in the training and testing data Returns: ------- Sequence[np.ndarray] Sequence of x_train, x_test, y_train, y_test """ pass
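# Minimal usage sketch (the CSV location depends on the local `datapath()`
# layout, and the unpacking below assumes `Encoder.encode` returns encoded
# train/test splits):
#
#     churn = Churn(features=['CreditScore', 'Geography', 'Age', 'Balance'])
#     train_data, test_data = churn()   # hold_out split, then Encoder.encode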
main.py
from lamp import Lamp def run():
    lamp = Lamp(is_turned_on=False)

    while True:
        command = str(
            input('''
            ¿Qué deseas hacer?

            [p]render
            [a]pagar
            [s]alir
            '''))

        if command == 'p':
            lamp.turn_on()
        elif command == 'a':
            lamp.turn_off()
        else:
            break
if __name__ == '__main__': run()
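# Menu translation (the prompt above is Spanish): [p] turn the lamp on,
# [a] turn it off, [s] (or any other key) exit the loop.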
app-routing.module.ts
import { NgModule } from '@angular/core'; import { RouterModule } from '@angular/router'; import { CanDeactivateGuard } from './auth-guard/can-deactivate-guard.service'; import { AuthGuard } from './auth-guard/auth-guard.service'; import { TranslateService } from 'ng2-translate'; @NgModule({ imports: [ RouterModule.forRoot([ { path: '', redirectTo: 'home', pathMatch: 'full' }, { path: 'home', loadChildren: './home/home.module#HomeModule' } ]) ], exports: [ RouterModule ], providers: [ CanDeactivateGuard, TranslateService ] }) export class
AppRoutingModule {}
fonduer_utils.py
import emmental import numpy as np from fonduer import Meta from emmental.modules.embedding_module import EmbeddingModule from emmental.data import EmmentalDataLoader from emmental.model import EmmentalModel from emmental.learner import EmmentalLearner from fonduer.learning.utils import collect_word_counter from fonduer.learning.dataset import FonduerDataset from fonduer.learning.task import create_task from troy200_utils import entity_level_f1 ABSTAIN = -1 FALSE = 0 TRUE = 1 def get_methods(ATTRIBUTE, gold, gold_file, all_docs): train_docs = all_docs[0] dev_docs = all_docs[1] test_docs = all_docs[2] def train_model(cands, F, align_type, model_type="LogisticRegression"): # Extract candidates and features based on the align type (row/column) align_val = 0 if align_type == "row" else 1 train_cands = cands[align_val][0] F_train = F[align_val][0] train_marginals = np.array([[0,1] if gold[align_val](x) else [1,0] for x in train_cands[0]]) # 1.) Setup training config config = { "meta_config": {"verbose": True}, "model_config": {"model_path": None, "device": 0, "dataparallel": False}, "learner_config": { "n_epochs": 50, "optimizer_config": {"lr": 0.001, "l2": 0.0}, "task_scheduler": "round_robin", }, "logging_config": { "evaluation_freq": 1, "counter_unit": "epoch", "checkpointing": False, "checkpointer_config": { "checkpoint_metric": {f"{ATTRIBUTE}/{ATTRIBUTE}/train/loss": "min"}, "checkpoint_freq": 1, "checkpoint_runway": 2, "clear_intermediate_checkpoints": True, "clear_all_checkpoints": True, }, }, } emmental.init(Meta.log_path) emmental.Meta.update_config(config=config) # 2.) Collect word counter from training data word_counter = collect_word_counter(train_cands) # 3.) Generate word embedding module for LSTM model # (in Logistic Regression, we generate it since Fonduer dataset requires word2id dict) # Geneate special tokens arity = 2 specials = [] for i in range(arity): specials += [f"~~[[{i}", f"{i}]]~~"] emb_layer = EmbeddingModule( word_counter=word_counter, word_dim=300, specials=specials ) # 4.) Generate dataloader for training set # No noise in Gold labels train_dataloader = EmmentalDataLoader( task_to_label_dict={ATTRIBUTE: "labels"}, dataset=FonduerDataset( ATTRIBUTE, train_cands[0], F_train[0], emb_layer.word2id, train_marginals, ), split="train", batch_size=100, shuffle=True, ) # 5.) Training tasks = create_task( ATTRIBUTE, 2, F_train[0].shape[1], 2, emb_layer, model=model_type # "LSTM" ) model = EmmentalModel(name=f"{ATTRIBUTE}_task") for task in tasks: model.add_task(task) emmental_learner = EmmentalLearner() emmental_learner.learn(model, [train_dataloader]) return (model, emb_layer) def eval_model(model, emb_layer, cands, F, align_type = "row"): # Extract candidates and features based on the align type (row/column)
align_val = 0 if align_type == "row" else 1 train_cands = cands[align_val][0] dev_cands = cands[align_val][1] test_cands = cands[align_val][2] F_train = F[align_val][0] F_dev = F[align_val][1] F_test = F[align_val][2] row_on = True if align_type == "row" else False col_on = True if align_type == "col" else False # Generate dataloader for test data test_dataloader = EmmentalDataLoader( task_to_label_dict={ATTRIBUTE: "labels"}, dataset=FonduerDataset( ATTRIBUTE, test_cands[0], F_test[0], emb_layer.word2id, 2 ), split="test", batch_size=100, shuffle=False, ) test_preds = model.predict(test_dataloader, return_preds=True) positive = np.where(np.array(test_preds["probs"][ATTRIBUTE])[:, TRUE] > 0.6) true_pred = [test_cands[0][_] for _ in positive[0]] test_results = entity_level_f1(true_pred, gold_file, ATTRIBUTE, test_docs, row_on=row_on, col_on=col_on) # Run on dev and train set for validation # We run the predictions also on our training and dev set, to validate that everything seems to work smoothly # Generate dataloader for dev data dev_dataloader = EmmentalDataLoader( task_to_label_dict={ATTRIBUTE: "labels"}, dataset=FonduerDataset( ATTRIBUTE, dev_cands[0], F_dev[0], emb_layer.word2id, 2 ), split="test", batch_size=100, shuffle=False, ) dev_preds = model.predict(dev_dataloader, return_preds=True) positive_dev = np.where(np.array(dev_preds["probs"][ATTRIBUTE])[:, TRUE] > 0.6) true_dev_pred = [dev_cands[0][_] for _ in positive_dev[0]] dev_results = entity_level_f1(true_dev_pred, gold_file, ATTRIBUTE, dev_docs, row_on=row_on, col_on=col_on) # Generate dataloader for train data train_dataloader = EmmentalDataLoader( task_to_label_dict={ATTRIBUTE: "labels"}, dataset=FonduerDataset( ATTRIBUTE, train_cands[0], F_train[0], emb_layer.word2id, 2 ), split="test", batch_size=100, shuffle=False, ) train_preds = model.predict(train_dataloader, return_preds=True) positive_train = np.where(np.array(train_preds["probs"][ATTRIBUTE])[:, TRUE] > 0.6) true_train_pred = [train_cands[0][_] for _ in positive_train[0]] train_results = entity_level_f1(true_train_pred, gold_file, ATTRIBUTE, train_docs, row_on=row_on, col_on=col_on) return [train_results, dev_results, test_results]
liftkitchen.js
$(function() { consoleInit(); start(main); }); const LIFT_ABI = [{"inputs":[{"internalType":"address","name":"_boardroom","type":"address"},{"internalType":"address","name":"_share","type":"address"},{"internalType":"address","name":"_lptoken","type":"address"},{"internalType":"uint256","name":"_starttime","type":"uint256"}],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"previousOperator","type":"address"},{"indexed":true,"internalType":"address","name":"newOperator","type":"address"}],"name":"OperatorTransferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"previousOwner","type":"address"},{"indexed":true,"internalType":"address","name":"newOwner","type":"address"}],"name":"OwnershipTransferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint256","name":"reward","type":"uint256"}],"name":"RewardAdded","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"reward","type":"uint256"}],"name":"RewardPaid","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Staked","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Withdrawn","type":"event"},{"inputs":[],"name":"DURATION","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"_balances","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"_totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"boardroom","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"burnRewards","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"},{"internalType":"address","name":"tokenAddress","type":"address"},{"internalType":"address","name":"sendTo","type":"address"}],"name":"cleanUpDust","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"daysElapsed","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"earned","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"exit","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"isOperator","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"lastTimeRewardApplicable","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type
":"function"},{"inputs":[],"name":"lastUpdateTime","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"lockedOutDate","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"lockoutPeriod","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"lpt","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"reward","type":"uint256"}],"name":"notifyRewardAmount","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"operator","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"owner","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"periodFinish","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"renounceOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"rewardDistribution","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardPerToken","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardPerTokenStored","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardRate","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"rewards","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_rewardDistribution","type":"address"}],"name":"setRewardDistribution","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"share","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"stake","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"stakeInBoardroom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"staker","type":"address"},{"internalType":"address","name":"from","type":"address"},{"internalType":"uint256","name":"amount","type":"uint256"},{"internalType":"bool","name":"lockout","type":"bool"}],"name":"stakeLP","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"starttime","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"newOperator_","type":"address"}],"name":"transferOperator","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"newOwner","type":"address"}],"name":"transfe
rOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"newBoardroom","type":"address"}],"name":"updateBoardroom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"newToken","type":"address"}],"name":"updateStakingToken","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"userRewardPerTokenPaid","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"withdraw","outputs":[],"stateMutability":"nonpayable","type":"function"}] const ShortPools = [ { address : "0xc62e2C1E8e7078F66A989ebD47936B00aadF05f9", stakeTokenFunction : "alUSD", abi: [{"inputs":[{"internalType":"address","name":"_LIFT","type":"address"},{"internalType":"address","name":"_alUSD","type":"address"},{"internalType":"address","name":"_boardroom","type":"address"},{"internalType":"uint256","name":"_starttime","type":"uint256"}],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"previousOperator","type":"address"},{"indexed":true,"internalType":"address","name":"newOperator","type":"address"}],"name":"OperatorTransferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"previousOwner","type":"address"},{"indexed":true,"internalType":"address","name":"newOwner","type":"address"}],"name":"OwnershipTransferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint256","name":"reward","type":"uint256"}],"name":"RewardAdded","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"reward","type":"uint256"}],"name":"RewardPaid","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Staked","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Withdrawn","type":"event"},{"inputs":[],"name":"DURATION","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"LIFT","outputs":[{"internalType":"contract IERC20","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"alUSD","outputs":[{"internalType":"contract 
IERC20","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"boardroom","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"burnRewards","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"},{"internalType":"address","name":"tokenAddress","type":"address"},{"internalType":"address","name":"sendTo","type":"address"}],"name":"cleanUpDust","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"deposits","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"earned","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"exit","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"isOperator","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"lastTimeRewardApplicable","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"lastUpdateTime","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"reward","type":"uint256"}],"name":"notifyRewardAmount","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"operator","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"owner","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"periodFinish","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"renounceOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"rewardDistribution","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardPerToken","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardPerTokenStored","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardRate","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"rewards","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_rewardDistribution","type":"address"}],"name":"setRewardDistribution","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"stake","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"
stakeInBoardroom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"starttime","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"newOperator_","type":"address"}],"name":"transferOperator","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"newOwner","type":"address"}],"name":"transferOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"userRewardPerTokenPaid","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"withdraw","outputs":[],"stateMutability":"nonpayable","type":"function"}] },{ address : "0x9551e5528f7D191Eb6ee45bCE4c455C2C238C9c2", stakeTokenFunction : "BASv2", abi: [{"inputs":[{"internalType":"address","name":"_LIFT","type":"address"},{"internalType":"address","name":"_BASv2","type":"address"},{"internalType":"address","name":"_boardroom","type":"address"},{"internalType":"uint256","name":"_starttime","type":"uint256"}],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"previousOperator","type":"address"},{"indexed":true,"internalType":"address","name":"newOperator","type":"address"}],"name":"OperatorTransferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"previousOwner","type":"address"},{"indexed":true,"internalType":"address","name":"newOwner","type":"address"}],"name":"OwnershipTransferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint256","name":"reward","type":"uint256"}],"name":"RewardAdded","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"reward","type":"uint256"}],"name":"RewardPaid","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Staked","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Withdrawn","type":"event"},{"inputs":[],"name":"BASv2","outputs":[{"internalType":"contract IERC20","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"DURATION","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"LIFT","outputs":[{"internalType":"contract 
IERC20","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"boardroom","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"burnRewards","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"},{"internalType":"address","name":"tokenAddress","type":"address"},{"internalType":"address","name":"sendTo","type":"address"}],"name":"cleanUpDust","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"deposits","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"earned","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"exit","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"isOperator","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"lastTimeRewardApplicable","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"lastUpdateTime","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"reward","type":"uint256"}],"name":"notifyRewardAmount","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"operator","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"owner","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"periodFinish","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"renounceOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"rewardDistribution","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardPerToken","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardPerTokenStored","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardRate","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"rewards","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_rewardDistribution","type":"address"}],"name":"setRewardDistribution","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"stake","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"
stakeInBoardroom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"starttime","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"newOperator_","type":"address"}],"name":"transferOperator","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"newOwner","type":"address"}],"name":"transferOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"userRewardPerTokenPaid","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"withdraw","outputs":[],"stateMutability":"nonpayable","type":"function"}] },{ address : "0xe325b9f54B35692cEd0952B0459133e200088096", stakeTokenFunction : "iFARM", abi: [{"inputs":[{"internalType":"address","name":"_LIFT","type":"address"},{"internalType":"address","name":"_iFARM","type":"address"},{"internalType":"address","name":"_boardroom","type":"address"},{"internalType":"uint256","name":"_starttime","type":"uint256"}],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"previousOperator","type":"address"},{"indexed":true,"internalType":"address","name":"newOperator","type":"address"}],"name":"OperatorTransferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"previousOwner","type":"address"},{"indexed":true,"internalType":"address","name":"newOwner","type":"address"}],"name":"OwnershipTransferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint256","name":"reward","type":"uint256"}],"name":"RewardAdded","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"reward","type":"uint256"}],"name":"RewardPaid","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Staked","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Withdrawn","type":"event"},{"inputs":[],"name":"DURATION","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"LIFT","outputs":[{"internalType":"contract 
IERC20","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"boardroom","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"burnRewards","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"},{"internalType":"address","name":"tokenAddress","type":"address"},{"internalType":"address","name":"sendTo","type":"address"}],"name":"cleanUpDust","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"deposits","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"earned","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"exit","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"iFARM","outputs":[{"internalType":"contract IERC20","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"isOperator","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"lastTimeRewardApplicable","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"lastUpdateTime","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"reward","type":"uint256"}],"name":"notifyRewardAmount","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"operator","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"owner","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"periodFinish","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"renounceOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"rewardDistribution","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardPerToken","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardPerTokenStored","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardRate","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"rewards","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_rewardDistribution","type":"address"}],"name":"setRewardDistribution","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType
":"uint256","name":"amount","type":"uint256"}],"name":"stake","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"stakeInBoardroom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"starttime","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"newOperator_","type":"address"}],"name":"transferOperator","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"newOwner","type":"address"}],"name":"transferOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"userRewardPerTokenPaid","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"withdraw","outputs":[],"stateMutability":"nonpayable","type":"function"}] },{ address : "0xe2Cf4ab503276BC693fB05eb2Da00c997E26ee68", stakeTokenFunction : "KBTC", abi: [{"inputs":[{"internalType":"address","name":"_LIFT","type":"address"},{"internalType":"address","name":"_KBTC","type":"address"},{"internalType":"address","name":"_boardroom","type":"address"},{"internalType":"uint256","name":"_starttime","type":"uint256"}],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"previousOperator","type":"address"},{"indexed":true,"internalType":"address","name":"newOperator","type":"address"}],"name":"OperatorTransferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"previousOwner","type":"address"},{"indexed":true,"internalType":"address","name":"newOwner","type":"address"}],"name":"OwnershipTransferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint256","name":"reward","type":"uint256"}],"name":"RewardAdded","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"reward","type":"uint256"}],"name":"RewardPaid","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Staked","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Withdrawn","type":"event"},{"inputs":[],"name":"DURATION","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"KBTC","outputs":[{"internalType":"contract IERC20","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"LIFT","outputs":[{"internalType":"contract 
IERC20","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"boardroom","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"burnRewards","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"},{"internalType":"address","name":"tokenAddress","type":"address"},{"internalType":"address","name":"sendTo","type":"address"}],"name":"cleanUpDust","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"deposits","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"earned","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"exit","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"isOperator","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"lastTimeRewardApplicable","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"lastUpdateTime","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"reward","type":"uint256"}],"name":"notifyRewardAmount","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"operator","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"owner","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"periodFinish","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"renounceOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"rewardDistribution","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardPerToken","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardPerTokenStored","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardRate","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"rewards","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_rewardDistribution","type":"address"}],"name":"setRewardDistribution","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"stake","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"
stakeInBoardroom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"starttime","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"newOperator_","type":"address"}],"name":"transferOperator","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"newOwner","type":"address"}],"name":"transferOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"userRewardPerTokenPaid","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"withdraw","outputs":[],"stateMutability":"nonpayable","type":"function"}] },{ address : "0x570CcB67cD8511f959e8842c5F78d62CeD873DF3", stakeTokenFunction : "PICKLE", abi: [{"inputs":[{"internalType":"address","name":"_LIFT","type":"address"},{"internalType":"address","name":"_PICKLE","type":"address"},{"internalType":"address","name":"_boardroom","type":"address"},{"internalType":"uint256","name":"_starttime","type":"uint256"}],"stateMutability":"nonpayable","type":"constructor"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"previousOperator","type":"address"},{"indexed":true,"internalType":"address","name":"newOperator","type":"address"}],"name":"OperatorTransferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"previousOwner","type":"address"},{"indexed":true,"internalType":"address","name":"newOwner","type":"address"}],"name":"OwnershipTransferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"internalType":"uint256","name":"reward","type":"uint256"}],"name":"RewardAdded","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"reward","type":"uint256"}],"name":"RewardPaid","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Staked","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"internalType":"address","name":"user","type":"address"},{"indexed":false,"internalType":"uint256","name":"amount","type":"uint256"}],"name":"Withdrawn","type":"event"},{"inputs":[],"name":"DURATION","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"LIFT","outputs":[{"internalType":"contract IERC20","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"PICKLE","outputs":[{"internalType":"contract 
IERC20","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"balanceOf","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"boardroom","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"burnRewards","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"},{"internalType":"address","name":"tokenAddress","type":"address"},{"internalType":"address","name":"sendTo","type":"address"}],"name":"cleanUpDust","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"deposits","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"account","type":"address"}],"name":"earned","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"exit","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"isOperator","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"lastTimeRewardApplicable","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"lastUpdateTime","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"reward","type":"uint256"}],"name":"notifyRewardAmount","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"operator","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"owner","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"periodFinish","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"renounceOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"rewardDistribution","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardPerToken","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardPerTokenStored","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"rewardRate","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"rewards","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_rewardDistribution","type":"address"}],"name":"setRewardDistribution","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"stake","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"
stakeInBoardroom","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[],"name":"starttime","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"totalSupply","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"newOperator_","type":"address"}],"name":"transferOperator","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"newOwner","type":"address"}],"name":"transferOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"address","name":"","type":"address"}],"name":"userRewardPerTokenPaid","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"uint256","name":"amount","type":"uint256"}],"name":"withdraw","outputs":[],"stateMutability":"nonpayable","type":"function"}] },].map(a => { return { address: a.address, abi: a.abi, stakeTokenFunction: a.stakeTokenFunction, rewardTokenFunction: "LIFT" } }) const Pools = [ "0x4DB2fa451e1051A013A42FaD98b04C2aB81043Af", //wbtc - lfbtc "0xC3C79869ED93c88E1227a1Ca3542c9B947BA9e0c" //lfbtc - lift, ].map(a => { return { address: a, abi: LIFT_ABI, stakeTokenFunction: "lpt", rewardTokenFunction: "share" } }) async function main() { const App = await init_ethers(); _print(`Initialized ${App.YOUR_ADDRESS}`); _print("Reading smart contracts...\n"); let tokens = {}; let prices = {}; //load deathpool prices await loadSynthetixPoolInfo(App, tokens, prices, Pools[0].abi, Pools[0].address, Pools[0].rewardTokenFunction, Pools[0].stakeTokenFunction); await loadSynthetixPoolInfo(App, tokens, prices, Pools[1].abi, Pools[1].address, Pools[1].rewardTokenFunction, Pools[1].stakeTokenFunction); let p = await loadMultipleSynthetixLIFTPools(App, tokens, prices, Pools.concat(ShortPools)) _print_bold(`Total staked: $${formatMoney(p.staked_tvl)}`); if (p.totalUserStaked > 0) { _print(`You are staking a total of $${formatMoney(p.totalUserStaked)} at an APR of ${(p.totalAPR * 100).toFixed(2)}%\n`); } hideLoading(); } async function
printSynthetixLIFTPool(App, info, chain="eth") { info.poolPrices.print_price(chain); _print(`${info.rewardTokenTicker} Per Week: ${info.weeklyRewards.toFixed(2)} ($${formatMoney(info.usdPerWeek)})`); const weeklyAPR = info.usdPerWeek / info.staked_tvl * 100; const dailyAPR = weeklyAPR / 7; const yearlyAPR = weeklyAPR * 52; _print(`APR: Day ${dailyAPR.toFixed(2)}% Week ${weeklyAPR.toFixed(2)}% Year ${yearlyAPR.toFixed(2)}%`); const userStakedUsd = info.userStaked * info.stakeTokenPrice; const userStakedPct = userStakedUsd / info.staked_tvl * 100; _print(`You are staking ${info.userStaked.toFixed(6)} ${info.stakeTokenTicker} ` + `$${formatMoney(userStakedUsd)} (${userStakedPct.toFixed(2)}% of the pool).`); if (info.userStaked > 0) { info.poolPrices.print_contained_price(info.userStaked); const userWeeklyRewards = userStakedPct * info.weeklyRewards / 100; const userDailyRewards = userWeeklyRewards / 7; const userYearlyRewards = userWeeklyRewards * 52; _print(`Estimated ${info.rewardTokenTicker} earnings:` + ` Day ${userDailyRewards.toFixed(2)} ($${formatMoney(userDailyRewards*info.rewardTokenPrice)})` + ` Week ${userWeeklyRewards.toFixed(2)} ($${formatMoney(userWeeklyRewards*info.rewardTokenPrice)})` + ` Year ${userYearlyRewards.toFixed(2)} ($${formatMoney(userYearlyRewards*info.rewardTokenPrice)})`); } const approveTENDAndStake = async function() { return rewardsContract_stake(info.stakeTokenAddress, info.stakingAddress, App) } const unstake = async function() { return rewardsContract_unstake(info.stakingAddress, App) } const movetoboardroom = async function() { return rewardsContract_movetobardroom(info.stakingAddress, App) } const exit = async function() { return rewardsContract_exit(info.stakingAddress, App) } const revoke = async function() { return rewardsContract_resetApprove(info.stakeTokenAddress, info.stakingAddress, App) } switch (chain) { case "eth": _print(`<a target="_blank" href="https://etherscan.io/address/${info.stakingAddress}#code">Etherscan</a>`); break; } _print_link(`Unstake ${info.userStaked.toFixed(6)} ${info.stakeTokenTicker}`, unstake) _print_link(`Move to Boardroom ${info.earned.toFixed(6)} ${info.rewardTokenTicker} ($${formatMoney(info.earned*info.rewardTokenPrice)})`, movetoboardroom) if (info.stakeTokenTicker != "ETH") { _print_link(`Revoke (set approval to 0)`, revoke) } _print_link(`Exit`, exit) _print(""); return { staked_tvl: info.poolPrices.staked_tvl, userStaked : userStakedUsd, apr : yearlyAPR } } async function loadMultipleSynthetixLIFTPools(App, tokens, prices, pools) { let totalStaked = 0, totalUserStaked = 0, individualAPRs = []; const infos = await Promise.all(pools.map(p => loadSynthetixPoolInfo(App, tokens, prices, p.abi, p.address, p.rewardTokenFunction, p.stakeTokenFunction))); for (const i of infos) { let p = await printSynthetixLIFTPool(App, i); totalStaked += p.staked_tvl || 0; totalUserStaked += p.userStaked || 0; if (p.userStaked > 0) { individualAPRs.push(p.userStaked * p.apr / 100); } } let totalAPR = totalUserStaked == 0 ? 0 : individualAPRs.reduce((x,y)=>x+y, 0) / totalUserStaked; return { staked_tvl : totalStaked, totalUserStaked, totalAPR }; }
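// Hedged sketch (not part of the original page) of the APR math above: per pool,
// APR = usdPerWeek / staked_tvl * 100, annualized by * 52; across pools,
// loadMultipleSynthetixLIFTPools blends each position's yearly USD earnings
// (userStaked * apr / 100) over the total user stake. Illustrative numbers only.
function blendedAPR(positions) { // positions: [{ userStaked: USD, apr: percent }]
  const totalStaked = positions.reduce((sum, p) => sum + p.userStaked, 0);
  const yearlyUsd = positions.reduce((sum, p) => sum + p.userStaked * p.apr / 100, 0);
  return totalStaked === 0 ? 0 : yearlyUsd / totalStaked; // fraction; * 100 when printed
}
// Example: $100 at 20% APR plus $300 at 40% APR => (20 + 120) / 400 = 0.35, i.e. 35%.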
map.js
/* global afterEach, beforeEach, describe, expect, it, L, sinon */ describe('L.npmap.map', function() { var element, server; afterEach(function() { element = null; server.restore(); });
beforeEach(function() { element = document.createElement('div'); element.id = 'map'; server = sinon.fakeServer.create(); }); it('passes options to constructor when called without new', function() { var map = L.npmap.map({ div: element, smallzoomControl: false }); expect(map.options.smallzoomControl).to.equal(false); }); describe('constructor', function() { it('creates the map when the div property is an object', function() { var map = L.npmap.map({ div: element }); expect(map).to.be.ok(); }); it('sets a default center for the map if none is specified', function() { var map = L.npmap.map({ div: element }); expect(map.getCenter().lat).to.be.ok(); }); it('sets a default zoom for the map if none is specified', function() { var map = L.npmap.map({ div: element }); expect(map.getZoom()).to.be.ok(); }); it('adds a default baseLayer if none is specified', function() { }); it('renames the "layers" property "overlays", if specified', function() { }); it('switches preset layers in when specified in the baseLayers property', function() { }); }); });
_main.py
# built-in
from argparse import ArgumentParser
from types import MappingProxyType
from typing import Callable, Mapping, Sequence

# app
from ._lint import lint_command
from ._memtest import memtest_command
from ._stub import stub_command
from ._test import test_command

CommandsType = Mapping[str, Callable[[Sequence[str]], int]]
COMMANDS: CommandsType = MappingProxyType(dict(
    lint=lint_command,
    memtest=memtest_command,
    stub=stub_command,
    test=test_command,
))


def main(argv: Sequence[str], *, commands: CommandsType = COMMANDS) -> int:
    parser = ArgumentParser(prog='python3 -m deal')
    parser.add_argument('command', choices=sorted(commands))
    args, unknown_argv = parser.parse_known_args(argv)
    command = commands[args.command]
    return command(unknown_argv)
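# Hedged usage sketch (an assumed entry point, not taken from this package): `main`
# consumes only the command name and forwards the remaining argv to the chosen
# handler, so e.g. `python3 -m deal lint --json` dispatches to lint_command(['--json']).
if __name__ == '__main__':
    import sys
    sys.exit(main(sys.argv[1:]))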
loading.stories.tsx
import React from 'react'; import { ComponentStory, ComponentMeta } from '@storybook/react'; import { Loading as LoadingComponent } from 'binyal'; export default { title: 'Components/Loading', component: LoadingComponent, argTypes: { style: { name: 'Style', description: 'Set the style of the loading component', defaultValue: 'primary', options: ['flat', 'primary', 'warning', 'danger'], control: { type: 'select', }, },
}, } as ComponentMeta<typeof LoadingComponent>; const Template: ComponentStory<typeof LoadingComponent> = (args) => ( <div style={{ display: 'flex', justifyContent: 'start', }} > <div style={{ width: 200, height: 200, position: 'relative', }} > <LoadingComponent {...args} /> </div> </div> ); export const Default = Template.bind({}); Default.args = { style: 'primary', };
pmtsr.rs
#[doc = "Register `PMTSR` reader"] pub struct R(crate::R<PMTSR_SPEC>); impl core::ops::Deref for R { type Target = crate::R<PMTSR_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<PMTSR_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<PMTSR_SPEC>) -> Self { R(reader) } } #[doc = "Register `PMTSR` writer"] pub struct W(crate::W<PMTSR_SPEC>); impl core::ops::Deref for W { type Target = crate::W<PMTSR_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl From<crate::W<PMTSR_SPEC>> for W { #[inline(always)] fn from(writer: crate::W<PMTSR_SPEC>) -> Self { W(writer) } } #[doc = "Test Enable Control for PSRAM\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MTENPS_A { #[doc = "0: Standard operation"] VALUE1 = 0, #[doc = "1: Parity bits under test"] VALUE2 = 1, } impl From<MTENPS_A> for bool { #[inline(always)] fn from(variant: MTENPS_A) -> Self { variant as u8 != 0 } } #[doc = "Field `MTENPS` reader - Test Enable Control for PSRAM"] pub struct MTENPS_R(crate::FieldReader<bool, MTENPS_A>); impl MTENPS_R { pub(crate) fn new(bits: bool) -> Self { MTENPS_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> MTENPS_A { match self.bits { false => MTENPS_A::VALUE1, true => MTENPS_A::VALUE2, } } #[doc = "Checks if the value of the field is `VALUE1`"] #[inline(always)] pub fn is_value1(&self) -> bool { **self == MTENPS_A::VALUE1 } #[doc = "Checks if the value of the field is `VALUE2`"] #[inline(always)] pub fn is_value2(&self) -> bool { **self == MTENPS_A::VALUE2 } } impl core::ops::Deref for MTENPS_R { type Target = crate::FieldReader<bool, MTENPS_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `MTENPS` writer - Test Enable Control for PSRAM"] pub struct MTENPS_W<'a> { w: &'a mut W, } impl<'a> MTENPS_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: MTENPS_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Standard operation"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(MTENPS_A::VALUE1) } #[doc = "Parity bits under test"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(MTENPS_A::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | (value as u32 & 0x01); self.w } } #[doc = "Test Enable Control for DSRAM1\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MTENDS1_A { #[doc = "0: Standard operation"] VALUE1 = 0, #[doc = "1: Parity bits under test"] VALUE2 = 1, } impl From<MTENDS1_A> for bool { #[inline(always)] fn from(variant: MTENDS1_A) -> Self { variant as u8 != 0 } } #[doc = "Field `MTENDS1` reader - Test Enable Control for DSRAM1"] pub struct MTENDS1_R(crate::FieldReader<bool, MTENDS1_A>); impl MTENDS1_R { pub(crate) fn new(bits: bool) -> Self { MTENDS1_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> MTENDS1_A { match self.bits { false => MTENDS1_A::VALUE1, true => MTENDS1_A::VALUE2, } } #[doc = "Checks if the 
value of the field is `VALUE1`"] #[inline(always)] pub fn is_value1(&self) -> bool { **self == MTENDS1_A::VALUE1 } #[doc = "Checks if the value of the field is `VALUE2`"] #[inline(always)] pub fn is_value2(&self) -> bool { **self == MTENDS1_A::VALUE2 } } impl core::ops::Deref for MTENDS1_R { type Target = crate::FieldReader<bool, MTENDS1_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `MTENDS1` writer - Test Enable Control for DSRAM1"] pub struct MTENDS1_W<'a> { w: &'a mut W, } impl<'a> MTENDS1_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: MTENDS1_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Standard operation"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(MTENDS1_A::VALUE1) } #[doc = "Parity bits under test"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(MTENDS1_A::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u32 & 0x01) << 1); self.w } } #[doc = "Test Enable Control for USIC0 Memory\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MTEU0_A { #[doc = "0: Standard operation"] VALUE1 = 0, #[doc = "1: Parity bits under test"] VALUE2 = 1, } impl From<MTEU0_A> for bool { #[inline(always)] fn from(variant: MTEU0_A) -> Self { variant as u8 != 0 } } #[doc = "Field `MTEU0` reader - Test Enable Control for USIC0 Memory"] pub struct MTEU0_R(crate::FieldReader<bool, MTEU0_A>); impl MTEU0_R { pub(crate) fn new(bits: bool) -> Self { MTEU0_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> MTEU0_A { match self.bits { false => MTEU0_A::VALUE1, true => MTEU0_A::VALUE2, } } #[doc = "Checks if the value of the field is `VALUE1`"] #[inline(always)] pub fn is_value1(&self) -> bool { **self == MTEU0_A::VALUE1 } #[doc = "Checks if the value of the field is `VALUE2`"] #[inline(always)] pub fn is_value2(&self) -> bool { **self == MTEU0_A::VALUE2 } } impl core::ops::Deref for MTEU0_R { type Target = crate::FieldReader<bool, MTEU0_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `MTEU0` writer - Test Enable Control for USIC0 Memory"] pub struct MTEU0_W<'a> { w: &'a mut W, } impl<'a> MTEU0_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: MTEU0_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Standard operation"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(MTEU0_A::VALUE1) } #[doc = "Parity bits under test"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(MTEU0_A::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 8)) | ((value as u32 & 0x01) << 8); self.w } } #[doc = "Test Enable Control for USIC1 Memory\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MTEU1_A { #[doc = "0: Standard operation"] VALUE1 = 0, 
#[doc = "1: Parity bits under test"] VALUE2 = 1, } impl From<MTEU1_A> for bool { #[inline(always)] fn from(variant: MTEU1_A) -> Self { variant as u8 != 0 } } #[doc = "Field `MTEU1` reader - Test Enable Control for USIC1 Memory"] pub struct MTEU1_R(crate::FieldReader<bool, MTEU1_A>); impl MTEU1_R { pub(crate) fn new(bits: bool) -> Self { MTEU1_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> MTEU1_A { match self.bits { false => MTEU1_A::VALUE1, true => MTEU1_A::VALUE2, } } #[doc = "Checks if the value of the field is `VALUE1`"] #[inline(always)] pub fn is_value1(&self) -> bool { **self == MTEU1_A::VALUE1 } #[doc = "Checks if the value of the field is `VALUE2`"] #[inline(always)] pub fn is_value2(&self) -> bool { **self == MTEU1_A::VALUE2 } } impl core::ops::Deref for MTEU1_R { type Target = crate::FieldReader<bool, MTEU1_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `MTEU1` writer - Test Enable Control for USIC1 Memory"] pub struct MTEU1_W<'a> { w: &'a mut W, } impl<'a> MTEU1_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: MTEU1_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Standard operation"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(MTEU1_A::VALUE1) } #[doc = "Parity bits under test"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(MTEU1_A::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 9)) | ((value as u32 & 0x01) << 9); self.w } } #[doc = "Test Enable Control for MultiCAN Memory\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MTEMC_A { #[doc = "0: Standard operation"] VALUE1 = 0, #[doc = "1: Parity bits under test"] VALUE2 = 1, } impl From<MTEMC_A> for bool { #[inline(always)] fn from(variant: MTEMC_A) -> Self { variant as u8 != 0 } } #[doc = "Field `MTEMC` reader - Test Enable Control for MultiCAN Memory"] pub struct MTEMC_R(crate::FieldReader<bool, MTEMC_A>); impl MTEMC_R { pub(crate) fn new(bits: bool) -> Self { MTEMC_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> MTEMC_A { match self.bits { false => MTEMC_A::VALUE1, true => MTEMC_A::VALUE2, } } #[doc = "Checks if the value of the field is `VALUE1`"] #[inline(always)] pub fn is_value1(&self) -> bool { **self == MTEMC_A::VALUE1 } #[doc = "Checks if the value of the field is `VALUE2`"] #[inline(always)] pub fn is_value2(&self) -> bool { **self == MTEMC_A::VALUE2 } } impl core::ops::Deref for MTEMC_R { type Target = crate::FieldReader<bool, MTEMC_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `MTEMC` writer - Test Enable Control for MultiCAN Memory"] pub struct MTEMC_W<'a> { w: &'a mut W, } impl<'a> MTEMC_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: MTEMC_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Standard operation"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(MTEMC_A::VALUE1) } #[doc = "Parity bits under test"] #[inline(always)] pub fn value2(self) -> &'a mut W {
self.variant(MTEMC_A::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 12)) | ((value as u32 & 0x01) << 12); self.w } } #[doc = "Test Enable Control for PMU Prefetch Memory\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MTEPPRF_A { #[doc = "0: Standard operation"] VALUE1 = 0, #[doc = "1: Parity bits under test"] VALUE2 = 1, } impl From<MTEPPRF_A> for bool { #[inline(always)] fn from(variant: MTEPPRF_A) -> Self { variant as u8 != 0 } } #[doc = "Field `MTEPPRF` reader - Test Enable Control for PMU Prefetch Memory"] pub struct MTEPPRF_R(crate::FieldReader<bool, MTEPPRF_A>); impl MTEPPRF_R { pub(crate) fn new(bits: bool) -> Self { MTEPPRF_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> MTEPPRF_A { match self.bits { false => MTEPPRF_A::VALUE1, true => MTEPPRF_A::VALUE2, } } #[doc = "Checks if the value of the field is `VALUE1`"] #[inline(always)] pub fn is_value1(&self) -> bool { **self == MTEPPRF_A::VALUE1 } #[doc = "Checks if the value of the field is `VALUE2`"] #[inline(always)] pub fn is_value2(&self) -> bool { **self == MTEPPRF_A::VALUE2 } } impl core::ops::Deref for MTEPPRF_R { type Target = crate::FieldReader<bool, MTEPPRF_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `MTEPPRF` writer - Test Enable Control for PMU Prefetch Memory"] pub struct MTEPPRF_W<'a> { w: &'a mut W, } impl<'a> MTEPPRF_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: MTEPPRF_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Standard operation"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(MTEPPRF_A::VALUE1) } #[doc = "Parity bits under test"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(MTEPPRF_A::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 13)) | ((value as u32 & 0x01) << 13); self.w } } #[doc = "Test Enable Control for USB Memory\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MTUSB_A { #[doc = "0: Standard operation"] VALUE1 = 0, #[doc = "1: Parity bits under test"] VALUE2 = 1, } impl From<MTUSB_A> for bool { #[inline(always)] fn from(variant: MTUSB_A) -> Self { variant as u8 != 0 } } #[doc = "Field `MTUSB` reader - Test Enable Control for USB Memory"] pub struct MTUSB_R(crate::FieldReader<bool, MTUSB_A>); impl MTUSB_R { pub(crate) fn new(bits: bool) -> Self { MTUSB_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> MTUSB_A { match self.bits { false => MTUSB_A::VALUE1, true => MTUSB_A::VALUE2, } } #[doc = "Checks if the value of the field is `VALUE1`"] #[inline(always)] pub fn is_value1(&self) -> bool { **self == MTUSB_A::VALUE1 } #[doc = "Checks if the value of the field is `VALUE2`"] #[inline(always)] pub fn is_value2(&self) -> bool { **self == MTUSB_A::VALUE2 } } impl 
core::ops::Deref for MTUSB_R { type Target = crate::FieldReader<bool, MTUSB_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `MTUSB` writer - Test Enable Control for USB Memory"] pub struct MTUSB_W<'a> { w: &'a mut W, } impl<'a> MTUSB_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: MTUSB_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Standard operation"] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(MTUSB_A::VALUE1) } #[doc = "Parity bits under test"] #[inline(always)] pub fn value2(self) -> &'a mut W { self.variant(MTUSB_A::VALUE2) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 16)) | ((value as u32 & 0x01) << 16); self.w } } impl R { #[doc = "Bit 0 - Test Enable Control for PSRAM"] #[inline(always)] pub fn mtenps(&self) -> MTENPS_R { MTENPS_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Test Enable Control for DSRAM1"] #[inline(always)] pub fn mtends1(&self) -> MTENDS1_R { MTENDS1_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 8 - Test Enable Control for USIC0 Memory"] #[inline(always)] pub fn mteu0(&self) -> MTEU0_R { MTEU0_R::new(((self.bits >> 8) & 0x01) != 0) } #[doc = "Bit 9 - Test Enable Control for USIC1 Memory"] #[inline(always)] pub fn mteu1(&self) -> MTEU1_R { MTEU1_R::new(((self.bits >> 9) & 0x01) != 0) } #[doc = "Bit 12 - Test Enable Control for MultiCAN Memory"] #[inline(always)] pub fn mtemc(&self) -> MTEMC_R { MTEMC_R::new(((self.bits >> 12) & 0x01) != 0) } #[doc = "Bit 13 - Test Enable Control for PMU Prefetch Memory"] #[inline(always)] pub fn mtepprf(&self) -> MTEPPRF_R { MTEPPRF_R::new(((self.bits >> 13) & 0x01) != 0) } #[doc = "Bit 16 - Test Enable Control for USB Memory"] #[inline(always)] pub fn mtusb(&self) -> MTUSB_R { MTUSB_R::new(((self.bits >> 16) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - Test Enable Control for PSRAM"] #[inline(always)] pub fn mtenps(&mut self) -> MTENPS_W { MTENPS_W { w: self } } #[doc = "Bit 1 - Test Enable Control for DSRAM1"] #[inline(always)] pub fn mtends1(&mut self) -> MTENDS1_W { MTENDS1_W { w: self } } #[doc = "Bit 8 - Test Enable Control for USIC0 Memory"] #[inline(always)] pub fn mteu0(&mut self) -> MTEU0_W { MTEU0_W { w: self } } #[doc = "Bit 9 - Test Enable Control for USIC1 Memory"] #[inline(always)] pub fn mteu1(&mut self) -> MTEU1_W { MTEU1_W { w: self } } #[doc = "Bit 12 - Test Enable Control for MultiCAN Memory"] #[inline(always)] pub fn mtemc(&mut self) -> MTEMC_W { MTEMC_W { w: self } } #[doc = "Bit 13 - Test Enable Control for PMU Prefetch Memory"] #[inline(always)] pub fn mtepprf(&mut self) -> MTEPPRF_W { MTEPPRF_W { w: self } } #[doc = "Bit 16 - Test Enable Control for USB Memory"] #[inline(always)] pub fn mtusb(&mut self) -> MTUSB_W { MTUSB_W { w: self } } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "Parity Memory Test Select Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pmtsr](index.html) module"] pub struct PMTSR_SPEC; impl crate::RegisterSpec for PMTSR_SPEC { type Ux = u32; } #[doc = "`read()` method returns [pmtsr::R](R) reader structure"] impl crate::Readable for PMTSR_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [pmtsr::W](W) writer structure"] impl crate::Writable for PMTSR_SPEC { type Writer = W; } #[doc = "`reset()` method sets PMTSR to value 0"] impl crate::Resettable for PMTSR_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
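// Hedged usage sketch (the peripheral handle below is an assumption; svd2rust exposes
// this register through the owning peripheral's register block):
//
//     // Put the PSRAM parity bits under test without touching the other fields:
//     periph.pmtsr.modify(|_, w| w.mtenps().value2());
//     // Check whether the USB memory parity bits are currently under test:
//     let usb_under_test = periph.pmtsr.read().mtusb().is_value2();
//     // Return every field to standard operation (register resets to 0):
//     periph.pmtsr.reset();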
main.rs
use new_crate::module_a;
fn main() {}
server.rs
//! Server implementation of the HTTP/2.0 protocol. //! //! # Getting started //! //! Running an HTTP/2.0 server requires the caller to manage accepting the //! connections as well as getting the connections to a state that is ready to //! begin the HTTP/2.0 handshake. See [here](../index.html#handshake) for more //! details. //! //! This could be as basic as using Tokio's [`TcpListener`] to accept //! connections, but usually it means using either ALPN or HTTP/1.1 protocol //! upgrades. //! //! Once a connection is obtained, it is passed to [`handshake`], //! which will begin the [HTTP/2.0 handshake]. This returns a future that //! completes once the handshake process is performed and HTTP/2.0 streams may //! be received. //! //! [`handshake`] uses default configuration values. There are a number of //! settings that can be changed by using [`Builder`] instead. //! //! # Inbound streams //! //! The [`Connection`] instance is used to accept inbound HTTP/2.0 streams. It //! does this by implementing [`futures::Stream`]. When a new stream is //! received, a call to [`Connection::accept`] will return `(request, response)`. //! The `request` handle (of type [`http::Request<RecvStream>`]) contains the //! HTTP request head as well as provides a way to receive the inbound data //! stream and the trailers. The `response` handle (of type [`SendResponse`]) //! allows responding to the request, stream the response payload, send //! trailers, and send push promises. //! //! The send ([`SendStream`]) and receive ([`RecvStream`]) halves of the stream //! can be operated independently. //! //! # Managing the connection //! //! The [`Connection`] instance is used to manage connection state. The caller //! is required to call either [`Connection::accept`] or //! [`Connection::poll_close`] in order to advance the connection state. Simply //! operating on [`SendStream`] or [`RecvStream`] will have no effect unless the //! connection state is advanced. //! //! It is not required to call **both** [`Connection::accept`] and //! [`Connection::poll_close`]. If the caller is ready to accept a new stream, //! then only [`Connection::accept`] should be called. When the caller **does //! not** want to accept a new stream, [`Connection::poll_close`] should be //! called. //! //! The [`Connection`] instance should only be dropped once //! [`Connection::poll_close`] returns `Ready`. Once [`Connection::accept`] //! returns `Ready(None)`, there will no longer be any more inbound streams. At //! this point, only [`Connection::poll_close`] should be called. //! //! # Shutting down the server //! //! Graceful shutdown of the server is [not yet //! implemented](https://github.com/hyperium/h2/issues/69). //! //! # Example //! //! A basic HTTP/2.0 server example that runs over TCP and assumes [prior //! knowledge], i.e. both the client and the server assume that the TCP socket //! will use the HTTP/2.0 protocol without prior negotiation. //! //! ```no_run //! use h2::server; //! use http::{Response, StatusCode}; //! use tokio::net::TcpListener; //! //! #[tokio::main] //! pub async fn main() { //! let mut listener = TcpListener::bind("127.0.0.1:5928").await.unwrap(); //! //! // Accept all incoming TCP connections. //! loop { //! if let Ok((socket, _peer_addr)) = listener.accept().await { //! // Spawn a new task to process each connection. //! tokio::spawn(async { //! // Start the HTTP/2.0 connection handshake //! let mut h2 = server::handshake(socket).await.unwrap(); //! // Accept all inbound HTTP/2.0 streams sent over the //! 
// connection. //! while let Some(request) = h2.accept().await { //! let (request, mut respond) = request.unwrap(); //! println!("Received request: {:?}", request); //! //! // Build a response with no body //! let response = Response::builder() //! .status(StatusCode::OK) //! .body(()) //! .unwrap(); //! //! // Send the response back to the client //! respond.send_response(response, true) //! .unwrap(); //! } //! //! }); //! } //! } //! } //! ``` //! //! [prior knowledge]: http://httpwg.org/specs/rfc7540.html#known-http //! [`handshake`]: fn.handshake.html //! [HTTP/2.0 handshake]: http://httpwg.org/specs/rfc7540.html#ConnectionHeader //! [`Builder`]: struct.Builder.html //! [`Connection`]: struct.Connection.html //! [`Connection::poll`]: struct.Connection.html#method.poll //! [`Connection::poll_close`]: struct.Connection.html#method.poll_close //! [`futures::Stream`]: https://docs.rs/futures/0.1/futures/stream/trait.Stream.html //! [`http::Request<RecvStream>`]: ../struct.RecvStream.html //! [`RecvStream`]: ../struct.RecvStream.html //! [`SendStream`]: ../struct.SendStream.html //! [`TcpListener`]: https://docs.rs/tokio-core/0.1/tokio_core/net/struct.TcpListener.html use crate::codec::{Codec, RecvError, UserError}; use crate::frame::{self, Pseudo, PushPromiseHeaderError, Reason, Settings, StreamId}; use crate::proto::{self, Config, Prioritized}; use crate::{FlowControl, PingPong, RecvStream, SendStream}; use bytes::{Buf, Bytes}; use http::{HeaderMap, Method, Request, Response}; use std::future::Future; use std::pin::Pin; use std::task::{Context, Poll}; use std::time::Duration; use std::{convert, fmt, io, mem}; use tokio::io::{AsyncRead, AsyncWrite, ReadBuf}; use tracing::instrument::{Instrument, Instrumented}; /// In progress HTTP/2.0 connection handshake future. /// /// This type implements `Future`, yielding a `Connection` instance once the /// handshake has completed. /// /// The handshake is completed once the connection preface is fully received /// from the client **and** the initial settings frame is sent to the client. /// /// The handshake future does not wait for the initial settings frame from the /// client. /// /// See [module] level docs for more details. /// /// [module]: index.html #[must_use = "futures do nothing unless polled"] pub struct Handshake<T, B: Buf = Bytes> { /// The config to pass to Connection::new after handshake succeeds. builder: Builder, /// The current state of the handshake. state: Handshaking<T, B>, /// Span tracking the handshake span: tracing::Span, } /// Accepts inbound HTTP/2.0 streams on a connection. /// /// A `Connection` is backed by an I/O resource (usually a TCP socket) and /// implements the HTTP/2.0 server logic for that connection. It is responsible /// for receiving inbound streams initiated by the client as well as driving the /// internal state forward. /// /// `Connection` values are created by calling [`handshake`]. Once a /// `Connection` value is obtained, the caller must call [`poll`] or /// [`poll_close`] in order to drive the internal connection state forward. 
/// /// See [module level] documentation for more details /// /// [module level]: index.html /// [`handshake`]: struct.Connection.html#method.handshake /// [`poll`]: struct.Connection.html#method.poll /// [`poll_close`]: struct.Connection.html#method.poll_close /// /// # Examples /// /// ``` /// # use tokio::io::{AsyncRead, AsyncWrite}; /// # use h2::server; /// # use h2::server::*; /// # /// # async fn doc<T: AsyncRead + AsyncWrite + Unpin>(my_io: T) { /// let mut server = server::handshake(my_io).await.unwrap(); /// while let Some(request) = server.accept().await { /// tokio::spawn(async move { /// let (request, respond) = request.unwrap(); /// // Process the request and send the response back to the client /// // using `respond`. /// }); /// } /// # } /// # /// # pub fn main() {} /// ``` #[must_use = "streams do nothing unless polled"] pub struct Connection<T, B: Buf> { connection: proto::Connection<T, Peer, B>, } /// Builds server connections with custom configuration values. /// /// Methods can be chained in order to set the configuration values. /// /// The server is constructed by calling [`handshake`] and passing the I/O /// handle that will back the HTTP/2.0 server. /// /// New instances of `Builder` are obtained via [`Builder::new`]. /// /// See function level documentation for details on the various server /// configuration settings. /// /// [`Builder::new`]: struct.Builder.html#method.new /// [`handshake`]: struct.Builder.html#method.handshake /// /// # Examples /// /// ``` /// # use tokio::io::{AsyncRead, AsyncWrite}; /// # use h2::server::*; /// # /// # fn doc<T: AsyncRead + AsyncWrite + Unpin>(my_io: T) /// # -> Handshake<T> /// # { /// // `server_fut` is a future representing the completion of the HTTP/2.0 /// // handshake. /// let server_fut = Builder::new() /// .initial_window_size(1_000_000) /// .max_concurrent_streams(1000) /// .handshake(my_io); /// # server_fut /// # } /// # /// # pub fn main() {} /// ``` #[derive(Clone, Debug)] pub struct Builder { /// Time to keep locally reset streams around before reaping. reset_stream_duration: Duration, /// Maximum number of locally reset streams to keep at a time. reset_stream_max: usize, /// Initial `Settings` frame to send as part of the handshake. settings: Settings, /// Initial target window size for new connections. initial_target_connection_window_size: Option<u32>, } /// Send a response back to the client /// /// A `SendResponse` instance is provided when receiving a request and is used /// to send the associated response back to the client. It is also used to /// explicitly reset the stream with a custom reason. /// /// It will also be used to initiate push promises linked with the associated /// stream. /// /// If the `SendResponse` instance is dropped without sending a response, then /// the HTTP/2.0 stream will be reset. /// /// See [module] level docs for more details. /// /// [module]: index.html #[derive(Debug)] pub struct SendResponse<B: Buf> { inner: proto::StreamRef<B>, } /// Send a response to a promised request /// /// A `SendPushedResponse` instance is provided when promising a request and is used /// to send the associated response to the client. It is also used to /// explicitly reset the stream with a custom reason. /// /// It can not be used to initiate push promises. /// /// If the `SendPushedResponse` instance is dropped without sending a response, then /// the HTTP/2.0 stream will be reset. /// /// See [module] level docs for more details. 
/// /// [module]: index.html pub struct SendPushedResponse<B: Buf> { inner: SendResponse<B>, } // Manual implementation necessary because of rust-lang/rust#26925 impl<B: Buf + fmt::Debug> fmt::Debug for SendPushedResponse<B> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "SendPushedResponse {{ {:?} }}", self.inner) } } /// Stages of an in-progress handshake. enum Handshaking<T, B: Buf> { /// State 1. Connection is flushing pending SETTINGS frame. Flushing(Instrumented<Flush<T, Prioritized<B>>>), /// State 2. Connection is waiting for the client preface. ReadingPreface(Instrumented<ReadPreface<T, Prioritized<B>>>), /// Dummy state for `mem::replace`. Empty, } /// Flush a Sink struct Flush<T, B> { codec: Option<Codec<T, B>>, } /// Read the client connection preface struct ReadPreface<T, B> { codec: Option<Codec<T, B>>, pos: usize, } #[derive(Debug)] pub(crate) struct Peer; const PREFACE: [u8; 24] = *b"PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n"; /// Creates a new configured HTTP/2.0 server with default configuration /// values backed by `io`. /// /// It is expected that `io` already be in an appropriate state to commence /// the [HTTP/2.0 handshake]. See [Handshake] for more details. /// /// Returns a future which resolves to the [`Connection`] instance once the /// HTTP/2.0 handshake has been completed. The returned [`Connection`] /// instance will be using default configuration values. Use [`Builder`] to /// customize the configuration values used by a [`Connection`] instance. /// /// [HTTP/2.0 handshake]: http://httpwg.org/specs/rfc7540.html#ConnectionHeader /// [Handshake]: ../index.html#handshake /// [`Connection`]: struct.Connection.html /// /// # Examples /// /// ``` /// # use tokio::io::{AsyncRead, AsyncWrite}; /// # use h2::server; /// # use h2::server::*; /// # /// # async fn doc<T: AsyncRead + AsyncWrite + Unpin>(my_io: T) /// # { /// let connection = server::handshake(my_io).await.unwrap(); /// // The HTTP/2.0 handshake has completed, now use `connection` to /// // accept inbound HTTP/2.0 streams. /// # } /// # /// # pub fn main() {} /// ``` pub fn handshake<T>(io: T) -> Handshake<T, Bytes> where T: AsyncRead + AsyncWrite + Unpin, { Builder::new().handshake(io) } // ===== impl Connection ===== impl<T, B> Connection<T, B> where T: AsyncRead + AsyncWrite + Unpin, B: Buf + 'static, { fn handshake2(io: T, builder: Builder) -> Handshake<T, B> { let span = tracing::trace_span!("server_handshake", io = %std::any::type_name::<T>()); let entered = span.enter(); // Create the codec. let mut codec = Codec::new(io); if let Some(max) = builder.settings.max_frame_size() { codec.set_max_recv_frame_size(max as usize); } if let Some(max) = builder.settings.max_header_list_size() { codec.set_max_recv_header_list_size(max as usize); } // Send initial settings frame. codec .buffer(builder.settings.clone().into()) .expect("invalid SETTINGS frame"); // Create the handshake future. let state = Handshaking::from(codec); drop(entered); Handshake { builder, state, span, } } /// Accept the next incoming request on this connection. pub async fn accept( &mut self, ) -> Option<Result<(Request<RecvStream>, SendResponse<B>), crate::Error>> { futures_util::future::poll_fn(move |cx| self.poll_accept(cx)).await } #[doc(hidden)] pub fn poll_accept( &mut self, cx: &mut Context<'_>, ) -> Poll<Option<Result<(Request<RecvStream>, SendResponse<B>), crate::Error>>> { // Always try to advance the internal state. Getting Pending also is // needed to allow this function to return Pending. 
if let Poll::Ready(_) = self.poll_closed(cx)? { // If the socket is closed, don't return anything // TODO: drop any pending streams return Poll::Ready(None); } if let Some(inner) = self.connection.next_incoming() { tracing::trace!("received incoming"); let (head, _) = inner.take_request().into_parts(); let body = RecvStream::new(FlowControl::new(inner.clone_to_opaque())); let request = Request::from_parts(head, body); let respond = SendResponse { inner }; return Poll::Ready(Some(Ok((request, respond)))); } Poll::Pending } /// Sets the target window size for the whole connection. /// /// If `size` is greater than the current value, then a `WINDOW_UPDATE` /// frame will be immediately sent to the remote, increasing the connection /// level window by `size - current_value`. /// /// If `size` is less than the current value, nothing will happen /// immediately. However, as window capacity is released by /// [`FlowControl`] instances, no `WINDOW_UPDATE` frames will be sent /// out until the number of "in flight" bytes drops below `size`. /// /// The default value is 65,535. /// /// See [`FlowControl`] documentation for more details. /// /// [`FlowControl`]: ../struct.FlowControl.html /// [library level]: ../index.html#flow-control pub fn set_target_window_size(&mut self, size: u32) { assert!(size <= proto::MAX_WINDOW_SIZE); self.connection.set_target_window_size(size); } /// Set a new `INITIAL_WINDOW_SIZE` setting (in octets) for stream-level /// flow control for received data. /// /// The `SETTINGS` will be sent to the remote, and only applied once the /// remote acknowledges the change. /// /// This can be used to increase or decrease the window size for existing /// streams. /// /// # Errors /// /// Returns an error if a previous call is still pending acknowledgement /// from the remote endpoint. pub fn set_initial_window_size(&mut self, size: u32) -> Result<(), crate::Error> { assert!(size <= proto::MAX_WINDOW_SIZE); self.connection.set_initial_window_size(size)?; Ok(()) } /// Returns `Ready` when the underlying connection has closed. /// /// If any new inbound streams are received during a call to `poll_closed`, /// they will be queued and returned on the next call to [`poll_accept`]. /// /// This function will advance the internal connection state, driving /// progress on all the other handles (e.g. [`RecvStream`] and [`SendStream`]). /// /// See [here](index.html#managing-the-connection) for more details. /// /// [`poll_accept`]: struct.Connection.html#method.poll_accept /// [`RecvStream`]: ../struct.RecvStream.html /// [`SendStream`]: ../struct.SendStream.html pub fn poll_closed(&mut self, cx: &mut Context) -> Poll<Result<(), crate::Error>> { self.connection.poll(cx).map_err(Into::into) } #[doc(hidden)] #[deprecated(note = "renamed to poll_closed")] pub fn poll_close(&mut self, cx: &mut Context) -> Poll<Result<(), crate::Error>> { self.poll_closed(cx) } /// Sets the connection to a GOAWAY state. /// /// Does not terminate the connection. Must continue being polled to close /// connection. /// /// After flushing the GOAWAY frame, the connection is closed. Any /// outstanding streams do not prevent the connection from closing. This /// should usually be reserved for shutting down when something bad /// external to `h2` has happened, and open streams cannot be properly /// handled. /// /// For graceful shutdowns, see [`graceful_shutdown`](Connection::graceful_shutdown). 
pub fn abrupt_shutdown(&mut self, reason: Reason) { self.connection.go_away_from_user(reason); } /// Starts a [graceful shutdown][1] process. /// /// Must continue being polled to close connection. /// /// It's possible to receive more requests after calling this method, since /// they might have been in-flight from the client already. After about /// 1 RTT, no new requests should be accepted. Once all active streams /// have completed, the connection is closed. /// /// [1]: http://httpwg.org/specs/rfc7540.html#GOAWAY pub fn graceful_shutdown(&mut self) { self.connection.go_away_gracefully(); } /// Takes a `PingPong` instance from the connection. /// /// # Note /// /// This may only be called once. Calling multiple times will return `None`. pub fn ping_pong(&mut self) -> Option<PingPong> { self.connection.take_user_pings().map(PingPong::new) } /// Returns the maximum number of concurrent streams that may be initiated /// by the server on this connection. /// /// This limit is configured by the client peer by sending the /// [`SETTINGS_MAX_CONCURRENT_STREAMS` parameter][1] in a `SETTINGS` frame. /// This method returns the currently acknowledged value received from the /// remote. /// /// [1]: https://tools.ietf.org/html/rfc7540#section-5.1.2 pub fn max_concurrent_send_streams(&self) -> usize { self.connection.max_send_streams() } /// Returns the maximum number of concurrent streams that may be initiated /// by the client on this connection. /// /// This returns the value of the [`SETTINGS_MAX_CONCURRENT_STREAMS` /// parameter][1] sent in a `SETTINGS` frame that has been /// acknowledged by the remote peer. The value to be sent is configured by /// the [`Builder::max_concurrent_streams`][2] method before handshaking /// with the remote peer. /// /// [1]: https://tools.ietf.org/html/rfc7540#section-5.1.2 /// [2]: ../struct.Builder.html#method.max_concurrent_streams pub fn max_concurrent_recv_streams(&self) -> usize { self.connection.max_recv_streams() } } #[cfg(feature = "stream")] impl<T, B> futures_core::Stream for Connection<T, B> where T: AsyncRead + AsyncWrite + Unpin, B: Buf + 'static, { type Item = Result<(Request<RecvStream>, SendResponse<B>), crate::Error>; fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { self.poll_accept(cx) } } impl<T, B> fmt::Debug for Connection<T, B> where T: fmt::Debug, B: fmt::Debug + Buf, { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.debug_struct("Connection") .field("connection", &self.connection) .finish() } } // ===== impl Builder ===== impl Builder { /// Returns a new server builder instance initialized with default /// configuration values. /// /// Configuration methods can be chained on the return value. /// /// # Examples /// /// ``` /// # use tokio::io::{AsyncRead, AsyncWrite}; /// # use h2::server::*; /// # /// # fn doc<T: AsyncRead + AsyncWrite + Unpin>(my_io: T) /// # -> Handshake<T> /// # { /// // `server_fut` is a future representing the completion of the HTTP/2.0 /// // handshake.
/// let server_fut = Builder::new() /// .initial_window_size(1_000_000) /// .max_concurrent_streams(1000) /// .handshake(my_io); /// # server_fut /// # } /// # /// # pub fn main() {} /// ``` pub fn new() -> Builder { Builder { reset_stream_duration: Duration::from_secs(proto::DEFAULT_RESET_STREAM_SECS), reset_stream_max: proto::DEFAULT_RESET_STREAM_MAX, settings: Settings::default(), initial_target_connection_window_size: None, } } /// Indicates the initial window size (in octets) for stream-level /// flow control for received data. /// /// The initial window of a stream is used as part of flow control. For more /// details, see [`FlowControl`]. /// /// The default value is 65,535. /// /// [`FlowControl`]: ../struct.FlowControl.html /// /// # Examples /// /// ``` /// # use tokio::io::{AsyncRead, AsyncWrite}; /// # use h2::server::*; /// # /// # fn doc<T: AsyncRead + AsyncWrite + Unpin>(my_io: T) /// # -> Handshake<T> /// # { /// // `server_fut` is a future representing the completion of the HTTP/2.0 /// // handshake. /// let server_fut = Builder::new() /// .initial_window_size(1_000_000) /// .handshake(my_io); /// # server_fut /// # } /// # /// # pub fn main() {} /// ``` pub fn initial_window_size(&mut self, size: u32) -> &mut Self { self.settings.set_initial_window_size(Some(size)); self } /// Indicates the initial window size (in octets) for connection-level flow control /// for received data. /// /// The initial window of a connection is used as part of flow control. For more details, /// see [`FlowControl`]. /// /// The default value is 65,535. /// /// [`FlowControl`]: ../struct.FlowControl.html /// /// # Examples /// /// ``` /// # use tokio::io::{AsyncRead, AsyncWrite}; /// # use h2::server::*; /// # /// # fn doc<T: AsyncRead + AsyncWrite + Unpin>(my_io: T) /// # -> Handshake<T> /// # { /// // `server_fut` is a future representing the completion of the HTTP/2.0 /// // handshake. /// let server_fut = Builder::new() /// .initial_connection_window_size(1_000_000) /// .handshake(my_io); /// # server_fut /// # } /// # /// # pub fn main() {} /// ``` pub fn initial_connection_window_size(&mut self, size: u32) -> &mut Self { self.initial_target_connection_window_size = Some(size); self } /// Indicates the size (in octets) of the largest HTTP/2.0 frame payload that the /// configured server is able to accept. /// /// The sender may send data frames that are **smaller** than this value, /// but any data larger than `max` will be broken up into multiple `DATA` /// frames. /// /// The value **must** be between 16,384 and 16,777,215. The default value is 16,384. /// /// # Examples /// /// ``` /// # use tokio::io::{AsyncRead, AsyncWrite}; /// # use h2::server::*; /// # /// # fn doc<T: AsyncRead + AsyncWrite + Unpin>(my_io: T) /// # -> Handshake<T> /// # { /// // `server_fut` is a future representing the completion of the HTTP/2.0 /// // handshake. /// let server_fut = Builder::new() /// .max_frame_size(1_000_000) /// .handshake(my_io); /// # server_fut /// # } /// # /// # pub fn main() {} /// ``` /// /// # Panics /// /// This function panics if `max` is not within the legal range specified /// above. pub fn max_frame_size(&mut self, max: u32) -> &mut Self { self.settings.set_max_frame_size(Some(max)); self } /// Sets the max size of received header frames. /// /// This advisory setting informs a peer of the maximum size of header list /// that the sender is prepared to accept, in octets. 
The value is based on /// the uncompressed size of header fields, including the length of the name /// and value in octets plus an overhead of 32 octets for each header field. /// /// This setting is also used to limit the maximum amount of data that is /// buffered to decode HEADERS frames. /// /// # Examples /// /// ``` /// # use tokio::io::{AsyncRead, AsyncWrite}; /// # use h2::server::*; /// # /// # fn doc<T: AsyncRead + AsyncWrite + Unpin>(my_io: T) /// # -> Handshake<T> /// # { /// // `server_fut` is a future representing the completion of the HTTP/2.0 /// // handshake. /// let server_fut = Builder::new() /// .max_header_list_size(16 * 1024) /// .handshake(my_io); /// # server_fut /// # } /// # /// # pub fn main() {} /// ``` pub fn max_header_list_size(&mut self, max: u32) -> &mut Self { self.settings.set_max_header_list_size(Some(max)); self } /// Sets the maximum number of concurrent streams. /// /// The maximum concurrent streams setting only controls the maximum number /// of streams that can be initiated by the remote peer. In other words, /// when this setting is set to 100, this does not limit the number of /// concurrent streams that can be created by the caller. /// /// It is recommended that this value be no smaller than 100, so as to not /// unnecessarily limit parallelism. However, any value is legal, including /// 0. If `max` is set to 0, then the remote will not be permitted to /// initiate streams. /// /// Note that streams in the reserved state, i.e., push promises that have /// been reserved but the stream has not started, do not count against this /// setting. /// /// Also note that if the remote *does* exceed the value set here, it is not /// a protocol level error. Instead, the `h2` library will immediately reset /// the stream. /// /// See [Section 5.1.2] in the HTTP/2.0 spec for more details. /// /// [Section 5.1.2]: https://http2.github.io/http2-spec/#rfc.section.5.1.2 /// /// # Examples /// /// ``` /// # use tokio::io::{AsyncRead, AsyncWrite}; /// # use h2::server::*; /// # /// # fn doc<T: AsyncRead + AsyncWrite + Unpin>(my_io: T) /// # -> Handshake<T> /// # { /// // `server_fut` is a future representing the completion of the HTTP/2.0 /// // handshake. /// let server_fut = Builder::new() /// .max_concurrent_streams(1000) /// .handshake(my_io); /// # server_fut /// # } /// # /// # pub fn main() {} /// ``` pub fn max_concurrent_streams(&mut self, max: u32) -> &mut Self { self.settings.set_max_concurrent_streams(Some(max)); self } /// Sets the maximum number of concurrent locally reset streams. /// /// When a stream is explicitly reset by either calling /// [`SendResponse::send_reset`] or by dropping a [`SendResponse`] instance /// before completing the stream, the HTTP/2.0 specification requires that /// any further frames received for that stream must be ignored for "some /// time". /// /// In order to satisfy the specification, internal state must be maintained /// to implement the behavior. This state grows linearly with the number of /// streams that are locally reset. /// /// The `max_concurrent_reset_streams` setting sets an upper /// bound on the amount of state that is maintained. When this max value is /// reached, the oldest reset stream is purged from memory. /// /// Once the stream has been fully purged from memory, any additional frames /// received for that stream will result in a connection level protocol /// error, forcing the connection to terminate. /// /// The default value is 10.
/// /// # Examples /// /// ``` /// # use tokio::io::{AsyncRead, AsyncWrite}; /// # use h2::server::*; /// # /// # fn doc<T: AsyncRead + AsyncWrite + Unpin>(my_io: T) /// # -> Handshake<T> /// # { /// // `server_fut` is a future representing the completion of the HTTP/2.0 /// // handshake. /// let server_fut = Builder::new() /// .max_concurrent_reset_streams(1000) /// .handshake(my_io); /// # server_fut /// # } /// # /// # pub fn main() {} /// ``` pub fn max_concurrent_reset_streams(&mut self, max: usize) -> &mut Self { self.reset_stream_max = max; self } /// Sets the maximum number of concurrent locally reset streams. /// /// When a stream is explicitly reset by either calling /// [`SendResponse::send_reset`] or by dropping a [`SendResponse`] instance /// before completing the stream, the HTTP/2.0 specification requires that /// any further frames received for that stream must be ignored for "some /// time". /// /// In order to satisfy the specification, internal state must be maintained /// to implement the behavior. This state grows linearly with the number of /// streams that are locally reset. /// /// The `reset_stream_duration` setting configures the max amount of time /// this state will be maintained in memory. Once the duration elapses, the /// stream state is purged from memory. /// /// Once the stream has been fully purged from memory, any additional frames /// received for that stream will result in a connection level protocol /// error, forcing the connection to terminate. /// /// The default value is 30 seconds. /// /// # Examples /// /// ``` /// # use tokio::io::{AsyncRead, AsyncWrite}; /// # use h2::server::*; /// # use std::time::Duration; /// # /// # fn doc<T: AsyncRead + AsyncWrite + Unpin>(my_io: T) /// # -> Handshake<T> /// # { /// // `server_fut` is a future representing the completion of the HTTP/2.0 /// // handshake. /// let server_fut = Builder::new() /// .reset_stream_duration(Duration::from_secs(10)) /// .handshake(my_io); /// # server_fut /// # } /// # /// # pub fn main() {} /// ``` pub fn reset_stream_duration(&mut self, dur: Duration) -> &mut Self { self.reset_stream_duration = dur; self } /// Creates a new configured HTTP/2.0 server backed by `io`. /// /// It is expected that `io` already be in an appropriate state to commence /// the [HTTP/2.0 handshake]. See [Handshake] for more details. /// /// Returns a future which resolves to the [`Connection`] instance once the /// HTTP/2.0 handshake has been completed. /// /// This function also allows the caller to configure the send payload data /// type. See [Outbound data type] for more details. /// /// [HTTP/2.0 handshake]: http://httpwg.org/specs/rfc7540.html#ConnectionHeader /// [Handshake]: ../index.html#handshake /// [`Connection`]: struct.Connection.html /// [Outbound data type]: ../index.html#outbound-data-type. /// /// # Examples /// /// Basic usage: /// /// ``` /// # use tokio::io::{AsyncRead, AsyncWrite}; /// # use h2::server::*; /// # /// # fn doc<T: AsyncRead + AsyncWrite + Unpin>(my_io: T) /// # -> Handshake<T> /// # { /// // `server_fut` is a future representing the completion of the HTTP/2.0 /// // handshake. /// let server_fut = Builder::new() /// .handshake(my_io); /// # server_fut /// # } /// # /// # pub fn main() {} /// ``` /// /// Configures the send-payload data type. In this case, the outbound data /// type will be `&'static [u8]`. 
/// /// ``` /// # use tokio::io::{AsyncRead, AsyncWrite}; /// # use h2::server::*; /// # /// # fn doc<T: AsyncRead + AsyncWrite + Unpin>(my_io: T) /// # -> Handshake<T, &'static [u8]> /// # { /// // `server_fut` is a future representing the completion of the HTTP/2.0 /// // handshake. /// let server_fut: Handshake<_, &'static [u8]> = Builder::new() /// .handshake(my_io); /// # server_fut /// # } /// # /// # pub fn main() {} /// ``` pub fn handshake<T, B>(&self, io: T) -> Handshake<T, B> where T: AsyncRead + AsyncWrite + Unpin, B: Buf + 'static, { Connection::handshake2(io, self.clone()) } } impl Default for Builder { fn default() -> Builder { Builder::new() } } // ===== impl SendResponse ===== impl<B: Buf> SendResponse<B> { /// Send a response to a client request. /// /// On success, a [`SendStream`] instance is returned. This instance can be /// used to stream the response body and send trailers. /// /// If a body or trailers will be sent on the returned [`SendStream`] /// instance, then `end_of_stream` must be set to `false` when calling this /// function. /// /// The [`SendResponse`] instance is already associated with a received /// request. This function may only be called once per instance and only if /// [`send_reset`] has not been previously called. /// /// [`SendResponse`]: # /// [`SendStream`]: ../struct.SendStream.html /// [`send_reset`]: #method.send_reset pub fn send_response( &mut self, response: Response<()>, end_of_stream: bool, ) -> Result<SendStream<B>, crate::Error> { self.inner .send_response(response, end_of_stream) .map(|_| SendStream::new(self.inner.clone())) .map_err(Into::into) } /// Push a request and response to the client /// /// On success, a [`SendResponse`] instance is returned. /// /// [`SendResponse`]: # pub fn push_request( &mut self, request: Request<()>, ) -> Result<SendPushedResponse<B>, crate::Error> { self.inner .send_push_promise(request) .map(|inner| SendPushedResponse { inner: SendResponse { inner }, }) .map_err(Into::into) } /// Send a stream reset to the peer. /// /// This essentially cancels the stream, including any inbound or outbound /// data streams. /// /// If this function is called before [`send_response`], a call to /// [`send_response`] will result in an error. /// /// If this function is called while a [`SendStream`] instance is active, /// any further use of the instance will result in an error. /// /// This function should only be called once. /// /// [`send_response`]: #method.send_response /// [`SendStream`]: ../struct.SendStream.html pub fn send_reset(&mut self, reason: Reason) { self.inner.send_reset(reason) } /// Polls to be notified when the client resets this stream. /// /// If stream is still open, this returns `Poll::Pending`, and /// registers the task to be notified if a `RST_STREAM` is received. /// /// If a `RST_STREAM` frame is received for this stream, calling this /// method will yield the `Reason` for the reset. /// /// # Error /// /// Calling this method after having called `send_response` will return /// a user error. pub fn poll_reset(&mut self, cx: &mut Context) -> Poll<Result<Reason, crate::Error>> { self.inner.poll_reset(cx, proto::PollReset::AwaitingHeaders) } /// Returns the stream ID of the response stream. /// /// # Panics /// /// If the lock on the stream store has been poisoned. pub fn stream_id(&self) -> crate::StreamId { crate::StreamId::from_internal(self.inner.stream_id()) } } // ===== impl SendPushedResponse ===== impl<B: Buf> SendPushedResponse<B> { /// Send a response to a promised request. 
/// /// On success, a [`SendStream`] instance is returned. This instance can be /// used to stream the response body and send trailers. /// /// If a body or trailers will be sent on the returned [`SendStream`] /// instance, then `end_of_stream` must be set to `false` when calling this /// function. /// /// The [`SendPushedResponse`] instance is associated with a promised /// request. This function may only be called once per instance and only if /// [`send_reset`] has not been previously called. /// /// [`SendPushedResponse`]: # /// [`SendStream`]: ../struct.SendStream.html /// [`send_reset`]: #method.send_reset pub fn send_response( &mut self, response: Response<()>, end_of_stream: bool, ) -> Result<SendStream<B>, crate::Error> { self.inner.send_response(response, end_of_stream) } /// Send a stream reset to the peer. /// /// This essentially cancels the stream, including any inbound or outbound /// data streams. /// /// If this function is called before [`send_response`], a call to /// [`send_response`] will result in an error. /// /// If this function is called while a [`SendStream`] instance is active, /// any further use of the instance will result in an error. /// /// This function should only be called once. /// /// [`send_response`]: #method.send_response /// [`SendStream`]: ../struct.SendStream.html pub fn send_reset(&mut self, reason: Reason) { self.inner.send_reset(reason) } /// Polls to be notified when the client resets this stream. /// /// If stream is still open, this returns `Poll::Pending`, and /// registers the task to be notified if a `RST_STREAM` is received. /// /// If a `RST_STREAM` frame is received for this stream, calling this /// method will yield the `Reason` for the reset. /// /// # Error /// /// Calling this method after having called `send_response` will return /// a user error. pub fn poll_reset(&mut self, cx: &mut Context) -> Poll<Result<Reason, crate::Error>> { self.inner.poll_reset(cx) } /// Returns the stream ID of the response stream. /// /// # Panics /// /// If the lock on the stream store has been poisoned. 
pub fn stream_id(&self) -> crate::StreamId { self.inner.stream_id() } } // ===== impl Flush ===== impl<T, B: Buf> Flush<T, B> { fn new(codec: Codec<T, B>) -> Self { Flush { codec: Some(codec) } } } impl<T, B> Future for Flush<T, B> where T: AsyncWrite + Unpin, B: Buf, { type Output = Result<Codec<T, B>, crate::Error>; fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { // Flush the codec ready!(self.codec.as_mut().unwrap().flush(cx)).map_err(crate::Error::from_io)?; // Return the codec Poll::Ready(Ok(self.codec.take().unwrap())) } } impl<T, B: Buf> ReadPreface<T, B> { fn new(codec: Codec<T, B>) -> Self { ReadPreface { codec: Some(codec), pos: 0, } } fn inner_mut(&mut self) -> &mut T { self.codec.as_mut().unwrap().get_mut() } } impl<T, B> Future for ReadPreface<T, B> where T: AsyncRead + Unpin, B: Buf, { type Output = Result<Codec<T, B>, crate::Error>; fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { let mut buf = [0; 24]; let mut rem = PREFACE.len() - self.pos; while rem > 0 { let mut buf = ReadBuf::new(&mut buf[..rem]); ready!(Pin::new(self.inner_mut()).poll_read(cx, &mut buf)) .map_err(crate::Error::from_io)?; let n = buf.filled().len(); if n == 0 { return Poll::Ready(Err(crate::Error::from_io(io::Error::new( io::ErrorKind::UnexpectedEof, "connection closed before reading preface", )))); } if &PREFACE[self.pos..self.pos + n] != buf.filled() { proto_err!(conn: "read_preface: invalid preface"); // TODO: Should this just write the GO_AWAY frame directly? return Poll::Ready(Err(Reason::PROTOCOL_ERROR.into())); } self.pos += n; rem -= n; // TODO test } Poll::Ready(Ok(self.codec.take().unwrap())) } } // ===== impl Handshake ===== impl<T, B: Buf> Future for Handshake<T, B> where T: AsyncRead + AsyncWrite + Unpin, B: Buf + 'static, { type Output = Result<Connection<T, B>, crate::Error>; fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { let span = self.span.clone(); // XXX(eliza): T_T let _e = span.enter(); tracing::trace!(state = ?self.state); use crate::server::Handshaking::*; self.state = if let Flushing(ref mut flush) = self.state { // We're currently flushing a pending SETTINGS frame. Poll the // flush future, and, if it's completed, advance our state to wait // for the client preface. let codec = match Pin::new(flush).poll(cx)? { Poll::Pending => { tracing::trace!(flush.poll = %"Pending"); return Poll::Pending; } Poll::Ready(flushed) => { tracing::trace!(flush.poll = %"Ready"); flushed } }; Handshaking::from(ReadPreface::new(codec)) } else
; let poll = if let ReadingPreface(ref mut read) = self.state { // We're now waiting for the client preface. Poll the `ReadPreface` // future. If it has completed, we will create a `Connection` handle // for the connection. Pin::new(read).poll(cx) // Actually creating the `Connection` has to occur outside of this // `if let` block, because we've borrowed `self` mutably in order // to poll the state and won't be able to borrow the SETTINGS frame // as well until we release the borrow for `poll()`. } else { unreachable!("Handshake::poll() state was not advanced completely!") }; poll?.map(|codec| { let connection = proto::Connection::new( codec, Config { next_stream_id: 2.into(), // Server does not need to locally initiate any streams initial_max_send_streams: 0, reset_stream_duration: self.builder.reset_stream_duration, reset_stream_max: self.builder.reset_stream_max, settings: self.builder.settings.clone(), }, ); tracing::trace!("connection established!"); let mut c = Connection { connection }; if let Some(sz) = self.builder.initial_target_connection_window_size { c.set_target_window_size(sz); } Ok(c) }) } } impl<T, B> fmt::Debug for Handshake<T, B> where T: AsyncRead + AsyncWrite + fmt::Debug, B: fmt::Debug + Buf, { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { write!(fmt, "server::Handshake") } } impl Peer { pub fn convert_send_message( id: StreamId, response: Response<()>, end_of_stream: bool, ) -> frame::Headers { use http::response::Parts; // Extract the components of the HTTP request let ( Parts { status, headers, .. }, _, ) = response.into_parts(); // Build the set pseudo header set. All requests will include `method` // and `path`. let pseudo = Pseudo::response(status); // Create the HEADERS frame let mut frame = frame::Headers::new(id, pseudo, headers); if end_of_stream { frame.set_end_stream() } frame } pub fn convert_push_message( stream_id: StreamId, promised_id: StreamId, request: Request<()>, ) -> Result<frame::PushPromise, UserError> { use http::request::Parts; if let Err(e) = frame::PushPromise::validate_request(&request) { use PushPromiseHeaderError::*; match e { NotSafeAndCacheable => tracing::debug!( ?promised_id, "convert_push_message: method {} is not safe and cacheable", request.method(), ), InvalidContentLength(e) => tracing::debug!( ?promised_id, "convert_push_message; promised request has invalid content-length {:?}", e, ), } return Err(UserError::MalformedHeaders); } // Extract the components of the HTTP request let ( Parts { method, uri, headers, .. }, _, ) = request.into_parts(); let pseudo = Pseudo::request(method, uri); Ok(frame::PushPromise::new( stream_id, promised_id, pseudo, headers, )) } } impl proto::Peer for Peer { type Poll = Request<()>; const NAME: &'static str = "Server"; fn is_server() -> bool { true } fn r#dyn() -> proto::DynPeer { proto::DynPeer::Server } fn convert_poll_message( pseudo: Pseudo, fields: HeaderMap, stream_id: StreamId, ) -> Result<Self::Poll, RecvError> { use http::{uri, Version}; let mut b = Request::builder(); macro_rules! 
malformed { ($($arg:tt)*) => {{ tracing::debug!($($arg)*); return Err(RecvError::Stream { id: stream_id, reason: Reason::PROTOCOL_ERROR, }); }} } b = b.version(Version::HTTP_2); let is_connect; if let Some(method) = pseudo.method { is_connect = method == Method::CONNECT; b = b.method(method); } else { malformed!("malformed headers: missing method"); } // Specifying :status for a request is a protocol error if pseudo.status.is_some() { tracing::trace!("malformed headers: :status field on request; PROTOCOL_ERROR"); return Err(RecvError::Connection(Reason::PROTOCOL_ERROR)); } // Convert the URI let mut parts = uri::Parts::default(); // A request translated from HTTP/1 must not include the :authority // header if let Some(authority) = pseudo.authority { let maybe_authority = uri::Authority::from_maybe_shared(authority.clone().into_inner()); parts.authority = Some(maybe_authority.or_else(|why| { malformed!( "malformed headers: malformed authority ({:?}): {}", authority, why, ) })?); } // A :scheme is required, except CONNECT. if let Some(scheme) = pseudo.scheme { if is_connect { malformed!(":scheme in CONNECT"); } let maybe_scheme = scheme.parse(); let scheme = maybe_scheme.or_else(|why| { malformed!( "malformed headers: malformed scheme ({:?}): {}", scheme, why, ) })?; // It's not possible to build an `Uri` from a scheme and path. So, // after validating is was a valid scheme, we just have to drop it // if there isn't an :authority. if parts.authority.is_some() { parts.scheme = Some(scheme); } } else if !is_connect { malformed!("malformed headers: missing scheme"); } if let Some(path) = pseudo.path { if is_connect { malformed!(":path in CONNECT"); } // This cannot be empty if path.is_empty() { malformed!("malformed headers: missing path"); } let maybe_path = uri::PathAndQuery::from_maybe_shared(path.clone().into_inner()); parts.path_and_query = Some(maybe_path.or_else(|why| { malformed!("malformed headers: malformed path ({:?}): {}", path, why,) })?); } b = b.uri(parts); let mut request = match b.body(()) { Ok(request) => request, Err(e) => { // TODO: Should there be more specialized handling for different // kinds of errors proto_err!(stream: "error building request: {}; stream={:?}", e, stream_id); return Err(RecvError::Stream { id: stream_id, reason: Reason::PROTOCOL_ERROR, }); } }; *request.headers_mut() = fields; Ok(request) } } // ===== impl Handshaking ===== impl<T, B> fmt::Debug for Handshaking<T, B> where B: Buf, { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { match *self { Handshaking::Flushing(_) => write!(f, "Handshaking::Flushing(_)"), Handshaking::ReadingPreface(_) => write!(f, "Handshaking::ReadingPreface(_)"), Handshaking::Empty => write!(f, "Handshaking::Empty"), } } } impl<T, B> convert::From<Flush<T, Prioritized<B>>> for Handshaking<T, B> where T: AsyncRead + AsyncWrite, B: Buf, { #[inline] fn from(flush: Flush<T, Prioritized<B>>) -> Self { Handshaking::Flushing(flush.instrument(tracing::trace_span!("flush"))) } } impl<T, B> convert::From<ReadPreface<T, Prioritized<B>>> for Handshaking<T, B> where T: AsyncRead + AsyncWrite, B: Buf, { #[inline] fn from(read: ReadPreface<T, Prioritized<B>>) -> Self { Handshaking::ReadingPreface(read.instrument(tracing::trace_span!("read_preface"))) } } impl<T, B> convert::From<Codec<T, Prioritized<B>>> for Handshaking<T, B> where T: AsyncRead + AsyncWrite, B: Buf, { #[inline] fn from(codec: Codec<T, Prioritized<B>>) -> Self { Handshaking::from(Flush::new(codec)) } }
{ // Otherwise, we haven't actually advanced the state, but we have // to replace it with itself, because we have to return a value. // (note that the assignment to `self.state` has to be outside of // the `if let` block above in order to placate the borrow checker). mem::replace(&mut self.state, Handshaking::Empty) }
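// The `mem::replace(&mut self.state, Handshaking::Empty)` in the `else` arm
// above is the standard idiom for taking ownership of an enum state through a
// mutable reference. A minimal standalone sketch of the idiom (all names here
// are hypothetical, not part of h2):
use std::mem;

enum State {
    Working(String),
    Empty,
}

struct Machine {
    state: State,
}

impl Machine {
    // Move the current state out, leaving a placeholder behind, so the owned
    // value can be consumed while `self` remains valid.
    fn take_message(&mut self) -> Option<String> {
        match mem::replace(&mut self.state, State::Empty) {
            State::Working(msg) => Some(msg),
            State::Empty => None,
        }
    }
}

fn main() {
    let mut m = Machine { state: State::Working("handshake".to_string()) };
    assert_eq!(m.take_message(), Some("handshake".to_string()));
    assert_eq!(m.take_message(), None);
}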
transforms.py
import torch from node2vec import Node2Vec as Node2Vec_ from .brain_data import BrainData from torch_geometric.data import Data from networkx.convert_matrix import from_numpy_matrix from .utils import binning, LDP import networkx as nx from .base_transform import BaseTransform from numpy import linalg as LA import numpy as np class FromSVTransform(BaseTransform): def __init__(self, sv_transform): super(FromSVTransform, self).__init__() self.sv_transform = sv_transform def __call__(self, data): keys = list(filter(lambda x: x.startswith('edge_index'), data.keys)) for key in keys: if key.startswith('edge_index'): postfix = key[10:] edge_index = data[f'edge_index{postfix}'] edge_attr = data[f'edge_attr{postfix}'] svdata = Data(edge_index=edge_index, edge_attr=edge_attr, num_nodes=data.num_nodes) svdata_transformed = self.sv_transform(svdata) data[f'x{postfix}'] = svdata_transformed.x data[f'edge_index{postfix}'] = svdata_transformed.edge_index data[f'edge_attr{postfix}'] = svdata_transformed.edge_attr return data def __str__(self): return self.sv_transform.__class__.__name__ class Identity(BaseTransform): def __call__(self, data: BrainData): """ Returns a diagonal matrix with ones on the diagonal. :param data: BrainData :return: torch.Tensor """ data.x = torch.diag(torch.ones(data.num_nodes)) return data class Degree(BaseTransform): def __call__(self, data: BrainData): """ Returns a diagonal matrix with the degree of each node on the diagonal. :param data: BrainData :return: torch.Tensor """ adj = torch.sparse_coo_tensor(data.edge_index, data.edge_attr, [data.num_nodes, data.num_nodes]) adj = adj.to_dense() data.x = torch.Tensor(adj.sum(dim=1, keepdim=True)).float() return data def __str__(self): return 'Degree' class LDPTransform(BaseTransform): def __call__(self, data: BrainData): """ Returns node feature with LDP transform. :param data: BrainData :return: torch.Tensor """ adj = torch.sparse_coo_tensor(data.edge_index, data.edge_attr, [data.num_nodes, data.num_nodes]) adj = adj.to_dense() data.x = torch.Tensor( LDP(nx.from_numpy_array(adj.numpy())) ).float() return data def __str__(self): return 'LDP' class DegreeBin(BaseTransform): def __call__(self, data: BrainData): """ Returns node feature with degree bin transform. :param data: BrainData :return: torch.Tensor """ adj = torch.sparse_coo_tensor(data.edge_index, data.edge_attr, [data.num_nodes, data.num_nodes]) adj = adj.to_dense() return torch.Tensor(binning(adj.sum(dim=1))).float() def __str__(self): return 'Degree_Bin' class Adj(BaseTransform): def __call__(self, data: BrainData): """ Returns adjacency matrix. :param data: BrainData :return: torch.Tensor """ adj = torch.sparse_coo_tensor(data.edge_index, data.edge_attr, [data.num_nodes, data.num_nodes]) adj = adj.to_dense() data.x = adj return data def __str__(self):
class Eigenvector(BaseTransform): def __call__(self, data: BrainData): """ Returns node feature with eigenvector. :param data: BrainData :return: torch.Tensor """ adj = torch.sparse_coo_tensor(data.edge_index, data.edge_attr, [data.num_nodes, data.num_nodes]) adj = adj.to_dense() w, v = LA.eig(adj.numpy()) # indices = np.argsort(w)[::-1] v = v.transpose() data.x = torch.Tensor(v).float() return data class EigenNorm(BaseTransform): def __call__(self, data: BrainData): """ Returns node feature with eigen norm. :param data: BrainData :return: torch.Tensor """ adj = torch.sparse_coo_tensor(data.edge_index, data.edge_attr, [data.num_nodes, data.num_nodes]) adj = adj.to_dense() sum_of_rows = adj.sum(dim=1) adj /= sum_of_rows adj = torch.nan_to_num(adj) w, v = LA.eig(adj.numpy()) # indices = np.argsort(w)[::-1] v = v.transpose() data.x = torch.Tensor(v).float() return data class Node2Vec(BaseTransform): def __init__(self, feature_dim=32, walk_length=5, num_walks=200, num_workers=4, window=10, min_count=1, batch_words=4): super(Node2Vec, self).__init__() self.feature_dim = feature_dim self.walk_length = walk_length self.num_walks = num_walks self.num_workers = num_workers self.window = window self.min_count = min_count self.batch_words = batch_words def __call__(self, data): """ Returns node feature with node2vec transform. :param data: BrainData :return: torch.Tensor """ adj = torch.sparse_coo_tensor(data.edge_index, data.edge_attr, [data.num_nodes, data.num_nodes]) adj = adj.to_dense() if (adj < 0).int().sum() > 0: # split the adjacency matrix into two (negative and positive) parts pos_adj = adj.clone() pos_adj[adj < 0] = 0 neg_adj = adj.clone() neg_adj[adj > 0] = 0 neg_adj = -neg_adj adjs = [pos_adj, neg_adj] else: adjs = [adj] xs = [] for adj in adjs: x = torch.zeros((data.num_nodes, self.feature_dim)) graph = from_numpy_matrix(adj.numpy()) node2vec = Node2Vec_(graph, dimensions=self.feature_dim, walk_length=self.walk_length, num_walks=self.num_walks, workers=self.num_workers) model = node2vec.fit(window=self.window, min_count=self.min_count, batch_words=self.batch_words) for i in range(data.num_nodes): x[i] = torch.Tensor(model.wv[f'{i}'].copy()) xs.append(x) data.x = torch.cat(xs, dim=-1) return data def __str__(self): return 'Node2Vec'
return 'Adj'
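# A self-contained toy run (synthetic values, no BrainData required) of the
# pattern every transform above shares: rebuild a dense adjacency matrix from
# edge_index/edge_attr before deriving node features.
import torch

edge_index = torch.tensor([[0, 1, 2], [1, 2, 0]])  # COO (row, col) indices
edge_attr = torch.tensor([0.5, 0.25, 1.0])         # one weight per edge
num_nodes = 3

adj = torch.sparse_coo_tensor(edge_index, edge_attr, [num_nodes, num_nodes]).to_dense()
degree = adj.sum(dim=1, keepdim=True)  # what the Degree transform stores in data.x
print(adj)     # dense [3, 3] weighted adjacency
print(degree)  # per-node weighted degree, shape [3, 1]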
mpu.rs
#![no_std] #![no_main] extern crate betafpv_f3; #[macro_use(entry, exception)] extern crate cortex_m_rt as rt; extern crate panic_semihosting; use betafpv_f3::hal::prelude::*; use betafpv_f3::Board; use rt::ExceptionFrame; entry!(main); fn
() -> ! { let Board {mut led, mut mpu, mut delay, ..} = Board::new(); // https://www.invensense.com/wp-content/uploads/2015/02/MPU-6000-Register-Map1.pdf // expected 0x68 based on register map // some startup time is required or this assertion fails delay.delay_ms(1000u16); assert_eq!(mpu.who_am_i().unwrap(), 0x68); // blinking LED means the assertion was correct for _i in 0..5 { led.set_high(); delay.delay_ms(500u16); led.set_low(); delay.delay_ms(500u16); } // LED controlled by orientation of board loop { let board_up = mpu.accel().unwrap().z > 0; if board_up { led.set_high(); } else { led.set_low(); } delay.delay_ms(500u16); } } exception!(HardFault, hard_fault); fn hard_fault(ef: &ExceptionFrame) -> ! { panic!("{:#?}", ef); } exception!(*, default_handler); fn default_handler(irqn: i16) { panic!("Unhandled exception (IRQn = {})", irqn); }
main
pt_BR.js
import Pagination from '../vc-pagination/locale/pt_BR' import DatePicker from '../date-picker/locale/pt_BR' import TimePicker from '../time-picker/locale/pt_BR' import Calendar from '../calendar/locale/pt_BR' export default { locale: 'pt-br', Pagination, DatePicker, TimePicker, Calendar, Table: {
filterTitle: 'Filtro',
    filterConfirm: 'OK',
    filterReset: 'Resetar',
    emptyText: 'Não há dados',
    selectAll: 'Selecionar página atual',
    selectInvert: 'Inverter seleção',
  },
  Modal: {
    okText: 'OK',
    cancelText: 'Cancelar',
    justOkText: 'OK',
  },
  Popconfirm: {
    okText: 'OK',
    cancelText: 'Cancelar',
  },
  Transfer: {
    notFoundContent: 'Não encontrado',
    searchPlaceholder: 'Procurar',
    itemUnit: 'item',
    itemsUnit: 'itens',
  },
  Select: {
    notFoundContent: 'Não encontrado',
  },
  Upload: {
    uploading: 'Enviando...',
    removeFile: 'Remover arquivo',
    uploadError: 'Erro no envio',
    previewFile: 'Visualizar arquivo',
  },
}
config.py
""" CAR CONFIG This file is read by your car application's manage.py script to change the car performance. EXMAPLE ----------- import dk cfg = dk.load_config(config_path='~/mycar/config.py') print(cfg.CAMERA_RESOLUTION)
#PATHS CAR_PATH = PACKAGE_PATH = os.path.dirname(os.path.realpath(__file__)) DATA_PATH = os.path.join(CAR_PATH, 'data') MODELS_PATH = os.path.join(CAR_PATH, 'models') #VEHICLE DRIVE_LOOP_HZ = 20 MAX_LOOPS = 100000 #CAMERA CAMERA_RESOLUTION = (480, 640) #(height, width) CAMERA_FRAMERATE = DRIVE_LOOP_HZ #STEERING STEERING_CHANNEL = 1 STEERING_LEFT_PWM = 420 STEERING_RIGHT_PWM = 360 #THROTTLE THROTTLE_CHANNEL = 0 THROTTLE_FORWARD_PWM = 400 THROTTLE_STOPPED_PWM = 360 THROTTLE_REVERSE_PWM = 310 #TRAINING BATCH_SIZE = 128 TRAIN_TEST_SPLIT = 0.8 TUB_PATH = os.path.join(CAR_PATH, 'tub') # if using a single tub #JOYSTICK JOYSTICK_MAX_THROTTLE = 0.25 JOYSTICK_STEERING_SCALE = 1.0 JOYSTICK_THROTTLE_AXIS = 'rz' AUTO_RECORD_ON_THROTTLE = True
""" import os
echo.py
import logging from typing import Optional from paperplane.backends.click import _secho logger = logging.getLogger(__name__)
color: Optional[str] = None, fg: Optional[str] = None, bg: Optional[str] = None, bold: Optional[bool] = False, ): if prompt is not None: return _secho(message=prompt, fg=color or fg, bg=bg, bold=bold) else: logger.warning("prompt is None. Nothing to do.")
def run( prompt: str,
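# Hypothetical calls illustrating run() above: `color or fg` means an explicit
# color argument takes precedence over fg, and a None prompt only logs a warning.
run("Deploy finished", color="green", fg="red", bold=True)  # echoed green and bold
run(None)  # nothing echoed; logs "prompt is None. Nothing to do."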
parametros.controller.d.ts
export declare class
{ instaladorLicencia(data: any): Promise<{ info: { licencia: number; nombreEmpresa: any; database: any; nombreTienda: any; codigoTienda: any; ultimoTicket: any; botonesConPrecios: any; prohibirBuscarArticulos: any; token: any; }; error: boolean; mensaje?: undefined; } | { error: boolean; mensaje: string; info?: undefined; }>; }
ParametrosController
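// Hedged usage sketch (not part of the generated declaration): the union that
// instaladorLicencia() resolves to is narrowed by checking `info`, since
// `error` is a plain boolean in both branches and cannot discriminate them.
async function instalar(controller: ParametrosController, data: any): Promise<void> {
  const res = await controller.instaladorLicencia(data);
  if (res.info) {
    console.log(`licencia ${res.info.licencia} para ${res.info.nombreTienda}`);
  } else {
    console.error(res.mensaje);
  }
}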
pytokenizer.py
import six from syntaxerrors import automata from syntaxerrors.parser import Token from syntaxerrors.pytoken import python_opmap_bytes from syntaxerrors.pytoken import tokens from syntaxerrors.error import TokenError, TokenIndentationError from syntaxerrors.pytokenize import tabsize, whiteSpaceDFA, \ triple_quoted, endDFAs, single_quoted, pseudoDFA from syntaxerrors import astconsts NAMECHARS = b'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_' NUMCHARS = b'0123456789' ALNUMCHARS = NAMECHARS + NUMCHARS EXTENDED_ALNUMCHARS = ALNUMCHARS + b'-.' WHITESPACES = b' \t\n\r\v\f' def indexbyte(b, pos): assert isinstance(b, bytes) return six.int2byte(six.indexbytes(b, pos)) def match_encoding_declaration(comment): """returns the declared encoding or None This function is a replacement for : >>> py_encoding = re.compile(r"coding[:=]\s*([-\w.]+)") >>> py_encoding.search(comment) """ index = comment.find(b'coding') if index < 0: return None next_char = indexbyte(comment, index + 6) if next_char not in b':=': return None end_of_decl = comment[index + 7:] index = 0 for i in range(len(end_of_decl)): char = indexbyte(end_of_decl, i) if char not in WHITESPACES: break index += 1 else: return None encoding = b'' for i in range(index, len(end_of_decl)): char = indexbyte(end_of_decl, i) if char in EXTENDED_ALNUMCHARS: encoding += char else: break if encoding != b'': return encoding return None DUMMY_DFA = automata.DFA([], []) def token_decode(token_type, value, lineno, column, line): return Token( token_type, value.decode("utf-8"), lineno, column, line) def generate_tokens(lines, flags): """ This is a rewrite of pypy.module.parser.pytokenize.generate_tokens since the original function is not RPYTHON (uses yield) It was also slightly modified to generate Token instances instead of the original 5-tuples -- it's now a 4-tuple of * the Token instance * the whole line as a string * the line number (the real one, counting continuation lines) * the position on the line of the end of the token. Original docstring :: The generate_tokens() generator requires one argment, readline, which must be a callable object which provides the same interface as the readline() method of built-in file objects. Each call to the function should return one line of input as a string. The generator produces 5-tuples with these members: the token type; the token string; a 2-tuple (srow, scol) of ints specifying the row and column where the token begins in the source; a 2-tuple (erow, ecol) of ints specifying the row and column where the token ends in the source; and the line on which the token was found. The line passed is the logical line; continuation lines are included. 
""" token_list = [] lnum = continued = 0 namechars = NAMECHARS numchars = NUMCHARS contstr, needcont = '', 0 contline = None indents = [0] last_comment = b'' parenstack = [] # make the annotator happy endDFA = DUMMY_DFA # make the annotator happy line = '' pos = 0 lines.append(b"") strstart = (0, 0, "") for line in lines: assert isinstance(line, bytes) lnum = lnum + 1 line = universal_newline(line) assert isinstance(line, bytes) pos, max = 0, len(line) uni_line = line.decode("utf-8") if contstr: if not line: raise TokenError( "end of file (EOF) while scanning triple-quoted string literal", strstart[2], strstart[0], strstart[1]+1, token_list, lnum-1) endmatch = endDFA.recognize(line) if endmatch >= 0: pos = end = endmatch tok = token_decode(tokens.STRING, contstr + line[:end], strstart[0], strstart[1], uni_line) token_list.append(tok) last_comment = b'' contstr, needcont = '', 0 contline = None elif (needcont and not line.endswith(b'\\\n') and not line.endswith(b'\\\r\n')): tok = token_decode(tokens.ERRORTOKEN, contstr + line, strstart[0], strstart[1], uni_line) token_list.append(tok) last_comment = b'' contstr = '' contline = None continue else: contstr = contstr + line contline = contline + line continue elif not parenstack and not continued: # new statement if not line: break column = 0 while pos < max: # measure leading whitespace if indexbyte(line, pos) == b' ': column = column + 1 elif indexbyte(line, pos) == b'\t': column = (column/tabsize + 1)*tabsize elif indexbyte(line, pos) == b'\f': column = 0 else: break pos = pos + 1 if pos == max: break if indexbyte(line, pos) in b'#\r\n': # skip comments or blank lines continue if column > indents[-1]: # count indents or dedents indents.append(column) token_list.append(token_decode(tokens.INDENT, line[:pos], lnum, 0, uni_line)) last_comment = b'' while column < indents[-1]: indents.pop() token_list.append(token_decode(tokens.DEDENT, b'', lnum, pos, uni_line)) last_comment = b'' if column != indents[-1]: err = "unindent does not match any outer indentation level" raise TokenIndentationError(err, line, lnum, column+1, token_list) else: # continued statement if not line: if parenstack: _, lnum1, start1, line1 = parenstack[0] raise TokenError("parenthesis is never closed", line1, lnum1, start1 + 1, token_list, lnum) raise TokenError("end of file (EOF) in multi-line statement", line, lnum, 0, token_list) # XXX why is the offset 0 here? continued = 0 while pos < max: pseudomatch = pseudoDFA.recognize(line, pos) if pseudomatch >= 0: # scan for tokens # JDR: Modified start = whiteSpaceDFA.recognize(line, pos) if start < 0: start = pos end = pseudomatch if start == end: raise TokenError("Unknown character", line, lnum, start + 1, token_list) pos = end
if initial in numchars or \ (initial == b'.' and token != b'.'): # ordinary number token_list.append(token_decode(tokens.NUMBER, token, lnum, start, uni_line)) last_comment = b'' elif initial in b'\r\n': if not parenstack: tok = token_decode(tokens.NEWLINE, last_comment, lnum, start, uni_line) token_list.append(tok) last_comment = b'' elif initial == b'#': # skip comment last_comment = token elif token in triple_quoted: endDFA = endDFAs[token] endmatch = endDFA.recognize(line, pos) if endmatch >= 0: # all on one line pos = endmatch token = line[start:pos] tok = token_decode(tokens.STRING, token, lnum, start, uni_line) token_list.append(tok) last_comment = b'' else: strstart = (lnum, start, line) contstr = line[start:] contline = line break elif initial in single_quoted or \ token[:2] in single_quoted or \ token[:3] in single_quoted: if indexbyte(token, -1) == b'\n': # continued string strstart = (lnum, start, line) endDFA = (endDFAs[initial] or endDFAs[indexbyte(token, 1)] or endDFAs[indexbyte(token, 2)]) contstr, needcont = line[start:], 1 contline = line break else: # ordinary string tok = token_decode(tokens.STRING, token, lnum, start, uni_line) token_list.append(tok) last_comment = b'' elif initial in namechars: # ordinary name token_list.append(token_decode(tokens.NAME, token, lnum, start, uni_line)) last_comment = b'' elif initial == b'\\': # continued stmt continued = 1 elif initial == '$': token_list.append(Token(tokens.REVDBMETAVAR, token, lnum, start, line)) last_comment = '' else: if initial in b'([{': parenstack.append((initial, lnum, start, line)) elif initial in b')]}': if not parenstack: raise TokenError("unmatched '%s'" % initial.decode("utf-8"), line, lnum, start + 1, token_list) opening, lnum1, start1, line1 = parenstack.pop() if not ((opening == b"(" and initial == b")") or (opening == b"[" and initial == b"]") or (opening == b"{" and initial == b"}")): msg = "closing parenthesis '%s' does not match opening parenthesis '%s'" % ( initial.decode("utf-8"), opening.decode("utf-8")) if lnum1 != lnum: msg += " on line " + str(lnum1) raise TokenError( msg, line, lnum, start + 1, token_list) if token in python_opmap_bytes: punct = python_opmap_bytes[token] else: punct = tokens.OP token_list.append(token_decode(punct, token, lnum, start, uni_line)) last_comment = b'' else: start = whiteSpaceDFA.recognize(line, pos) if start < 0: start = pos if start<max and indexbyte(line, start) in single_quoted: raise TokenError("end of line (EOL) while scanning string literal", line, lnum, start+1, token_list) tok = token_decode(tokens.ERRORTOKEN, indexbyte(line, pos), lnum, pos, uni_line) token_list.append(tok) last_comment = b'' pos = pos + 1 lnum -= 1 if not (flags & astconsts.PyCF_DONT_IMPLY_DEDENT): if token_list and token_list[-1].token_type != tokens.NEWLINE: tok = token_decode(tokens.NEWLINE, b'\n', lnum, 0, u'\n') token_list.append(tok) for indent in indents[1:]: # pop remaining indent levels token_list.append(token_decode(tokens.DEDENT, b'', lnum, pos, uni_line)) tok = token_decode(tokens.NEWLINE, b'\n', lnum, 0, u'\n') token_list.append(tok) token_list.append(token_decode(tokens.ENDMARKER, b'', lnum, pos, uni_line)) return token_list def universal_newline(line): # show annotator that indexes below are non-negative line_len_m2 = len(line) - 2 if line_len_m2 >= 0 and indexbyte(line, -2) == b'\r' and indexbyte(line, -1) == b'\n': return line[:line_len_m2] + b'\n' line_len_m1 = len(line) - 1 if line_len_m1 >= 0 and indexbyte(line, -1) == b'\r': return line[:line_len_m1] + b'\n' return 
line
token, initial = line[start:end], indexbyte(line, start)
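# The INDENT/DEDENT bookkeeping above, distilled into a standalone sketch: a
# stack of indentation columns, pushed on deeper indents and popped (possibly
# several times) on shallower ones.
def indent_events(columns):
    indents = [0]
    for col in columns:
        if col > indents[-1]:
            indents.append(col)
            yield 'INDENT'
        while col < indents[-1]:
            indents.pop()
            yield 'DEDENT'
        if col != indents[-1]:
            raise ValueError('unindent does not match any outer indentation level')

print(list(indent_events([0, 4, 8, 4, 0])))  # ['INDENT', 'INDENT', 'DEDENT', 'DEDENT']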
source_lm_feature_extractor.py
import codecs from subprocess import call import os from collections import defaultdict from marmot.features.feature_extractor import FeatureExtractor from marmot.util.ngram_window_extractor import left_context, right_context from marmot.experiment.import_utils import mk_tmp_dir from marmot.exceptions.no_data_error import NoDataError # Class that extracts various LM features for source class SourceLMFeatureExtractor(FeatureExtractor): def __init__(self, ngram_file=None, corpus_file=None, srilm=None, tmp_dir=None, order=5): # generate ngram counts if ngram_file is None: if srilm is None: if 'SRILM' in os.environ: srilm = os.environ['SRILM'] else: print("No SRILM found") return if corpus_file is None: print ("No corpus for LM generation") return srilm_ngram_count = os.path.join(srilm, 'ngram-count') tmp_dir = mk_tmp_dir(tmp_dir) lm_file = os.path.join(tmp_dir, 'lm_file') ngram_file = os.path.join(tmp_dir, 'ngram_count_file') call([srilm_ngram_count, '-text', corpus_file, '-lm', lm_file, '-order', str(order), '-write', ngram_file]) self.lm = defaultdict(int) for line in codecs.open(ngram_file, encoding='utf-8'): chunks = line[:-1].split('\t') if len(chunks) == 2: new_tuple = tuple(chunks[0].split()) new_number = int(chunks[1]) self.lm[new_tuple] = new_number else: print("Wrong ngram-counts file format at line '", line[:-1], "'") self.order = order def check_lm(self, ngram, side='left'): for i in range(self.order, 0, -1): if side == 'left': cur_ngram = ngram[len(ngram)-i:] elif side == 'right': cur_ngram = ngram[:i] else: print("Unknown parameter 'side'", side) return 0 if tuple(cur_ngram) in self.lm: return i return 0 # returns a set of features related to LM # currently extracting: highest order ngram including the word and its LEFT context, # highest order ngram including the word and its RIGHT context def get_features(self, context_obj): if 'source' not in context_obj: raise NoDataError('source', context_obj, 'SourceLMFeatureExtractor') if 'alignments' not in context_obj: raise NoDataError('alignments', context_obj, 'SourceLMFeatureExtractor') align_idx = context_obj['alignments'][context_obj['index']] # unaligned if align_idx is None: return [0, 0] align_token = context_obj['source'][align_idx] left_ngram = left_context(context_obj['source'], align_token, context_size=2, idx=align_idx) + [align_token] right_ngram = [align_token] + right_context(context_obj['source'], align_token, context_size=2, idx=align_idx) left_ngram_order = self.check_lm(left_ngram, side='left') right_ngram_order = self.check_lm(right_ngram, side='right') return [left_ngram_order, right_ngram_order] def get_feature_names(self):
return ['source_highest_order_ngram_left', 'source_highest_order_ngram_right']
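# A standalone paraphrase of the back-off in check_lm() above: starting from
# the full order, find the longest suffix (left context) or prefix (right
# context) of the ngram that appears in the count table.
lm = {('the',): 1, ('the', 'cat'): 1, ('cat', 'sat'): 1}

def highest_order(ngram, lm, order=3, side='left'):
    for i in range(order, 0, -1):
        cur = ngram[len(ngram) - i:] if side == 'left' else ngram[:i]
        if tuple(cur) in lm:
            return i
    return 0

print(highest_order(['saw', 'the', 'cat'], lm))               # 2: ('the', 'cat')
print(highest_order(['cat', 'sat', 'on'], lm, side='right'))  # 2: ('cat', 'sat')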
plugin.ts
import { getPlugin, getPromise, cordovaWarn, pluginWarn } from './util'; import { checkReady } from './bootstrap'; import { CordovaOptions } from './decorators'; import { Observable } from 'rxjs/Observable'; import 'rxjs/add/observable/fromEvent'; checkReady(); // declare const window; // declare var Promise; export const ERR_CORDOVA_NOT_AVAILABLE = { error: 'cordova_not_available' }; export const ERR_PLUGIN_NOT_INSTALLED = { error: 'plugin_not_installed' }; /** * Checks if plugin/cordova is available * @return {boolean | { error: string } } * @private */ export function checkAvailability(pluginRef: string, methodName?: string, pluginName?: string): boolean | { error: string }; export function checkAvailability(pluginObj: any, methodName?: string, pluginName?: string): boolean | { error: string }; export function checkAvailability(plugin: any, methodName?: string, pluginName?: string): boolean | { error: string } { let pluginRef, pluginInstance, pluginPackage; if (typeof plugin === 'string') { pluginRef = plugin; } else { pluginRef = plugin.constructor.getPluginRef(); pluginName = plugin.constructor.getPluginName(); pluginPackage = plugin.constructor.getPluginInstallName(); } pluginInstance = getPlugin(pluginRef); if (!pluginInstance || (!!methodName && typeof pluginInstance[methodName] === 'undefined')) { if (!window.cordova) { cordovaWarn(pluginName, methodName); return ERR_CORDOVA_NOT_AVAILABLE; } pluginWarn(pluginName, pluginPackage, methodName); return ERR_PLUGIN_NOT_INSTALLED; } return true;
/** * Checks if _objectInstance exists and has the method/property * @private */ export function instanceAvailability(pluginObj: any, methodName?: string): boolean { return pluginObj._objectInstance && (!methodName || typeof pluginObj._objectInstance[methodName] !== 'undefined'); } function setIndex(args: any[], opts: any = {}, resolve?: Function, reject?: Function): any { // ignore resolve and reject in case sync if (opts.sync) { return args; } // If the plugin method expects myMethod(success, err, options) if (opts.callbackOrder === 'reverse') { // Get those arguments in the order [resolve, reject, ...restOfArgs] args.unshift(reject); args.unshift(resolve); } else if (opts.callbackStyle === 'node') { args.push((err: any, result: any) => { if (err) { reject(err); } else { resolve(result); } }); } else if (opts.callbackStyle === 'object' && opts.successName && opts.errorName) { let obj: any = {}; obj[opts.successName] = resolve; obj[opts.errorName] = reject; args.push(obj); } else if (typeof opts.successIndex !== 'undefined' || typeof opts.errorIndex !== 'undefined') { const setSuccessIndex = () => { // If we've specified a success/error index if (opts.successIndex > args.length) { args[opts.successIndex] = resolve; } else { args.splice(opts.successIndex, 0, resolve); } }; const setErrorIndex = () => { // We don't want that the reject cb gets spliced into the position of an optional argument that has not been defined and thus causing non expected behaviour. if (opts.errorIndex > args.length) { args[opts.errorIndex] = reject; // insert the reject fn at the correct specific index } else { args.splice(opts.errorIndex, 0, reject); // otherwise just splice it into the array } }; if (opts.successIndex > opts.errorIndex) { setErrorIndex(); setSuccessIndex(); } else { setSuccessIndex(); setErrorIndex(); } } else { // Otherwise, let's tack them on to the end of the argument list // which is 90% of cases args.push(resolve); args.push(reject); } return args; } function callCordovaPlugin(pluginObj: any, methodName: string, args: any[], opts: any = {}, resolve?: Function, reject?: Function) { // Try to figure out where the success/error callbacks need to be bound // to our promise resolve/reject handlers. 
args = setIndex(args, opts, resolve, reject); const availabilityCheck = checkAvailability(pluginObj, methodName); if (availabilityCheck === true) { const pluginInstance = getPlugin(pluginObj.constructor.getPluginRef()); return pluginInstance[methodName].apply(pluginInstance, args); } else { return availabilityCheck; } } function wrapPromise(pluginObj: any, methodName: string, args: any[], opts: any = {}) { let pluginResult: any, rej: Function; const p = getPromise((resolve: Function, reject: Function) => { pluginResult = callCordovaPlugin(pluginObj, methodName, args, opts, resolve, reject); rej = reject; }); // Angular throws an error on unhandled rejection, but in this case we have already printed // a warning that Cordova is undefined or the plugin is uninstalled, so there is no reason // to error if (pluginResult && pluginResult.error) { p.catch(() => { }); typeof rej === 'function' && rej(pluginResult.error); } return p; } function wrapOtherPromise(pluginObj: any, methodName: string, args: any[], opts: any = {}) { return getPromise((resolve: Function, reject: Function) => { const pluginResult = callCordovaPlugin(pluginObj, methodName, args, opts); if (pluginResult) { if (pluginResult.error) { reject(pluginResult.error); } else if (pluginResult.then) { pluginResult.then(resolve).catch(reject); } } else { reject({ error: 'unexpected_error' }); } }); } function wrapObservable(pluginObj: any, methodName: string, args: any[], opts: any = {}) { return new Observable(observer => { let pluginResult = callCordovaPlugin(pluginObj, methodName, args, opts, observer.next.bind(observer), observer.error.bind(observer)); if (pluginResult && pluginResult.error) { observer.error(pluginResult.error); observer.complete(); } return () => { try { if (opts.clearFunction) { if (opts.clearWithArgs) { return callCordovaPlugin(pluginObj, opts.clearFunction, args, opts, observer.next.bind(observer), observer.error.bind(observer)); } return callCordovaPlugin(pluginObj, opts.clearFunction, []); } } catch (e) { console.warn('Unable to clear the previous observable watch for', pluginObj.constructor.getPluginName(), methodName); console.warn(e); } }; }); } function callInstance(pluginObj: any, methodName: string, args: any[], opts: any = {}, resolve?: Function, reject?: Function) { args = setIndex(args, opts, resolve, reject); if (instanceAvailability(pluginObj, methodName)) { return pluginObj._objectInstance[methodName].apply(pluginObj._objectInstance, args); } } /** * Wrap the event with an observable * @private * @param event even name * @param element The element to attach the event listener to * @returns {Observable} */ export function wrapEventObservable(event: string, element: any = window): Observable<any> { return Observable.fromEvent(element, event); } /** * Certain plugins expect the user to override methods in the plugin. For example, * window.cordova.plugins.backgroundMode.onactivate = function() { ... }. * * Unfortunately, this is brittle and would be better wrapped as an Observable. overrideFunction * does just this. 
* @private */ export function overrideFunction(pluginObj: any, methodName: string, args: any[], opts: any = {}): Observable<any> { return new Observable(observer => { const availabilityCheck = checkAvailability(pluginObj, methodName); if (availabilityCheck === true) { const pluginInstance = getPlugin(pluginObj.constructor.getPluginRef()); pluginInstance[methodName] = observer.next.bind(observer); return () => pluginInstance[methodName] = () => { }; } else { observer.error(availabilityCheck); observer.complete(); } }); } /** * @private */ export const wrap = function(pluginObj: any, methodName: string, opts: CordovaOptions = {}) { return (...args: any[]) => { if (opts.sync) { // Sync doesn't wrap the plugin with a promise or observable, it returns the result as-is return callCordovaPlugin(pluginObj, methodName, args, opts); } else if (opts.observable) { return wrapObservable(pluginObj, methodName, args, opts); } else if (opts.eventObservable && opts.event) { return wrapEventObservable(opts.event, opts.element); } else if (opts.otherPromise) { return wrapOtherPromise(pluginObj, methodName, args, opts); } else { return wrapPromise(pluginObj, methodName, args, opts); } }; }; /** * @private */ export function wrapInstance(pluginObj: any, methodName: string, opts: any = {}) { return (...args: any[]) => { if (opts.sync) { return callInstance(pluginObj, methodName, args, opts); } else if (opts.observable) { return new Observable(observer => { let pluginResult = callInstance(pluginObj, methodName, args, opts, observer.next.bind(observer), observer.error.bind(observer)); if (pluginResult && pluginResult.error) { observer.error(pluginResult.error); observer.complete(); } return () => { try { if (opts.clearWithArgs) { return callInstance(pluginObj, opts.clearFunction, args, opts, observer.next.bind(observer), observer.error.bind(observer)); } return callInstance(pluginObj, opts.clearFunction, []); } catch (e) { console.warn('Unable to clear the previous observable watch for', pluginObj.constructor.getPluginName(), methodName); console.warn(e); } }; }); } else if (opts.otherPromise) { return getPromise((resolve: Function, reject: Function) => { let result = callInstance(pluginObj, methodName, args, opts, resolve, reject); if (result && !!result.then) { result.then(resolve, reject); } else { reject(); } }); } else { return getPromise((resolve: Function, reject: Function) => callInstance(pluginObj, methodName, args, opts, resolve, reject)); } }; }
}
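// A toy demonstration (standalone, not part of the wrapper) of why setIndex()
// splices the lower callback index first: each splice shifts later positions.
// Suppose the plugin expects method(arg0, successCb, errorCb, arg1):
const args: any[] = ['arg0', 'arg1'];
const successIndex = 1;
const errorIndex = 2;
args.splice(successIndex, 0, 'resolve'); // ['arg0', 'resolve', 'arg1']
args.splice(errorIndex, 0, 'reject');    // ['arg0', 'resolve', 'reject', 'arg1']
// Splicing the higher index first would instead yield
// ['arg0', 'resolve', 'arg1', 'reject'] -- the error callback one slot late.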
aabb.rs
//axis aligned bounding box use crate::ray::Ray; use crate::vec3::*; #[derive(Debug, Copy, Clone)] pub struct AABB { min: Point, max: Point, } impl AABB { pub fn new_e() -> AABB { AABB { min: Point::new_e(), max: Point::new_e(), } } pub fn new(a: &Point, b: &Point) -> AABB { AABB { min: *a, max: *b } } pub fn min(&self) -> Point { self.min } pub fn max(&self) -> Point
pub fn hit(&self, r: &Ray, tmin: f64, tmax: f64) -> bool { for a in 0..3 { let t0 = f64::min( (self.min[a] - r.origin()[a]) / r.direction()[a], (self.max[a] - r.origin()[a]) / r.direction()[a], ); let t1 = f64::max( (self.min[a] - r.origin()[a]) / r.direction()[a], (self.max[a] - r.origin()[a]) / r.direction()[a], ); let tmin = f64::max(t0, tmin); let tmax = f64::min(t1, tmax); if tmax <= tmin { return false; } } true } } pub fn surrounding_box(box0: &AABB, box1: &AABB) -> AABB { let small = Point::new( f64::min(box0.min().x(), box1.min().x()), f64::min(box0.min().y(), box1.min().y()), f64::min(box0.min().z(), box1.min().z()), ); let big = Point::new( f64::max(box0.max().x(), box1.max().x()), f64::max(box0.max().y(), box1.max().y()), f64::max(box0.max().z(), box1.max().z()), ); AABB::new(&small, &big) }
{ self.max }
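// A self-contained 1-D version of the slab test that hit() above repeats per
// axis (helper name hypothetical):
fn slab_hit(min: f64, max: f64, origin: f64, dir: f64, tmin: f64, tmax: f64) -> bool {
    let t0 = f64::min((min - origin) / dir, (max - origin) / dir);
    let t1 = f64::max((min - origin) / dir, (max - origin) / dir);
    let tmin = f64::max(t0, tmin);
    let tmax = f64::min(t1, tmax);
    tmax > tmin
}

fn main() {
    // A ray starting at -1 and moving +x crosses the slab [0, 1]...
    assert!(slab_hit(0.0, 1.0, -1.0, 1.0, 0.0, f64::INFINITY));
    // ...but the same ray moving -x never enters it.
    assert!(!slab_hit(0.0, 1.0, -1.0, -1.0, 0.0, f64::INFINITY));
}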
ticket.shema.ts
import { Prop, Schema, SchemaFactory } from '@nestjs/mongoose'; import { Document } from 'mongoose'; export type TicketsDocument = Tickets & Document; @Schema() export class
{ @Prop() id: string; @Prop() name: string; @Prop() cost: number; } export const TicketsSchema = SchemaFactory.createForClass(Tickets);
Tickets
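// A hedged sketch (module name hypothetical) of wiring the schema into a
// NestJS feature module so a Model<TicketsDocument> can be injected elsewhere:
import { Module } from '@nestjs/common';
import { MongooseModule } from '@nestjs/mongoose';
import { Tickets, TicketsSchema } from './ticket.shema';

@Module({
  imports: [
    MongooseModule.forFeature([{ name: Tickets.name, schema: TicketsSchema }]),
  ],
})
export class TicketsModule {}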
unicode.rs
use crate::utils::{is_allowed, snippet, span_lint_and_sugg}; use rustc::hir::*; use rustc::lint::{LateContext, LateLintPass, LintArray, LintPass}; use rustc::{declare_lint_pass, declare_tool_lint}; use rustc_errors::Applicability; use syntax::ast::LitKind; use syntax::source_map::Span; use unicode_normalization::UnicodeNormalization; declare_clippy_lint! { /// **What it does:** Checks for the Unicode zero-width space in the code. /// /// **Why is this bad?** Having an invisible character in the code makes for all /// sorts of April fools, but otherwise is very much frowned upon. /// /// **Known problems:** None. /// /// **Example:** You don't see it, but there may be a zero-width space /// somewhere in this text. pub ZERO_WIDTH_SPACE, correctness, "using a zero-width space in a string literal, which is confusing" } declare_clippy_lint! { /// **What it does:** Checks for non-ASCII characters in string literals. /// /// **Why is this bad?** Yeah, we know, the 90's called and wanted their charset /// back. Even so, there still are editors and other programs out there that /// don't work well with Unicode. So if the code is meant to be used /// internationally, on multiple operating systems, or has other portability /// requirements, activating this lint could be useful. /// /// **Known problems:** None. /// /// **Example:** /// ```rust /// let x = String::from("€"); /// ``` /// Could be written as: /// ```rust /// let x = String::from("\u{20ac}"); /// ``` pub NON_ASCII_LITERAL, pedantic, "using any literal non-ASCII chars in a string literal instead of using the `\\u` escape" } declare_clippy_lint! { /// **What it does:** Checks for string literals that contain Unicode in a form /// that is not equal to its /// [NFC-recomposition](http://www.unicode.org/reports/tr15/#Norm_Forms). /// /// **Why is this bad?** If such a string is compared to another, the results /// may be surprising. /// /// **Known problems** None. /// /// **Example:** You may not see it, but "à"" and "à"" aren't the same string. The /// former when escaped is actually `"a\u{300}"` while the latter is `"\u{e0}"`. pub UNICODE_NOT_NFC, pedantic, "using a Unicode literal not in NFC normal form (see [Unicode tr15](http://www.unicode.org/reports/tr15/) for further information)" } declare_lint_pass!(Unicode => [ZERO_WIDTH_SPACE, NON_ASCII_LITERAL, UNICODE_NOT_NFC]); impl LateLintPass<'_, '_> for Unicode { fn check_expr(&mut self, cx: &LateContext<'_, '_>, expr: &'_ Expr) { if let ExprKind::Lit(ref lit) = expr.node { if let LitKind::Str(_, _) = lit.node { check_str(cx, lit.span, expr.hir_id) } } } } fn escape<T: Iterator<Item = char>>(s: T) -> String { let mut result = String::new(); for c in s { if c as u32 > 0x7F { for d in c.escape_unicode() { result.push(d) } } else { result.push(c); } } result } fn check_str(cx: &LateContext<'_, '_>, span: Span, id: HirId) {
let string = snippet(cx, span, ""); if string.contains('\u{200B}') { span_lint_and_sugg( cx, ZERO_WIDTH_SPACE, span, "zero-width space detected", "consider replacing the string with", string.replace("\u{200B}", "\\u{200B}"), Applicability::MachineApplicable, ); } if string.chars().any(|c| c as u32 > 0x7F) { span_lint_and_sugg( cx, NON_ASCII_LITERAL, span, "literal non-ASCII character detected", "consider replacing the string with", if is_allowed(cx, UNICODE_NOT_NFC, id) { escape(string.chars()) } else { escape(string.nfc()) }, Applicability::MachineApplicable, ); } if is_allowed(cx, NON_ASCII_LITERAL, id) && string.chars().zip(string.nfc()).any(|(a, b)| a != b) { span_lint_and_sugg( cx, UNICODE_NOT_NFC, span, "non-NFC Unicode sequence detected", "consider replacing the string with", string.nfc().collect::<String>(), Applicability::MachineApplicable, ); } }
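// A runnable illustration of the UNICODE_NOT_NFC case documented above: the
// two literals below render identically but compare unequal until normalized
// (using the same unicode-normalization crate the lint imports).
use unicode_normalization::UnicodeNormalization;

fn main() {
    let decomposed = "a\u{300}"; // 'a' followed by a combining grave accent
    let composed = "\u{e0}";     // 'à' as a single precomposed code point
    assert_ne!(decomposed, composed);
    assert_eq!(decomposed.nfc().collect::<String>(), composed);
}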
v1_function_index.rs
use std::convert::TryFrom; use num_derive::{FromPrimitive, ToPrimitive}; use num_traits::{FromPrimitive, ToPrimitive}; #[derive(Debug, PartialEq, FromPrimitive, ToPrimitive)] #[repr(usize)] pub enum FunctionIndex { WriteFuncIndex, WriteLocalFuncIndex, ReadFuncIndex, ReadLocalFuncIndex, AddFuncIndex, AddLocalFuncIndex, NewFuncIndex, RetFuncIndex, CallContractFuncIndex, GetArgFuncIndex, GetKeyFuncIndex, GasFuncIndex, HasKeyFuncIndex, PutKeyFuncIndex, StoreFnIndex, StoreFnAtHashIndex, IsValidURefFnIndex, RevertFuncIndex, AddAssociatedKeyFuncIndex, RemoveAssociatedKeyFuncIndex, UpdateAssociatedKeyFuncIndex, SetActionThresholdFuncIndex, LoadNamedKeysFuncIndex, RemoveKeyFuncIndex, GetCallerIndex, GetBlocktimeIndex, CreatePurseIndex, TransferToAccountIndex, TransferFromPurseToAccountIndex, TransferFromPurseToPurseIndex, GetBalanceIndex, GetPhaseIndex, UpgradeContractAtURefIndex, GetSystemContractIndex, GetMainPurseIndex, GetArgSizeFuncIndex, ReadHostBufferIndex, } impl Into<usize> for FunctionIndex { fn into(self) -> usize { // NOTE: This can't fail as `FunctionIndex` is represented by usize, // so this serves mostly as a syntax sugar. self.to_usize().unwrap() } } impl TryFrom<usize> for FunctionIndex { type Error = &'static str; fn
(value: usize) -> Result<Self, Self::Error> { FromPrimitive::from_usize(value).ok_or("Invalid function index") } } #[cfg(test)] mod tests { use super::FunctionIndex; use std::convert::TryFrom; #[test] fn primitive_to_enum() { FunctionIndex::try_from(19).expect("Unable to create enum from number"); } #[test] fn enum_to_primitive() { let element = FunctionIndex::UpdateAssociatedKeyFuncIndex; let _primitive: usize = element.into(); } #[test] fn invalid_index() { assert!(FunctionIndex::try_from(123_456_789usize).is_err()); } }
try_from
audio.js
var myAudio = new Audio(); myAudio.src = "music.mp3";
chrome.browserAction.onClicked.addListener(function(event){
    // On click, toggle playback: open a new tab and restart the track,
    // or pause it (e.g. to pick another song or a higher-quality version
    // of the current one).
    if (playing === "yes"){
        chrome.tabs.create({url: "chrome://newtab"})
        myAudio.currentTime=0;
        myAudio.play();
        playing = "no";
    }else{
        myAudio.pause();
        playing = "yes";
    }
})
var playing = "yes";
train_MIL_classification_trained_cnn_models.py
# Run MIL classification use pretrained CNN models # Reference: 1.Campanella, G. et al. Clinical-grade computational pathology using weakly supervised # deep learning on whole slide images. Nat Med 25, 1301–1309 (2019). # doi:10.1038/s41591-019-0508-1. Available from http://www.nature.com/articles/s41591-019-0508-1 # The source codes of the referenced paper available at https://github.com/MSKCC-Computational-Pathology/MIL-nature-medicine-2019 # This code was modified by Shengjia Chen for our work. import argparse import os import random import sys from pathlib import Path from types import SimpleNamespace from typing import Callable, Optional, Union from urllib.error import HTTPError import glob import numpy as np import pandas as pd import pytorch_lightning as pl import torch import torch.backends.cudnn as cudnn import torch.nn as nn import torch.nn.functional as F import torchvision.models as models from PIL import Image from pytorch_lightning.callbacks import (EarlyStopping, LearningRateMonitor, ModelCheckpoint) from pytorch_lightning.lite import LightningLite from pytorch_lightning.loops import Loop from skimage import io from sklearn.preprocessing import LabelEncoder from torch.utils.data import DataLoader, Dataset from torch.utils.tensorboard import SummaryWriter from torchvision import transforms from tqdm import tqdm sys.path.append('/gpfs/scratch/sc9295/digPath/MSI_vs_MSS_Classification/Step1_Training_MSI_MSS') from train_tile_level_classification import MSI_MSS_Module from sklearn.metrics import (auc, confusion_matrix, f1_score, roc_auc_score, roc_curve) best_acc = 0 def inference(loader, model): model.eval() probs = torch.FloatTensor(len(loader.dataset)) with torch.no_grad(): for i, input in enumerate(loader): # print( # 'Inference\tEpoch: [{}/{}]\tBatch: [{}/{}]'.format(run+1, args.nepochs, i+1, len(loader))) output = F.softmax(model(input), dim=1) probs[i*args.batch_size:i*args.batch_size + input.size(0)] = output.detach()[:, 1].clone() return probs.cpu().numpy() def train(run, loader, model, criterion, optimizer): mo
def calc_err(pred, real): pred = np.array(pred) real = np.array(real) pos = np.equal(pred, real) neq = np.not_equal(pred, real) acc = float(pos.sum())/pred.shape[0] err = float(neq.sum())/pred.shape[0] fpr = float(np.logical_and(pred == 1, neq).sum())/(real == 0).sum() fnr = float(np.logical_and(pred == 0, neq).sum())/(real == 1).sum() return acc, err, fpr, fnr def group_argtopk(groups, data, k=1): # groups in slide, data is prob of each tile k = min(k,len(data)) order = np.lexsort((data, groups)) groups = groups[order] data = data[order] index = np.empty(len(groups), 'bool') index[-k:] = True index[:-k] = groups[k:] != groups[:-k] return list(order[index]) # output top prob tile index in each slide def group_max(groups, data, nmax): out = np.empty(nmax) out[:] = np.nan order = np.lexsort((data, groups)) groups = groups[order] data = data[order] index = np.empty(len(groups), 'bool') index[-1] = True index[:-1] = groups[1:] != groups[:-1] out[groups[index]] = data[index] return out class MILdataset(Dataset): def __init__(self, libraryfile_dir='', root_dir='', dataset_mode='Train', transform=None, subset_rate=None): libraryfile_path = os.path.join( libraryfile_dir, f'CRC_DX_{dataset_mode}_ALL.csv') lib = pd.read_csv(libraryfile_path) lib = lib if subset_rate is None else lib.sample( frac=subset_rate, random_state=2022) lib = lib.sort_values(['subject_id'], ignore_index=True) lib.to_csv(os.path.join(libraryfile_dir, f'{dataset_mode}_temporary.csv')) slides = [] for i, name in enumerate(lib['subject_id'].unique()): # sys.stdout.write( # 'Slides: [{}/{}]\r'.format(i+1, len(lib['subject_id'].unique()))) # sys.stdout.flush() slides.append(name) # Flatten grid grid = [] slideIDX = [] for i, g in enumerate(lib['subject_id'].unique()): tiles = lib[lib['subject_id'] == g]['slice_id'] grid.extend(tiles) slideIDX.extend([i]*len(tiles)) # print('Number of tiles: {}'.format(len(grid))) self.dataframe = self.load_data_and_get_class(lib) self.slidenames = list(lib['subject_id'].values) self.slides = slides self.targets = self.dataframe['Class'] self.grid = grid self.slideIDX = slideIDX self.transform = transform self.root_dir = root_dir self.dset = f"CRC_DX_{dataset_mode}" def setmode(self, mode): self.mode = mode def maketraindata(self, idxs): self.t_data = [(self.slideIDX[x], self.grid[x], self.targets[x]) for x in idxs] def shuffletraindata(self): self.t_data = random.sample(self.t_data, len(self.t_data)) def load_data_and_get_class(self, df): df.loc[df['label'] == 'MSI', 'Class'] = 1 df.loc[df['label'] == 'MSS', 'Class'] = 0 return df def __getitem__(self, index): if self.mode == 1: slideIDX = self.slideIDX[index] tile_id = self.grid[index] slide_id = self.slides[slideIDX] img_name = "blk-{}-{}.png".format(tile_id, slide_id) target = self.targets[index] label = 'CRC_DX_MSIMUT' if target == 1 else 'CRC_DX_MSS' img_path = os.path.join(self.root_dir, self.dset, label, img_name) img = io.imread(img_path) if self.transform is not None: img = self.transform(img) return img elif self.mode == 2: slideIDX, tile_id, target = self.t_data[index] slide_id = self.slides[slideIDX] label = 'CRC_DX_MSIMUT' if target == 1 else 'CRC_DX_MSS' img_name = "blk-{}-{}.png".format(tile_id, slide_id) img_path = os.path.join(self.root_dir, self.dset, label, img_name) img = io.imread(img_path) if self.transform is not None: img = self.transform(img) return img, target def __len__(self): if self.mode == 1: return len(self.grid) elif self.mode == 2: return len(self.t_data) class Lite(LightningLite): def run(self, args): global 
best_acc print(args) self.seed_everything(2022) model_name = args.model_name sample_rate = args.sample_rate ckpt_path = os.path.join(args.model_path, f'{args.model_name}_bs{args.batch_size}_lr{args.learning_rate}') ckpt_file_path = glob.glob(os.path.join(ckpt_path,'*.ckpt'))[0] model = MSI_MSS_Module.load_from_checkpoint(ckpt_file_path) optimizer = torch.optim.AdamW( model.parameters(), lr=args.learning_rate, weight_decay=1e-4) if args.weights == 0.5: criterion = nn.CrossEntropyLoss() else: w = torch.Tensor([1-args.weights, args.weights]) criterion = nn.CrossEntropyLoss(w) # Scale model and optimizers model, optimizer = self.setup(model, optimizer, move_to_device=True) DATA_MEANS = [0.485, 0.456, 0.406] DATA_STD = [0.229, 0.224, 0.225] train_transform = transforms.Compose([ transforms.ToPILImage(), transforms.ToTensor(), transforms.RandomHorizontalFlip(), transforms.Normalize(DATA_MEANS, DATA_STD)]) test_transform = transforms.Compose([ transforms.ToPILImage(), transforms.ToTensor(), transforms.Normalize(DATA_MEANS, DATA_STD)]) train_dataset = MILdataset( args.lib_dir, args.root_dir, 'Train', transform=train_transform, subset_rate=sample_rate) val_dataset = MILdataset( args.lib_dir, args.root_dir, 'Val', transform=test_transform, subset_rate=sample_rate) test_dataset = MILdataset( args.lib_dir, args.root_dir, 'Test', transform=test_transform, subset_rate=sample_rate) train_dataloader = DataLoader(train_dataset, batch_size=args.batch_size, shuffle=False, num_workers=args.num_workers, pin_memory=True) val_dataloader = DataLoader(val_dataset, batch_size=args.batch_size, shuffle=False, num_workers=args.num_workers, pin_memory=True) test_dataloader = DataLoader(test_dataset, batch_size=args.batch_size, shuffle=False, num_workers=args.num_workers, pin_memory=True) train_dataloader, val_dataloader, test_dataloader = self.setup_dataloaders( train_dataloader, val_dataloader, test_dataloader, move_to_device=True) # open output file version_name = f'MIL_{model_name}_bs{args.batch_size}_lr{args.learning_rate}_w{args.weights}_k{args.k}_output' # logger output_path = os.path.join(args.output_path,version_name) writer = SummaryWriter(output_path) for epoch in tqdm(range(args.nepochs)): train_dataset.setmode(1) # print("train_set_len:", len(train_dataloader.dataset)) probs = inference(train_dataloader, model) # return the indices of topk tile(s) in each slides topk = group_argtopk( np.array(train_dataset.slideIDX), probs, args.k) train_dataset.maketraindata(topk) train_dataset.shuffletraindata() train_dataset.setmode(2) model.train() running_loss = 0. 
for i, (input, target) in enumerate(train_dataloader): output = model(input) loss = criterion(output, target.long()) optimizer.zero_grad() self.backward(loss) optimizer.step() running_loss += loss.item()*input.size(0) train_loss = running_loss/len(train_dataloader.dataset) print( 'Training\tEpoch: [{}/{}]\tLoss: {}'.format(epoch+1, args.nepochs, train_loss)) writer.add_scalar('train_loss', train_loss, epoch+1) # Validation if (epoch+1) % args.test_every == 0: val_dataset.setmode(1) probs = inference(val_dataloader, model) maxs = group_max(np.array(val_dataset.slideIDX), probs, len(val_dataset.targets)) pred = [1 if x >= 0.5 else 0 for x in probs] val_acc, err, fpr, fnr = calc_err(pred, val_dataset.targets) print('Validation\tEpoch: [{}/{}]\t ACC: {}\tError: {}\tFPR: {}\tFNR: {}'.format( epoch+1, args.nepochs, val_acc, err, fpr, fnr)) writer.add_scalar('val_acc', val_acc, epoch+1) writer.add_scalar('fpr', fpr, epoch+1) writer.add_scalar('fnr', fnr, epoch+1) # Save best model err = (fpr+fnr)/2. if 1-err >= best_acc: best_acc = 1-err obj = { 'epoch': epoch+1, 'state_dict': model.state_dict(), 'best_acc': best_acc, 'optimizer': optimizer.state_dict() } torch.save(obj, os.path.join(output_path, 'checkpoint_best.pth')) # test ch = torch.load(os.path.join(output_path,'checkpoint_best.pth')) # load params model.load_state_dict(ch['state_dict']) model = model.cuda() cudnn.benchmark = True train_dataset.setmode(1) val_dataset.setmode(1) test_dataset.setmode(1) # Train probs = inference(train_dataloader, model) maxs = group_max(np.array(train_dataset.slideIDX), probs, len(train_dataset.targets)) fp = open(os.path.join(output_path, f'Train_{version_name}.csv'), 'w') fp.write('slides,tiles,target,prediction,probability\n') for slides, tiles, target, prob in zip(train_dataset.slidenames, train_dataset.grid, train_dataset.targets, probs): fp.write('{},{},{},{},{}\n'.format(slides, tiles, target, int(prob>=0.5), prob)) fp.close() # Val probs = inference(val_dataloader, model) maxs = group_max(np.array(val_dataset.slideIDX), probs, len(val_dataset.targets)) fp = open(os.path.join(output_path, f'Val_{version_name}.csv'), 'w') fp.write('slides,tiles,target,prediction,probability\n') for slides, tiles, target, prob in zip(val_dataset.slidenames, val_dataset.grid, val_dataset.targets, probs): fp.write('{},{},{},{},{}\n'.format(slides, tiles, target, int(prob>=0.5), prob)) fp.close() # Test probs = inference(test_dataloader, model) maxs = group_max(np.array(test_dataset.slideIDX), probs, len(test_dataset.targets)) fp = open(os.path.join(output_path, f'Test_{version_name}.csv'), 'w') fp.write('slides,tiles,target,prediction,probability\n') for slides, tiles, target, prob in zip(test_dataset.slidenames, test_dataset.grid, test_dataset.targets, probs): fp.write('{},{},{},{},{}\n'.format(slides, tiles, target, int(prob>=0.5), prob)) fp.close() pred = [1 if x >= 0.5 else 0 for x in probs] test_acc, err, fnr, fpr = calc_err(pred, test_dataset.targets) test_f1_score = f1_score(test_dataset.targets, pred, average='binary') try: test_auroc_score = roc_auc_score(test_dataset.targets, probs) writer.add_scalar("test_auroc_score", test_auroc_score) except ValueError: writer.add_scalar('test_auroc_score', .0) writer.add_scalar('test_f1_score', test_f1_score) writer.add_scalar('test_acc', test_acc) def main(args): Lite(devices="auto", accelerator="auto").run(args) if __name__ == "__main__": parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter ) parser.add_argument( "--root_dir", type=Path, 
required=True, help="root directory of dataset", ) parser.add_argument( "--lib_dir", type=Path, required=True, help="root directory of libraryfile", ) parser.add_argument( "--model_path", type=Path, required=True, help="root directory of pretrained models", ) parser.add_argument( "--output_path", type=Path, required=True, help="output directory", ) parser.add_argument( "--model_name", default='alexnet', choices=('resnet18', 'resnet34', 'alexnet', 'vgg', 'squeezenet', 'densenet', 'inception'), type=str, help="model use for train", ) parser.add_argument( "--sample_rate", default=1, type=float, help="undersample rate", ) parser.add_argument( "--batch_size", default=128, type=int, help="batch size", ) parser.add_argument( "--learning_rate", default=1e-3, type=float, help="learning rate", ) parser.add_argument( "--num_workers", default=0, type=int, required=True, help="number of workers", ) parser.add_argument( "--nepochs", default=50, type=int, help="training epoch", ) parser.add_argument( '--test_every', default=1, type=int, help='test on val every (default: 10)') parser.add_argument( "--weights", default=0.5, type=float, help="unbalanced positive class weight (default: 0.5, balanced classes)", ) parser.add_argument( "--k", default=1, type=int, help="top k tiles are assumed to be of the same class as the slide (default: 1, standard MIL)", ) args = parser.parse_args() main(args)
del.train() running_loss = 0. for i, (input, target) in enumerate(loader): input = input.cuda() target = target.cuda() output = model(input) loss = criterion(output, target) optimizer.zero_grad() loss.backward() optimizer.step() running_loss += loss.item()*input.size(0) return running_loss/len(loader.dataset)
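The tile-selection step above hinges on group_argtopk: after lexsorting by (slide, probability), the last k entries of each slide group are the k most probable tiles. A minimal self-contained sketch on toy data (same logic as the script, nothing imported from it):

import numpy as np

def group_argtopk(groups, data, k=1):
    # Sort primarily by group, secondarily by probability (ascending), then
    # mark the last k rows of each group, i.e. the k highest probabilities.
    k = min(k, len(data))
    order = np.lexsort((data, groups))
    groups = groups[order]
    index = np.empty(len(groups), 'bool')
    index[-k:] = True
    index[:-k] = groups[k:] != groups[:-k]
    return list(order[index])

slide_ids = np.array([0, 0, 0, 1, 1])        # tile -> slide membership
probs = np.array([0.1, 0.9, 0.4, 0.3, 0.8])  # per-tile probability
print(group_argtopk(slide_ids, probs, k=1))  # [1, 4]: top tile per slide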
papertrail-test.js
/*
 * papertrail-test.js: Tests for instances of the Papertrail transport
 *
 * (C) 2012 Ken Perkins
 * MIT LICENSE
 *
 */

// TODO still some work to get these working...

var path = require('path'),
    vows = require('vows'),
    assert = require('assert'),
    winston = require('winston'),
    helpers = require('winston/test/helpers'),
    Papertrail = require('../lib/winston-papertrail').Papertrail;

function
(transport) {
    assert.instanceOf(transport, Papertrail);
    assert.isFunction(transport.log);
};

var transport = new Papertrail({ host: 'localhost', port: 12345 });

vows.describe('winston-papertrail').addBatch({
    "An instance of the Papertrail Transport": {
        "should have the proper methods defined": function () {
            assertPapertrail(transport);
        },
        "the log() method": helpers.testNpmLevels(transport,
            "should log messages to papertrail",
            function (ign, err, meta, result) {
                assert.isTrue(!err);
                assert.isObject(result);
            })
    }
}).export(module);
assertPapertrail
impls_ty.rs
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! This module contains `HashStable` implementations for various data types //! from rustc::ty in no particular order. use ich::{StableHashingContext, NodeIdHashingMode}; use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey, StableHasher, StableHasherResult}; use std::hash as std_hash; use std::mem; use middle::region; use traits; use ty; impl<'gcx, T> HashStable<StableHashingContext<'gcx>> for &'gcx ty::Slice<T> where T: HashStable<StableHashingContext<'gcx>> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { (&self[..]).hash_stable(hcx, hasher); } } impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::subst::Kind<'gcx> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { self.as_type().hash_stable(hcx, hasher); self.as_region().hash_stable(hcx, hasher); } } impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::RegionKind { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { ty::ReErased | ty::ReStatic | ty::ReEmpty => { // No variant fields to hash for these ... } ty::ReLateBound(db, ty::BrAnon(i)) => { db.depth.hash_stable(hcx, hasher); i.hash_stable(hcx, hasher); } ty::ReLateBound(db, ty::BrNamed(def_id, name)) => { db.depth.hash_stable(hcx, hasher); def_id.hash_stable(hcx, hasher); name.hash_stable(hcx, hasher); } ty::ReLateBound(db, ty::BrEnv) => { db.depth.hash_stable(hcx, hasher); } ty::ReEarlyBound(ty::EarlyBoundRegion { def_id, index, name }) => { def_id.hash_stable(hcx, hasher); index.hash_stable(hcx, hasher); name.hash_stable(hcx, hasher); } ty::ReScope(scope) => { scope.hash_stable(hcx, hasher); } ty::ReFree(ref free_region) => { free_region.hash_stable(hcx, hasher); } ty::ReLateBound(..) | ty::ReVar(..) | ty::ReSkolemized(..) 
=> { bug!("TypeIdHasher: unexpected region {:?}", *self) } } } } impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::adjustment::AutoBorrow<'gcx> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { ty::adjustment::AutoBorrow::Ref(ref region, mutability) => { region.hash_stable(hcx, hasher); mutability.hash_stable(hcx, hasher); } ty::adjustment::AutoBorrow::RawPtr(mutability) => { mutability.hash_stable(hcx, hasher); } } } } impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::adjustment::Adjust<'gcx> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { ty::adjustment::Adjust::NeverToAny | ty::adjustment::Adjust::ReifyFnPointer | ty::adjustment::Adjust::UnsafeFnPointer | ty::adjustment::Adjust::ClosureFnPointer | ty::adjustment::Adjust::MutToConstPointer | ty::adjustment::Adjust::Unsize => {} ty::adjustment::Adjust::Deref(ref overloaded) => { overloaded.hash_stable(hcx, hasher); } ty::adjustment::Adjust::Borrow(ref autoref) => { autoref.hash_stable(hcx, hasher); } } } } impl_stable_hash_for!(struct ty::adjustment::Adjustment<'tcx> { kind, target }); impl_stable_hash_for!(struct ty::adjustment::OverloadedDeref<'tcx> { region, mutbl }); impl_stable_hash_for!(struct ty::UpvarBorrow<'tcx> { kind, region }); impl_stable_hash_for!(struct ty::UpvarId { var_id, closure_expr_id }); impl_stable_hash_for!(enum ty::BorrowKind { ImmBorrow, UniqueImmBorrow, MutBorrow }); impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::UpvarCapture<'gcx> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { ty::UpvarCapture::ByValue => {} ty::UpvarCapture::ByRef(ref up_var_borrow) => { up_var_borrow.hash_stable(hcx, hasher); } } } } impl_stable_hash_for!(struct ty::GenSig<'tcx> { yield_ty, return_ty }); impl_stable_hash_for!(struct ty::FnSig<'tcx> { inputs_and_output, variadic, unsafety, abi }); impl<'gcx, T> HashStable<StableHashingContext<'gcx>> for ty::Binder<T> where T: HashStable<StableHashingContext<'gcx>> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { let ty::Binder(ref inner) = *self; inner.hash_stable(hcx, hasher); } } impl_stable_hash_for!(enum ty::ClosureKind { Fn, FnMut, FnOnce }); impl_stable_hash_for!(enum ty::Visibility { Public, Restricted(def_id), Invisible }); impl_stable_hash_for!(struct ty::TraitRef<'tcx> { def_id, substs }); impl_stable_hash_for!(struct ty::TraitPredicate<'tcx> { trait_ref }); impl_stable_hash_for!(tuple_struct ty::EquatePredicate<'tcx> { t1, t2 }); impl_stable_hash_for!(struct ty::SubtypePredicate<'tcx> { a_is_expected, a, b }); impl<'gcx, A, B> HashStable<StableHashingContext<'gcx>> for ty::OutlivesPredicate<A, B> where A: HashStable<StableHashingContext<'gcx>>, B: HashStable<StableHashingContext<'gcx>>, { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { let ty::OutlivesPredicate(ref a, ref b) = *self; a.hash_stable(hcx, hasher); b.hash_stable(hcx, hasher); } } impl_stable_hash_for!(struct ty::ProjectionPredicate<'tcx> { projection_ty, ty }); impl_stable_hash_for!(struct ty::ProjectionTy<'tcx> { substs, item_def_id }); impl<'gcx> 
HashStable<StableHashingContext<'gcx>> for ty::Predicate<'gcx> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { ty::Predicate::Trait(ref pred) => { pred.hash_stable(hcx, hasher); } ty::Predicate::Equate(ref pred) => { pred.hash_stable(hcx, hasher); } ty::Predicate::Subtype(ref pred) => { pred.hash_stable(hcx, hasher); } ty::Predicate::RegionOutlives(ref pred) => { pred.hash_stable(hcx, hasher); } ty::Predicate::TypeOutlives(ref pred) => { pred.hash_stable(hcx, hasher); } ty::Predicate::Projection(ref pred) => { pred.hash_stable(hcx, hasher); } ty::Predicate::WellFormed(ty) => { ty.hash_stable(hcx, hasher); } ty::Predicate::ObjectSafe(def_id) => { def_id.hash_stable(hcx, hasher); } ty::Predicate::ClosureKind(def_id, closure_kind) => { def_id.hash_stable(hcx, hasher); closure_kind.hash_stable(hcx, hasher); } ty::Predicate::ConstEvaluatable(def_id, substs) => { def_id.hash_stable(hcx, hasher); substs.hash_stable(hcx, hasher); } } } } impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::AdtFlags { fn hash_stable<W: StableHasherResult>(&self, _: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { std_hash::Hash::hash(self, hasher); } } impl_stable_hash_for!(struct ty::VariantDef { did, name, discr, fields, ctor_kind }); impl_stable_hash_for!(enum ty::VariantDiscr { Explicit(def_id), Relative(distance) }); impl_stable_hash_for!(struct ty::FieldDef { did, name, vis }); impl<'gcx> HashStable<StableHashingContext<'gcx>> for ::middle::const_val::ConstVal<'gcx> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { use middle::const_val::ConstVal::*; use middle::const_val::ConstAggregate::*; mem::discriminant(self).hash_stable(hcx, hasher); match *self { Integral(ref value) => { value.hash_stable(hcx, hasher); } Float(ref value) => { value.hash_stable(hcx, hasher); } Str(ref value) => { value.hash_stable(hcx, hasher); } ByteStr(ref value) => { value.hash_stable(hcx, hasher); } Bool(value) => { value.hash_stable(hcx, hasher); } Char(value) => { value.hash_stable(hcx, hasher); } Variant(def_id) => { def_id.hash_stable(hcx, hasher); } Function(def_id, substs) => { def_id.hash_stable(hcx, hasher); hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| { substs.hash_stable(hcx, hasher); }); } Aggregate(Struct(ref name_values)) => { let mut values = name_values.to_vec(); values.sort_unstable_by_key(|&(ref name, _)| name.clone()); values.hash_stable(hcx, hasher); } Aggregate(Tuple(ref value)) => { value.hash_stable(hcx, hasher); } Aggregate(Array(ref value)) => { value.hash_stable(hcx, hasher); } Aggregate(Repeat(ref value, times)) => { value.hash_stable(hcx, hasher); times.hash_stable(hcx, hasher); } Unevaluated(def_id, substs) => { def_id.hash_stable(hcx, hasher); substs.hash_stable(hcx, hasher); } } } } impl_stable_hash_for!(struct ::middle::const_val::ByteArray<'tcx> { data }); impl_stable_hash_for!(struct ty::Const<'tcx> { ty, val }); impl_stable_hash_for!(struct ::middle::const_val::ConstEvalErr<'tcx> { span, kind }); impl<'gcx> HashStable<StableHashingContext<'gcx>> for ::middle::const_val::ErrKind<'gcx> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { use middle::const_val::ErrKind::*; mem::discriminant(self).hash_stable(hcx, hasher); match *self { CannotCast | MissingStructField | NonConstPath | 
ExpectedConstTuple | ExpectedConstStruct | IndexedNonVec | IndexNotUsize | MiscBinaryOp | MiscCatchAll | IndexOpFeatureGated | TypeckError => { // nothing to do } UnimplementedConstVal(s) => { s.hash_stable(hcx, hasher); } IndexOutOfBounds { len, index } => { len.hash_stable(hcx, hasher); index.hash_stable(hcx, hasher); } Math(ref const_math_err) => { const_math_err.hash_stable(hcx, hasher); } LayoutError(ref layout_error) => { layout_error.hash_stable(hcx, hasher); } ErroneousReferencedConstant(ref const_val) => { const_val.hash_stable(hcx, hasher); } } } } impl_stable_hash_for!(struct ty::ClosureSubsts<'tcx> { substs }); impl_stable_hash_for!(struct ty::GeneratorInterior<'tcx> { witness }); impl_stable_hash_for!(struct ty::GenericPredicates<'tcx> { parent, predicates }); impl_stable_hash_for!(enum ty::Variance { Covariant, Invariant, Contravariant, Bivariant }); impl_stable_hash_for!(enum ty::adjustment::CustomCoerceUnsized { Struct(index) }); impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::Generics { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { let ty::Generics { parent, parent_regions, parent_types, ref regions, ref types, // Reverse map to each `TypeParameterDef`'s `index` field, from // `def_id.index` (`def_id.krate` is the same as the item's). type_param_to_index: _, // Don't hash this has_self, has_late_bound_regions, } = *self; parent.hash_stable(hcx, hasher); parent_regions.hash_stable(hcx, hasher); parent_types.hash_stable(hcx, hasher); regions.hash_stable(hcx, hasher); types.hash_stable(hcx, hasher); has_self.hash_stable(hcx, hasher); has_late_bound_regions.hash_stable(hcx, hasher); } } impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::RegionParameterDef { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { let ty::RegionParameterDef { name, def_id, index, pure_wrt_drop } = *self; name.hash_stable(hcx, hasher); def_id.hash_stable(hcx, hasher); index.hash_stable(hcx, hasher); pure_wrt_drop.hash_stable(hcx, hasher); } } impl_stable_hash_for!(struct ty::TypeParameterDef { name, def_id, index, has_default, object_lifetime_default, pure_wrt_drop, synthetic }); impl<'gcx, T> HashStable<StableHashingContext<'gcx>> for ::middle::resolve_lifetime::Set1<T> where T: HashStable<StableHashingContext<'gcx>> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { use middle::resolve_lifetime::Set1; mem::discriminant(self).hash_stable(hcx, hasher); match *self { Set1::Empty | Set1::Many => { // Nothing to do. 
} Set1::One(ref value) => { value.hash_stable(hcx, hasher); } } } } impl_stable_hash_for!(enum ::middle::resolve_lifetime::Region { Static, EarlyBound(index, decl), LateBound(db_index, decl), LateBoundAnon(db_index, anon_index), Free(call_site_scope_data, decl) }); impl_stable_hash_for!(struct ty::DebruijnIndex { depth }); impl_stable_hash_for!(enum ty::cast::CastKind { CoercionCast, PtrPtrCast, PtrAddrCast, AddrPtrCast, NumericCast, EnumCast, PrimIntCast, U8CharCast, ArrayPtrCast, FnPtrPtrCast, FnPtrAddrCast }); impl_stable_hash_for!(struct ::middle::region::FirstStatementIndex { idx }); impl_stable_hash_for!(struct ::middle::region::Scope { id, code }); impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for region::Scope { type KeyType = region::Scope; #[inline] fn to_stable_hash_key(&self, _: &StableHashingContext<'gcx>) -> region::Scope { *self } } impl_stable_hash_for!(struct ::middle::region::BlockRemainder { block, first_statement_index }); impl_stable_hash_for!(struct ty::adjustment::CoerceUnsizedInfo { custom_kind }); impl_stable_hash_for!(struct ty::FreeRegion { scope, bound_region }); impl_stable_hash_for!(enum ty::BoundRegion { BrAnon(index), BrNamed(def_id, name), BrFresh(index), BrEnv }); impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::TypeVariants<'gcx> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { use ty::TypeVariants::*; mem::discriminant(self).hash_stable(hcx, hasher); match *self { TyBool | TyChar | TyStr | TyError | TyNever => { // Nothing more to hash. } TyInt(int_ty) => { int_ty.hash_stable(hcx, hasher); } TyUint(uint_ty) => { uint_ty.hash_stable(hcx, hasher); } TyFloat(float_ty) => { float_ty.hash_stable(hcx, hasher); } TyAdt(adt_def, substs) => { adt_def.hash_stable(hcx, hasher); substs.hash_stable(hcx, hasher); } TyArray(inner_ty, len) => { inner_ty.hash_stable(hcx, hasher); len.hash_stable(hcx, hasher); } TySlice(inner_ty) => { inner_ty.hash_stable(hcx, hasher); } TyRawPtr(pointee_ty) => { pointee_ty.hash_stable(hcx, hasher); } TyRef(region, pointee_ty) => { region.hash_stable(hcx, hasher); pointee_ty.hash_stable(hcx, hasher); } TyFnDef(def_id, substs) => { def_id.hash_stable(hcx, hasher); substs.hash_stable(hcx, hasher); } TyFnPtr(ref sig) => { sig.hash_stable(hcx, hasher); } TyDynamic(ref existential_predicates, region) => { existential_predicates.hash_stable(hcx, hasher); region.hash_stable(hcx, hasher); } TyClosure(def_id, closure_substs) => { def_id.hash_stable(hcx, hasher); closure_substs.hash_stable(hcx, hasher); } TyGenerator(def_id, closure_substs, interior) => { def_id.hash_stable(hcx, hasher); closure_substs.hash_stable(hcx, hasher); interior.hash_stable(hcx, hasher); } TyTuple(inner_tys, from_diverging_type_var) => { inner_tys.hash_stable(hcx, hasher); from_diverging_type_var.hash_stable(hcx, hasher); } TyProjection(ref projection_ty) => { projection_ty.hash_stable(hcx, hasher); } TyAnon(def_id, substs) => { def_id.hash_stable(hcx, hasher); substs.hash_stable(hcx, hasher); } TyParam(param_ty) => { param_ty.hash_stable(hcx, hasher); } TyInfer(..) 
=> { bug!("ty::TypeVariants::hash_stable() - Unexpected variant {:?}.", *self) } } } } impl_stable_hash_for!(struct ty::ParamTy { idx, name }); impl_stable_hash_for!(struct ty::TypeAndMut<'tcx> { ty, mutbl }); impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::ExistentialPredicate<'gcx> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { mem::discriminant(self).hash_stable(hcx, hasher); match *self { ty::ExistentialPredicate::Trait(ref trait_ref) => { trait_ref.hash_stable(hcx, hasher); } ty::ExistentialPredicate::Projection(ref projection) => { projection.hash_stable(hcx, hasher); } ty::ExistentialPredicate::AutoTrait(def_id) => { def_id.hash_stable(hcx, hasher); } } } } impl_stable_hash_for!(struct ty::ExistentialTraitRef<'tcx> { def_id, substs }); impl_stable_hash_for!(struct ty::ExistentialProjection<'tcx> { item_def_id, substs, ty }); impl_stable_hash_for!(struct ty::Instance<'tcx> { def, substs }); impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::InstanceDef<'gcx> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { mem::discriminant(self).hash_stable(hcx, hasher); match *self {
def_id.hash_stable(hcx, hasher); } ty::InstanceDef::FnPtrShim(def_id, ty) => { def_id.hash_stable(hcx, hasher); ty.hash_stable(hcx, hasher); } ty::InstanceDef::Virtual(def_id, n) => { def_id.hash_stable(hcx, hasher); n.hash_stable(hcx, hasher); } ty::InstanceDef::ClosureOnceShim { call_once } => { call_once.hash_stable(hcx, hasher); } ty::InstanceDef::DropGlue(def_id, t) => { def_id.hash_stable(hcx, hasher); t.hash_stable(hcx, hasher); } ty::InstanceDef::CloneShim(def_id, t) => { def_id.hash_stable(hcx, hasher); t.hash_stable(hcx, hasher); } } } } impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::TraitDef { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { let ty::TraitDef { // We already have the def_path_hash below, no need to hash it twice def_id: _, unsafety, paren_sugar, has_default_impl, def_path_hash, } = *self; unsafety.hash_stable(hcx, hasher); paren_sugar.hash_stable(hcx, hasher); has_default_impl.hash_stable(hcx, hasher); def_path_hash.hash_stable(hcx, hasher); } } impl_stable_hash_for!(struct ty::Destructor { did }); impl_stable_hash_for!(struct ty::DtorckConstraint<'tcx> { outlives, dtorck_types }); impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::CrateVariancesMap { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { let ty::CrateVariancesMap { ref dependencies, ref variances, // This is just an irrelevant helper value. empty_variance: _, } = *self; dependencies.hash_stable(hcx, hasher); variances.hash_stable(hcx, hasher); } } impl_stable_hash_for!(struct ty::AssociatedItem { def_id, name, kind, vis, defaultness, container, method_has_self_argument }); impl_stable_hash_for!(enum ty::AssociatedKind { Const, Method, Type }); impl_stable_hash_for!(enum ty::AssociatedItemContainer { TraitContainer(def_id), ImplContainer(def_id) }); impl<'gcx, T> HashStable<StableHashingContext<'gcx>> for ty::steal::Steal<T> where T: HashStable<StableHashingContext<'gcx>> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { self.borrow().hash_stable(hcx, hasher); } } impl_stable_hash_for!(struct ty::ParamEnv<'tcx> { caller_bounds, reveal }); impl_stable_hash_for!(enum traits::Reveal { UserFacing, All }); impl_stable_hash_for!(enum ::middle::privacy::AccessLevel { Reachable, Exported, Public }); impl<'gcx> HashStable<StableHashingContext<'gcx>> for ::middle::privacy::AccessLevels { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| { let ::middle::privacy::AccessLevels { ref map } = *self; map.hash_stable(hcx, hasher); }); } } impl_stable_hash_for!(struct ty::CrateInherentImpls { inherent_impls }); impl_stable_hash_for!(enum ::session::CompileIncomplete { Stopped, Errored(error_reported) }); impl_stable_hash_for!(struct ::util::common::ErrorReported {}); impl_stable_hash_for!(tuple_struct ::middle::reachable::ReachableSet { reachable_set }); impl<'gcx, N> HashStable<StableHashingContext<'gcx>> for traits::Vtable<'gcx, N> where N: HashStable<StableHashingContext<'gcx>> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { use traits::Vtable::*; mem::discriminant(self).hash_stable(hcx, hasher); match self { &VtableImpl(ref table_impl) => table_impl.hash_stable(hcx, hasher), &VtableDefaultImpl(ref 
table_def_impl) => table_def_impl.hash_stable(hcx, hasher), &VtableParam(ref table_param) => table_param.hash_stable(hcx, hasher), &VtableObject(ref table_obj) => table_obj.hash_stable(hcx, hasher), &VtableBuiltin(ref table_builtin) => table_builtin.hash_stable(hcx, hasher), &VtableClosure(ref table_closure) => table_closure.hash_stable(hcx, hasher), &VtableFnPointer(ref table_fn_pointer) => table_fn_pointer.hash_stable(hcx, hasher), &VtableGenerator(ref table_generator) => table_generator.hash_stable(hcx, hasher), } } } impl<'gcx, N> HashStable<StableHashingContext<'gcx>> for traits::VtableImplData<'gcx, N> where N: HashStable<StableHashingContext<'gcx>> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { let traits::VtableImplData { impl_def_id, substs, ref nested, } = *self; impl_def_id.hash_stable(hcx, hasher); substs.hash_stable(hcx, hasher); nested.hash_stable(hcx, hasher); } } impl<'gcx, N> HashStable<StableHashingContext<'gcx>> for traits::VtableDefaultImplData<N> where N: HashStable<StableHashingContext<'gcx>> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { let traits::VtableDefaultImplData { trait_def_id, ref nested, } = *self; trait_def_id.hash_stable(hcx, hasher); nested.hash_stable(hcx, hasher); } } impl<'gcx, N> HashStable<StableHashingContext<'gcx>> for traits::VtableObjectData<'gcx, N> where N: HashStable<StableHashingContext<'gcx>> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { let traits::VtableObjectData { upcast_trait_ref, vtable_base, ref nested, } = *self; upcast_trait_ref.hash_stable(hcx, hasher); vtable_base.hash_stable(hcx, hasher); nested.hash_stable(hcx, hasher); } } impl<'gcx, N> HashStable<StableHashingContext<'gcx>> for traits::VtableBuiltinData<N> where N: HashStable<StableHashingContext<'gcx>> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { let traits::VtableBuiltinData { ref nested, } = *self; nested.hash_stable(hcx, hasher); } } impl<'gcx, N> HashStable<StableHashingContext<'gcx>> for traits::VtableClosureData<'gcx, N> where N: HashStable<StableHashingContext<'gcx>> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { let traits::VtableClosureData { closure_def_id, substs, ref nested, } = *self; closure_def_id.hash_stable(hcx, hasher); substs.hash_stable(hcx, hasher); nested.hash_stable(hcx, hasher); } } impl<'gcx, N> HashStable<StableHashingContext<'gcx>> for traits::VtableFnPointerData<'gcx, N> where N: HashStable<StableHashingContext<'gcx>> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { let traits::VtableFnPointerData { fn_ty, ref nested, } = *self; fn_ty.hash_stable(hcx, hasher); nested.hash_stable(hcx, hasher); } } impl<'gcx, N> HashStable<StableHashingContext<'gcx>> for traits::VtableGeneratorData<'gcx, N> where N: HashStable<StableHashingContext<'gcx>> { fn hash_stable<W: StableHasherResult>(&self, hcx: &mut StableHashingContext<'gcx>, hasher: &mut StableHasher<W>) { let traits::VtableGeneratorData { closure_def_id, substs, ref nested, } = *self; closure_def_id.hash_stable(hcx, hasher); substs.hash_stable(hcx, hasher); nested.hash_stable(hcx, hasher); } }
ty::InstanceDef::Item(def_id) => { def_id.hash_stable(hcx, hasher); } ty::InstanceDef::Intrinsic(def_id) => {
config.js
module.exports = {
  title: 'Leo`s TechStack',
  description: 'leochen cqy blog',
  base: "/blog/",
  head: [
    ['link', { rel: 'icon', href: '/img/logo.png' }]
  ],
  markdown: {
    lineNumbers: true
  },
  locales: {
    '/': {
      lang: 'zh-CN', // will be set as the lang attribute of <html>
    }
  },
  plugins: ['@vuepress/back-to-top'],
  themeConfig: {
    logo: '/img/logo.png',
    smoothScroll: true,
    lastUpdated: '最后更新时间',
    repo: 'YuQuanSir/blog',
    // Custom text for the repository link. Defaults to one of
    // "GitHub"/"GitLab"/"Bitbucket" (inferred from `themeConfig.repo`),
    // or "Source" otherwise.
    repoLabel: '查看源码',

    // The options below are optional edit-link settings
    //
    // If your docs live in a different repository from the project itself:
    // docsRepo: 'vuejs/vuepress',
    // If your docs are not at the root of the repository:
    docsDir: 'docs',
    // If your docs live on a specific branch:
    // docsBranch: 'master',
    // Defaults to false; set to true to enable
    editLinks: true,
    // Defaults to "Edit this page"
    editLinkText: '帮助我们改善此页面!',
    nav: [{
      text: '首页',
      link: '/'
    },
    {
      text: '最新',
      link: '/lastUpdate'
    },
    {
      text: '计算机基础',
      items: [
        { text: '操作系统', link: '/jsjbasic/os/' },
        { text: '数据结构', link: '/jsjbasic/dstructure/' },
        { text: '算法', link: '/jsjbasic/algorithm/' }
      ]
    },
    {
      text: 'Node',
      ariaLabel: 'Node Menu',
      items: [
        { text: '基础知识', link: '/node/ndoe_basic/' },
        { text: '核心模块', link: '/node/hxmodule/' },
        { text: '第三方模块', link: '/node/dsfmodule/' },
        { text: 'Express', link: '/node/express/' },
        { text: 'Koa', link: '/node/koa/' },
        { text: 'mongoDB', link: '/node/mongodb/' },
        { text: '项目实战', link: '/node/shizhan/' }
      ]
    },
    {
      text: '前端',
      ariaLabel: 'frontend Menu',
      items: [{
        text: '脚本语言',
        items: [
          { text: 'JavaScript', link: '/script/javascript/js0' },
          { text: 'ES6', link: '/script/ES6/' },
          { text: 'TypeScript', link: '/script/typescript/' }
        ]
      },
      {
        text: 'JS库',
        items: [
          { text: 'jQuery', link: '/js_lib/jquery/' },
          { text: 'zepto', link: '/js_lib/zepto/' }
        ]
      },
      {
        text: '样式',
        items: [
          { text: 'HTML', link: '/ui/html/' },
          { text: 'CSS', link: '/ui/css/' },
          { text: 'SASS', link: '/ui/sass/' },
          { text: 'LESS', link: '/ui/less/' },
          { text: 'JS DOM', link: '/ui/js_dom/' },
          { text: 'canvas', link: '/ui/canvas/' },
        ]
      },
      {
        text: '框架',
        items: [
          { text: 'BootStrap', link: '/framework/bootstrap/' },
          { text: 'Vue', link: '/framework/vue/' },
          { text: 'React', link: '/framework/react/' },
          { text: 'Angular', link: '/framework/angular/' }
        ]
      },
      {
        text: '构建工具',
        items: [
          { text: 'Gulp', link: '/gjtool/gulp/' },
          { text: 'WebPack', link: '/gjtool/webpack/' },
          { text: 'Grunt', link: '/gjtool/grunt/' },
        ]
      },
      ]
    },
    {
      text: '其他',
      ariaLabel: 'other Menu',
      items: [
        { text: '友情链接', link: '/links' },
        {
          text: '技术',
          items: [
            { text: 'python', link: '/other_tech/python/' },
            { text: 'java', link: '/other_tech/java/' },
            { text: 'php', link: '/other_tech/php/' },
            { text: 'c', link: '/other_tech/c/' }
          ]
        },
        {
          text: '记录',
          items: [
            { text: '思路想法随笔', link: '/note/suibi_note/' },
            { text: '零散技术笔记', link: '/note/ls_note/' },
            { text: '错误日志', link: '/note/error_note/' },
            { text: '疑问记录', link: '/note/yiwen_note/' }
          ]
        }
      ]
    }
    ],
    sidebarDepth: 3,
    sidebar: {
      '/script/javascript/': [{
        title: 'JavaScript',
        collapsable: false,
        sidebarDepth: 3,
        children: [
          '/script/javascript/js0',
          '/script/javascript/js1',
          '/script/javascript/js2',
          '/script/javascript/js3'
        ]
      }]
    }
  }
}
ariaLabel: 'basic Menu',
bases.py
#!python

import string
# Hint: Use these string constants to encode/decode hexadecimal digits and more
# string.digits is '0123456789'
# string.hexdigits is '0123456789abcdefABCDEF'
# string.ascii_lowercase is 'abcdefghijklmnopqrstuvwxyz'
# string.ascii_uppercase is 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
# string.ascii_letters is ascii_lowercase + ascii_uppercase
# string.printable is digits + ascii_letters + punctuation + whitespace


def
(digits, base):
    """Decode given digits in given base to number in base 10.
    digits: str -- string representation of number (in given base)
    base: int -- base of given number
    return: int -- integer representation of number (in base 10)"""
    # Handle up to base 36 [0-9a-z]
    assert 2 <= base <= 36, f'base is out of range: {base}'
    # Decode digits from any base (2 up to 36):
    # for each digit, use its position (index) and the base to accumulate
    # digit * base ** index
    decimal_num = 0
    digits = digits[::-1]
    for i in range(len(digits)):
        digit = int(digits[i], base=base)
        decimal_num += digit * base ** i
    return decimal_num


def encode(number, base):
    """Encode given number in base 10 to digits in given base.
    number: int -- integer representation of number (in base 10)
    base: int -- base to convert to
    return: str -- string representation of number (in given base)"""
    # Handle up to base 36 [0-9a-z]
    assert 2 <= base <= 36, f'base is out of range: {base}'
    # Handle unsigned numbers only for now
    assert number >= 0, f'number is negative: {number}'
    # binary (base 2)
    # 10 -> 2:
    # 10/2 = 5: 0
    # 5/2 = 2: 1
    # 2/2 = 1: 0
    # 1/2 = 0: 1 - then read the remainders bottom up:
    # 1010 = 1 * 2^3 + 0 * 2^2 + 1 * 2^1 + 0 * 2^0
    # Encode number in any base (2 up to 36)
    if number == 0:
        return "0"  # special-case zero, otherwise the loop never runs
    result = ""
    while number > 0:
        remainder = number % base
        number -= remainder
        number = number // base
        if remainder > 9:
            remainder = string.ascii_lowercase[remainder - 10]
        result = str(remainder) + result
    return result


def convert(digits, base1, base2):
    """Convert given digits in base1 to digits in base2.
    digits: str -- string representation of number (in base1)
    base1: int -- base of given number
    base2: int -- base to convert to
    return: str -- string representation of number (in base2)"""
    # Handle up to base 36 [0-9a-z]
    assert 2 <= base1 <= 36, f'base1 is out of range: {base1}'
    assert 2 <= base2 <= 36, f'base2 is out of range: {base2}'
    # Start by using decode to turn the digits into base 10 form,
    # then use encode to turn the base 10 number into the desired base.
    # Convert digits from any base to any base (2 up to 36)
    decoded_base10 = decode(digits, base1)
    result = encode(decoded_base10, base2)
    return result


def main():
    """Read command-line arguments and convert given digits between bases."""
    import sys
    args = sys.argv[1:]  # Ignore script file name
    if len(args) == 3:
        digits = args[0]
        base1 = int(args[1])
        base2 = int(args[2])
        # Convert given digits between bases
        result = convert(digits, base1, base2)
        print(f'{digits} in base {base1} is {result} in base {base2}')
    else:
        print(f'Usage: {sys.argv[0]} digits base1 base2')
        print('Converts digits from base1 to base2')


if __name__ == '__main__':
    main()
decode
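A quick worked example of the three functions above (assuming the module is importable as bases): decoding walks the digits right-to-left accumulating digit * base ** index, encoding collects remainders of repeated division.

from bases import decode, encode, convert

# 'ff' in base 16: 15 * 16**0 + 15 * 16**1 = 255
assert decode('ff', 16) == 255
# 255 in base 2: remainders of repeated division by 2, read in reverse
assert encode(255, 2) == '11111111'
# convert() is just decode() followed by encode()
assert convert('ff', 16, 2) == '11111111'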
interceptLinkClicks.ts
import { locationUtil } from '@grafana/data';
import { locationService, navigationLogger } from '@grafana/runtime';

export function
(e: MouseEvent) {
  const anchor = getParentAnchor(e.target as HTMLElement);

  // Ignore if opening new tab or already default prevented
  if (e.ctrlKey || e.metaKey || e.defaultPrevented) {
    return;
  }

  if (anchor) {
    let href = anchor.getAttribute('href');
    const target = anchor.getAttribute('target');

    if (href && !target) {
      navigationLogger('utils', false, 'intercepting link click', e);
      e.preventDefault();
      href = locationUtil.stripBaseFromUrl(href);

      // Ensure old angular urls with no starting '/' are handled the same as
      // before, i.e. they were seen as being absolute from the app root, and
      // make sure external links are handled correctly.
      if (href[0] !== '/') {
        // If it still contains a protocol or is a mailto link, it's an
        // absolute link to another domain or web application.
        if (href.indexOf('://') > 0 || href.indexOf('mailto:') === 0) {
          window.location.href = href;
          return;
        } else {
          href = `/${href}`;
        }
      }

      locationService.push(href);
    }
  }
}

function getParentAnchor(element: HTMLElement | null): HTMLElement | null {
  while (element !== null && element.tagName) {
    if (element.tagName.toUpperCase() === 'A') {
      return element;
    }
    element = element.parentNode as HTMLElement;
  }
  return null;
}
interceptLinkClicks
crc.rs
#![allow(non_snake_case, non_upper_case_globals)] #![allow(non_camel_case_types)] //! CRC1 //! //! Used by: stm32mp153, stm32mp157 use crate::RWRegister; #[cfg(not(feature = "nosync"))] use core::marker::PhantomData; /// CRC data register pub mod CRC_DR { /// DR pub mod DR { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (32 bits: 0xffffffff << 0) pub const mask: u32 = 0xffffffff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// CRC independent data register pub mod CRC_IDR { /// IDR pub mod IDR { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (32 bits: 0xffffffff << 0) pub const mask: u32 = 0xffffffff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// CRC control register pub mod CRC_CR { /// RESET pub mod RESET { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (1 bit: 1 << 0) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// POLYSIZE pub mod POLYSIZE { /// Offset (3 bits) pub const offset: u32 = 3; /// Mask (2 bits: 0b11 << 3) pub const mask: u32 = 0b11 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// REV_IN pub mod REV_IN { /// Offset (5 bits) pub const offset: u32 = 5; /// Mask (2 bits: 0b11 << 5) pub const mask: u32 = 0b11 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// REV_OUT pub mod REV_OUT { /// Offset (7 bits) pub const offset: u32 = 7; /// Mask (1 bit: 1 << 7) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// CRC initial value pub mod CRC_INIT { /// CRC_INIT pub mod CRC_INIT { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (32 bits: 0xffffffff << 0) pub const mask: u32 = 0xffffffff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// CRC polynomial pub mod CRC_POL { /// POL pub mod POL { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (32 bits: 0xffffffff << 0) pub const mask: u32 = 0xffffffff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } #[repr(C)] pub struct RegisterBlock { /// CRC data register pub CRC_DR: RWRegister<u32>, /// CRC independent data register pub CRC_IDR: RWRegister<u32>, /// CRC control register pub CRC_CR: RWRegister<u32>,
/// CRC initial value pub CRC_INIT: RWRegister<u32>, /// CRC polynomial pub CRC_POL: RWRegister<u32>, } pub struct ResetValues { pub CRC_DR: u32, pub CRC_IDR: u32, pub CRC_CR: u32, pub CRC_INIT: u32, pub CRC_POL: u32, } #[cfg(not(feature = "nosync"))] pub struct Instance { pub(crate) addr: u32, pub(crate) _marker: PhantomData<*const RegisterBlock>, } #[cfg(not(feature = "nosync"))] impl ::core::ops::Deref for Instance { type Target = RegisterBlock; #[inline(always)] fn deref(&self) -> &RegisterBlock { unsafe { &*(self.addr as *const _) } } } #[cfg(feature = "rtic")] unsafe impl Send for Instance {}
_reserved1: [u32; 1],
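The offset/mask constants above are meant to be combined with the raw register word; a small illustrative sketch of inserting and extracting the 2-bit POLYSIZE field (bits 4:3 of CRC_CR), using an arbitrary field value:

POLYSIZE_OFFSET = 3
POLYSIZE_MASK = 0b11 << POLYSIZE_OFFSET

def set_polysize(cr: int, value: int) -> int:
    # Clear the field, then OR in the new value shifted to its offset.
    return (cr & ~POLYSIZE_MASK) | ((value << POLYSIZE_OFFSET) & POLYSIZE_MASK)

def get_polysize(cr: int) -> int:
    # Mask the field out and shift it back down to bit 0.
    return (cr & POLYSIZE_MASK) >> POLYSIZE_OFFSET

cr = set_polysize(0, 0b10)
assert get_polysize(cr) == 0b10 and cr == 0b10000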
0006_auto_20210531_1145.py
# Generated by Django 3.1.2 on 2021-05-31 14:45

import datetime
from django.db import migrations, models


class
(migrations.Migration):

    dependencies = [
        ('abastece', '0005_auto_20210528_1946'),
    ]

    operations = [
        migrations.AlterField(
            model_name='pedido',
            name='timestamp',
            field=models.DateTimeField(default=datetime.datetime(2021, 5, 31, 11, 45, 20, 503212), editable=False, verbose_name='fecha y hora'),
        ),
        migrations.AlterField(
            model_name='producto',
            name='titulo',
            field=models.CharField(max_length=200),
        ),
    ]
Migration
bank.go
package bank

import "fmt"

// Bank :: Struct
type Bank struct {
	Account Person
	Balance float32
}

// Person :: Struct
type Person struct {
	Name string
	Cpf  string
}

// Transfer :: Function
// account is a pointer so the credited balance is visible to the caller;
// passing Bank by value would silently discard the deposit.
func (bank *Bank) Transfer(account *Bank, money float32) {
	if bank.Balance >= money {
		account.Balance += money
		bank.Balance -= money
		fmt.Printf("Transferência no valor de R$%.2f, feita com sucesso para %s\n", money, account.Account.Name)
	} else {
		fmt.Println("Saldo insuficiente para o valor requisitado.")
	}
}

// Draw :: Function
func (bank *Bank) Draw(money float32) {
	if bank.Balance >= money {
lse {
		fmt.Println("Não foi possível sacar o valor requisitado.")
	}
}

// Deposit :: Function
func (bank *Bank) Deposit(money float32) {
	bank.Balance += money
	fmt.Printf("O valor R$%.2f foi depositado com sucesso!\n", money)
}

func (bank Bank) Extract() {
	fmt.Printf("O valor que você %s tem guardado na sua conta é %.2f\n", bank.Account.Name, bank.Balance)
}
		bank.Balance -= money
		fmt.Printf("O valor R$%.2f foi sacado com sucesso!\n", money)
	} e
main.rs
use structopt::StructOpt;

mod opts;
use opts::DisplayMode;

mod parse;
use parse::{Node, CtNode, Line, parse};

const BARS: &[char] = &[
    ' ', '▏', '▎', '▍', '▌', '▋', '▊', '▉', '█',
];

fn main() {
    const BARS_WIDE: usize = 8;
    let opt = opts::Options::from_args();
    let mode = opt.display.validate();

    let (title, func): (&str, fn(&Node, usize, &str) -> Vec<CtNode>) = match mode {
        DisplayMode::Fuzzy => ("Fuzzy", Node::top_inclusive_filt),
        DisplayMode::Exact => ("Exact", Node::top_exclusive),
        DisplayMode::Heat => ("Heatmap", Node::top_inclusive),
    };

    let t = parse(opt.file, opt.shell.validate());
    // println!("{:#?}", t);

    let lines = ct_node_to_list_line(func(&t, opt.count, ""));

    println!("");
    println!(" {} Commands ", title);
    println!("");
    println!("| HEAT | COUNT | COMMAND ");
    println!("| -------- | -------- | ---------");
    for i in lines.iter() {
        println!("| {} | {:8} | {}", pct_to_bar(i.pct, BARS_WIDE), i.node.count, i.node.full_text);
    }
    println!("");
}

fn ct_node_to_list_
<CtNode>) -> Vec<Line> {
    let max = if let Some(item) = in_dat.first() {
        item.count as f64
    } else {
        return vec![];
    };

    in_dat.drain(..).map(|line| {
        Line {
            pct: (line.count as f64) / max,
            node: line,
        }
    })
    .collect()
}

fn pct_to_bar(pct: f64, width: usize) -> String {
    let mult = (BARS.len() - 1) * width;
    let ct = pct * (mult as f64);
    let ct = ct.round();
    let mut ct = ct as usize;

    let mut out = String::with_capacity(width);

    for _ in 0..width {
        let idx = std::cmp::min(ct, BARS.len() - 1);
        ct -= idx;
        out.push(BARS[idx]);
    }

    out
}

pub fn eject(reason: &str) -> ! {
    eprintln!("{}", reason);
    std::process::exit(-1);
}
line(mut in_dat: Vec
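pct_to_bar above quantizes a fraction into width * 8 "eighths" and then spends them greedily, one glyph per output cell; a direct Python transliteration (the BARS table matches the Rust source):

BARS = [' ', '\u258f', '\u258e', '\u258d', '\u258c', '\u258b', '\u258a', '\u2589', '\u2588']

def pct_to_bar(pct: float, width: int) -> str:
    ct = round(pct * (len(BARS) - 1) * width)  # total eighth-blocks to draw
    out = []
    for _ in range(width):
        idx = min(ct, len(BARS) - 1)  # draw at most one full block per cell
        ct -= idx
        out.append(BARS[idx])
    return ''.join(out)

print(repr(pct_to_bar(0.5, 8)))  # '████    ': four full blocks, four spaces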
data_source_update_reasons_pagination_response.py
# coding: utf-8 """ LogicMonitor REST API LogicMonitor is a SaaS-based performance monitoring platform that provides full visibility into complex, hybrid infrastructures, offering granular performance monitoring and actionable data and insights. logicmonitor_sdk enables you to manage your LogicMonitor account programmatically. # noqa: E501 OpenAPI spec version: 1.0.0 Generated by: https://github.com/swagger-api/swagger-codegen.git """ import pprint import re # noqa: F401 import six from logicmonitor_sdk.models.update_reason import UpdateReason # noqa: F401,E501 class DataSourceUpdateReasonsPaginationResponse(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'total': 'int', 'search_id': 'str', 'items': 'list[UpdateReason]' } attribute_map = { 'total': 'total', 'search_id': 'searchId', 'items': 'items' } def __init__(self, total=None, search_id=None, items=None): # noqa: E501 """DataSourceUpdateReasonsPaginationResponse - a model defined in Swagger""" # noqa: E501 self._total = None self._search_id = None self._items = None self.discriminator = None if total is not None: self.total = total if search_id is not None: self.search_id = search_id if items is not None: self.items = items @property def total(self): """Gets the total of this DataSourceUpdateReasonsPaginationResponse. # noqa: E501 :return: The total of this DataSourceUpdateReasonsPaginationResponse. # noqa: E501 :rtype: int """ return self._total @total.setter def total(self, total): """Sets the total of this DataSourceUpdateReasonsPaginationResponse. :param total: The total of this DataSourceUpdateReasonsPaginationResponse. # noqa: E501 :type: int """ self._total = total @property def search_id(self): """Gets the search_id of this DataSourceUpdateReasonsPaginationResponse. # noqa: E501 :return: The search_id of this DataSourceUpdateReasonsPaginationResponse. # noqa: E501 :rtype: str """ return self._search_id @search_id.setter def search_id(self, search_id): """Sets the search_id of this DataSourceUpdateReasonsPaginationResponse. :param search_id: The search_id of this DataSourceUpdateReasonsPaginationResponse. # noqa: E501 :type: str """ self._search_id = search_id @property def items(self): """Gets the items of this DataSourceUpdateReasonsPaginationResponse. # noqa: E501 :return: The items of this DataSourceUpdateReasonsPaginationResponse. # noqa: E501 :rtype: list[UpdateReason] """ return self._items @items.setter def
(self, items): """Sets the items of this DataSourceUpdateReasonsPaginationResponse. :param items: The items of this DataSourceUpdateReasonsPaginationResponse. # noqa: E501 :type: list[UpdateReason] """ self._items = items def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(DataSourceUpdateReasonsPaginationResponse, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, DataSourceUpdateReasonsPaginationResponse): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
items
download_wordvecs.py
import zipfile
import argparse
import os

from squad_preprocess import maybe_download


def setup_args():
    parser = argparse.ArgumentParser()
    parser.add_argument("--download_dir", required=True)  # where to put the downloaded glove files
    return parser.parse_args()


def
():
    args = setup_args()
    glove_base_url = "http://nlp.stanford.edu/data/"
    glove_filename = "glove.6B.zip"

    print("\nDownloading wordvecs to {}".format(args.download_dir))
    if not os.path.exists(args.download_dir):
        os.makedirs(args.download_dir)
    maybe_download(glove_base_url, glove_filename, args.download_dir, 862182613)

    glove_zip_ref = zipfile.ZipFile(os.path.join(args.download_dir, glove_filename), 'r')
    glove_zip_ref.extractall(args.download_dir)
    glove_zip_ref.close()


if __name__ == '__main__':
    main()
main
apps.py
from django.apps import AppConfig


class
(AppConfig):
    name = 'markers'
MarkersConfig
Brodsmulesti.tsx
import React, { useState } from 'react'; import { useSelector } from 'react-redux'; import { AppState } from 'store/reducers'; import Lenke from 'nav-frontend-lenker'; import { Bilde } from '../../../common/bilde/Bilde'; import HomeIcon from 'ikoner/home.svg'; import { HoyreChevron } from 'nav-frontend-chevron'; import { postMessageToApp } from 'utils/messages'; import { Locale } from 'store/reducers/language-duck'; import Tekst, { finnTekst } from 'tekster/finn-tekst'; import BEMHelper from 'utils/bem'; import { getArbeidsflateContext } from '../../../common/arbeidsflate-lenker/arbeidsflate-lenker'; import './Brodsmulesti.less'; export interface Breadcrumb { url: string; title: string; handleInApp?: boolean; } interface Props { breadcrumbs: Breadcrumb[]; }
const { environment } = useSelector((state: AppState) => state); const { XP_BASE_URL } = environment; const [showAll, setShowAll] = useState(false); const { status } = useSelector((state: AppState) => state.arbeidsflate); const { language } = useSelector((state: AppState) => state.language); const context = getArbeidsflateContext(XP_BASE_URL, status); const { breadcrumbs } = props; const isLanguageNorwegian = language === Locale.NYNORSK || language === Locale.BOKMAL; const breadcrumbsCase = breadcrumbs.map((b) => ({ ...b, title: b.title .split(' ') .map((title, i) => (!i ? `${title.charAt(0).toUpperCase() + title.slice(1)}` : `${title}`)) .join(' '), })); const breadcrumbsSliced = showAll ? breadcrumbsCase : breadcrumbsCase.slice(breadcrumbsCase.length - 2); const homeUrlMap: { [key: string]: string } = { nb: `${XP_BASE_URL}`, nn: `${XP_BASE_URL}`, en: `${XP_BASE_URL}/en/home`, se: `${XP_BASE_URL}/se/samegiella`, }; return ( <nav className={cls.className} aria-label={finnTekst('brodsmulesti', language)} itemProp="breadcrumb"> <ol> <li className="typo-normal"> <Lenke href={homeUrlMap[language]} className={cls.element('home')}> <Bilde asset={HomeIcon} className={cls.element('icon')} /> <span>nav.no</span> <HoyreChevron /> </Lenke> </li> {isLanguageNorwegian && ( <li className="typo-normal"> <Lenke href={context.url}> <span> <Tekst id={context.lenkeTekstId} /> </span> <HoyreChevron /> </Lenke> </li> )} {!showAll && breadcrumbs.length > 2 && ( <li className="typo-normal"> <button aria-label={finnTekst('brodsmulesti-se-alle', language)} className={`${cls.element('view-all')} lenke`} onClick={(e) => { e.preventDefault(); setShowAll(true); }} > <span>...</span> <HoyreChevron /> </button> </li> )} {breadcrumbsSliced.map((breadcrumb, i) => ( <li key={i} className="typo-normal" aria-current={i + 1 === breadcrumbsSliced.length && `page`}> {i + 1 !== breadcrumbsSliced.length ? ( breadcrumb.handleInApp ? ( <Lenke href={breadcrumb.url} className={cls.element('transform')} onClick={(e) => { e.preventDefault(); postMessageToApp('breadcrumbClick', breadcrumb); }} > <span>{breadcrumb.title}</span> <HoyreChevron /> </Lenke> ) : ( <Lenke href={breadcrumb.url} className={cls.element('transform')}> <span>{breadcrumb.title}</span> <HoyreChevron /> </Lenke> ) ) : ( <span className={cls.element('transform')}>{breadcrumb.title}</span> )} </li> ))} </ol> </nav> ); }; export default Brodsmulesti;
export const Brodsmulesti = (props: Props) => { const cls = BEMHelper('brodsmulesti');
models.py
import requests from django.db import models from django.utils import timezone from users.models import CustomUser from datetime import datetime def get_coordinate(gps, ref): coordinate = gps[0] + gps[1]/60 + gps[2]/3600 if ref in ('W', 'S'): coordinate = -coordinate return coordinate def get_timestamp(timestamp_string): datetime_object = datetime.strptime(timestamp_string, '%Y:%m:%d %H:%M:%S') return datetime_object class Photo(models.Model): name = models.CharField(max_length=120) lat = models.DecimalField(max_digits=9, decimal_places=6) lon = models.DecimalField(max_digits=9, decimal_places=6) timestamp = models.DateTimeField(default=timezone.now) user = models.ForeignKey(CustomUser, on_delete=models.CASCADE) airspace_name = models.CharField(max_length=120, default='') airspace_class = models.CharField(max_length=120, default='G') def save_many(photos, user): for photo in photos: name = photo['ImageDescription'] lat = get_coordinate(photo['GPSLatitude'], photo['GPSLatitudeRef']) lon = get_coordinate(photo['GPSLongitude'], photo['GPSLongitudeRef']) timestamp = get_timestamp(photo['DateTimeOriginal']) t = requests.post( 'http://airspace-service.herokuapp.com/geo/getAirspace', data={"longitude": lon, "latitude": lat} ) airspace_data = t.json() airspace_name = airspace_data['name'] airspace_class = airspace_data['class'] photo_model = Photo( name=name, lat=lat, lon=lon, timestamp=timestamp, user=user, airspace_name=airspace_name, airspace_class=airspace_class ) photo_model.save() def get_all(user): return Photo.objects.filter(user=user).values( 'id', 'name', 'lat', 'lon', 'timestamp', 'airspace_name', 'airspace_class' ) def delete_all(user): return Photo.objects.filter(user=user).delete()
return Photo.objects.filter(user=user, id=id).delete()
def delete_one(user, id):
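The get_coordinate helper above converts EXIF degrees/minutes/seconds tuples into signed decimal degrees. A standalone check of that arithmetic; the sample tuples are invented for illustration:

```python
# DMS-to-decimal conversion as in models.py above; west/south references
# flip the sign. Sample values are made up.
def get_coordinate(gps, ref):
    coordinate = gps[0] + gps[1] / 60 + gps[2] / 3600
    if ref in ('W', 'S'):
        coordinate = -coordinate
    return coordinate

print(get_coordinate((52, 13, 48.0), 'N'))  # 52.23
print(get_coordinate((21, 0, 36.0), 'W'))   # -21.01
```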
access.rs
use crate::storage::SparseSetIndex; use fixedbitset::FixedBitSet; use std::marker::PhantomData; /// `Access` keeps track of read and write accesses to values within a collection. /// /// This is used for ensuring systems are executed soundly. #[derive(Debug, Eq, PartialEq, Clone)] pub struct Access<T: SparseSetIndex> { reads_all: bool, /// A combined set of T read and write accesses. reads_and_writes: FixedBitSet, writes: FixedBitSet, marker: PhantomData<T>, } impl<T: SparseSetIndex> Default for Access<T> { fn default() -> Self { Self { reads_all: false, reads_and_writes: Default::default(), writes: Default::default(), marker: PhantomData, } } } impl<T: SparseSetIndex> Access<T> { pub fn grow(&mut self, bits: usize) { self.reads_and_writes.grow(bits); self.writes.grow(bits); } /// Adds a read access for the given index. pub fn
(&mut self, index: T) { self.reads_and_writes.grow(index.sparse_set_index() + 1); self.reads_and_writes.insert(index.sparse_set_index()); } /// Adds a write access for the given index. pub fn add_write(&mut self, index: T) { self.reads_and_writes.grow(index.sparse_set_index() + 1); self.writes.grow(index.sparse_set_index() + 1); self.reads_and_writes.insert(index.sparse_set_index()); self.writes.insert(index.sparse_set_index()); } /// Returns true if this `Access` contains a read access for the given index. pub fn has_read(&self, index: T) -> bool { if self.reads_all { true } else { self.reads_and_writes.contains(index.sparse_set_index()) } } /// Returns true if this `Access` contains a write access for the given index. pub fn has_write(&self, index: T) -> bool { self.writes.contains(index.sparse_set_index()) } /// Sets this `Access` to having read access for all indices. pub fn read_all(&mut self) { self.reads_all = true; } /// Returns true if this `Access` has read access to all indices. pub fn reads_all(&self) -> bool { self.reads_all } /// Clears all recorded accesses. pub fn clear(&mut self) { self.reads_all = false; self.reads_and_writes.clear(); self.writes.clear(); } /// Extends this `Access` with another, copying all accesses of `other` into this. pub fn extend(&mut self, other: &Access<T>) { self.reads_all = self.reads_all || other.reads_all; self.reads_and_writes.union_with(&other.reads_and_writes); self.writes.union_with(&other.writes); } /// Returns true if this `Access` is compatible with `other`. /// /// Two `Access` instances are incompatible with each other if one `Access` has a write for /// which the other also has a write or a read. pub fn is_compatible(&self, other: &Access<T>) -> bool { if self.reads_all { 0 == other.writes.count_ones(..) } else if other.reads_all { 0 == self.writes.count_ones(..) } else { self.writes.is_disjoint(&other.reads_and_writes) && self.reads_and_writes.is_disjoint(&other.writes) } } /// Calculates conflicting accesses between this `Access` and `other`. 
pub fn get_conflicts(&self, other: &Access<T>) -> Vec<T> { let mut conflicts = FixedBitSet::default(); if self.reads_all { conflicts.extend(other.writes.ones()); } if other.reads_all { conflicts.extend(self.writes.ones()); } conflicts.extend(self.writes.intersection(&other.reads_and_writes)); conflicts.extend(self.reads_and_writes.intersection(&other.writes)); conflicts .ones() .map(SparseSetIndex::get_sparse_set_index) .collect() } } #[derive(Clone, Eq, PartialEq)] pub struct FilteredAccess<T: SparseSetIndex> { access: Access<T>, with: FixedBitSet, without: FixedBitSet, } impl<T: SparseSetIndex> Default for FilteredAccess<T> { fn default() -> Self { Self { access: Access::default(), with: Default::default(), without: Default::default(), } } } impl<T: SparseSetIndex> FilteredAccess<T> { #[inline] pub fn access(&self) -> &Access<T> { &self.access } pub fn add_read(&mut self, index: T) { self.access.add_read(index.clone()); self.add_with(index); } pub fn add_write(&mut self, index: T) { self.access.add_write(index.clone()); self.add_with(index); } pub fn add_with(&mut self, index: T) { self.with.grow(index.sparse_set_index() + 1); self.with.insert(index.sparse_set_index()); } pub fn add_without(&mut self, index: T) { self.without.grow(index.sparse_set_index() + 1); self.without.insert(index.sparse_set_index()); } pub fn is_compatible(&self, other: &FilteredAccess<T>) -> bool { if self.access.is_compatible(&other.access) { true } else { self.with.intersection(&other.without).next().is_some() || self.without.intersection(&other.with).next().is_some() } } pub fn extend(&mut self, access: &FilteredAccess<T>) { self.access.extend(&access.access); self.with.union_with(&access.with); self.without.union_with(&access.without); } } pub struct FilteredAccessSet<T: SparseSetIndex> { combined_access: Access<T>, filtered_accesses: Vec<FilteredAccess<T>>, } impl<T: SparseSetIndex> FilteredAccessSet<T> { #[inline] pub fn combined_access(&self) -> &Access<T> { &self.combined_access } #[inline] pub fn combined_access_mut(&mut self) -> &mut Access<T> { &mut self.combined_access } pub fn get_conflicts(&self, filtered_access: &FilteredAccess<T>) -> Vec<T> { // if combined unfiltered access is incompatible, check each filtered access for // compatibility if !filtered_access.access.is_compatible(&self.combined_access) { for current_filtered_access in self.filtered_accesses.iter() { if !current_filtered_access.is_compatible(filtered_access) { return current_filtered_access .access .get_conflicts(&filtered_access.access); } } } Vec::new() } pub fn add(&mut self, filtered_access: FilteredAccess<T>) { self.combined_access.extend(&filtered_access.access); self.filtered_accesses.push(filtered_access); } } impl<T: SparseSetIndex> Default for FilteredAccessSet<T> { fn default() -> Self { Self { combined_access: Default::default(), filtered_accesses: Vec::new(), } } } #[cfg(test)] mod tests { use crate::query::{Access, FilteredAccess}; #[test] fn access_get_conflicts() { let mut access_a = Access::<usize>::default(); access_a.add_read(0); access_a.add_read(1); let mut access_b = Access::<usize>::default(); access_b.add_read(0); access_b.add_write(1); assert_eq!(access_a.get_conflicts(&access_b), vec![1]); let mut access_c = Access::<usize>::default(); access_c.add_write(0); access_c.add_write(1); assert_eq!(access_a.get_conflicts(&access_c), vec![0, 1]); assert_eq!(access_b.get_conflicts(&access_c), vec![0, 1]); let mut access_d = Access::<usize>::default(); access_d.add_read(0); 
assert_eq!(access_d.get_conflicts(&access_a), vec![]); assert_eq!(access_d.get_conflicts(&access_b), vec![]); assert_eq!(access_d.get_conflicts(&access_c), vec![0]); } #[test] fn filtered_access_extend() { let mut access_a = FilteredAccess::<usize>::default(); access_a.add_read(0); access_a.add_read(1); access_a.add_with(2); let mut access_b = FilteredAccess::<usize>::default(); access_b.add_read(0); access_b.add_write(3); access_b.add_without(4); access_a.extend(&access_b); let mut expected = FilteredAccess::<usize>::default(); expected.add_read(0); expected.add_read(1); expected.add_with(2); expected.add_write(3); expected.add_without(4); assert!(access_a.eq(&expected)); } }
add_read
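The compatibility rule documented in access.rs (two accesses conflict exactly when one writes something the other reads or writes) is easiest to see with plain sets. This Python model is illustrative only: it uses ints for component indices, plain sets instead of bitsets, and omits the reads_all fast path:

```python
# Illustrative model of the Access compatibility rule above.
class Access:
    def __init__(self):
        self.reads_and_writes = set()  # everything touched
        self.writes = set()            # subset touched mutably

    def add_read(self, i):
        self.reads_and_writes.add(i)

    def add_write(self, i):
        self.reads_and_writes.add(i)
        self.writes.add(i)

    def is_compatible(self, other):
        # Conflict iff one side writes something the other reads or writes.
        return (self.writes.isdisjoint(other.reads_and_writes)
                and self.reads_and_writes.isdisjoint(other.writes))

a, b = Access(), Access()
a.add_read(0)
b.add_write(0)
print(a.is_compatible(b))  # False: b writes what a reads
```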
justifier.py
# Copyright 2019 Quantapix Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================= class Justifier: def __init__(self, **kw): super().__init__(**kw) self.justs = [0] * 9 self.offsets = [(0, 0, 0, 1, 1, 1, 1, 1, 1), (0, -1, -2, 0, 0, 0, 1, 1, 1), (0, -1, -2, 0, -1, -2, 0, 0, 0)] def init_justs(self, justs):
def calc_just(self, justs): for i in justs: j = self.justs[i] + (i % 3) if j == 1: return 'justify-content-center' elif j > 1: return 'justify-content-end' return 'justify-content-start'
for i in justs: g = i // 3 os = self.offsets[g] if os: self.justs = [sum(x) for x in zip(self.justs, os)] self.offsets[g] = None
request.py
from datetime import date, datetime from dateutil.tz import tzutc import logging import json from gzip import GzipFile from requests.auth import HTTPBasicAuth from requests import sessions from io import BytesIO from posthog.version import VERSION from posthog.utils import remove_trailing_slash _session = sessions.Session() def post(api_key, host=None, gzip=False, timeout=15, **kwargs):
class APIError(Exception): def __init__(self, status, code, message): self.message = message self.status = status self.code = code def __str__(self): msg = "[PostHog] {0}: {1} ({2})" return msg.format(self.code, self.message, self.status) class DatetimeSerializer(json.JSONEncoder): def default(self, obj): if isinstance(obj, (date, datetime)): return obj.isoformat() return json.JSONEncoder.default(self, obj)
"""Post the `kwargs` to the API""" log = logging.getLogger('posthog') body = kwargs body["sentAt"] = datetime.utcnow().replace(tzinfo=tzutc()).isoformat() url = remove_trailing_slash(host or 'https://t.posthog.com') + '/batch/' body['api_key'] = api_key data = json.dumps(body, cls=DatetimeSerializer) log.debug('making request: %s', data) headers = { 'Content-Type': 'application/json', 'User-Agent': 'analytics-python/' + VERSION } if gzip: headers['Content-Encoding'] = 'gzip' buf = BytesIO() with GzipFile(fileobj=buf, mode='w') as gz: # 'data' was produced by json.dumps(), # whose default encoding is utf-8. gz.write(data.encode('utf-8')) data = buf.getvalue() res = _session.post(url, data=data, headers=headers, timeout=timeout) if res.status_code == 200: log.debug('data uploaded successfully') return res try: payload = res.json() log.debug('received response: %s', payload) raise APIError(res.status_code, payload['code'], payload['message']) except ValueError: raise APIError(res.status_code, 'unknown', res.text)
color_mapper.ts
import {Transform} from "../transforms/transform" import {Factor} from "../ranges/factor_range" import * as p from "core/properties" import {Arrayable, Color} from "core/types" import {color2hex} from "core/util/color" import {is_little_endian} from "core/util/compat" export interface RGBAMapper { v_compute(xs: Arrayable<number> | Arrayable<Factor>): Uint8Array } export function
(color: string): number { if (color[0] != "#") color = color2hex(color) if (color.length != 9) color = color + 'ff' return parseInt(color.slice(1), 16) } export function _convert_palette(palette: Color[]): Uint32Array { const new_palette = new Uint32Array(palette.length) for (let i = 0, end = palette.length; i < end; i++) new_palette[i] = _convert_color(palette[i]) return new_palette } export function _uint32_to_rgba(values: Uint32Array): Uint8Array { if (is_little_endian) { const view = new DataView(values.buffer) for (let i = 0, end = values.length; i < end; i++) view.setUint32(i*4, values[i]) } return new Uint8Array(values.buffer) } export namespace ColorMapper { export interface Attrs extends Transform.Attrs { palette: Color[] nan_color: Color } export interface Props extends Transform.Props {} } export interface ColorMapper extends ColorMapper.Attrs {} export abstract class ColorMapper extends Transform<Color> { properties: ColorMapper.Props constructor(attrs?: Partial<ColorMapper.Attrs>) { super(attrs) } static initClass(): void { this.prototype.type = "ColorMapper" this.define({ palette: [ p.Any ], // TODO (bev) nan_color: [ p.Color, "gray" ], }) } compute(_x: number): never { // If it's just a single value, then a color mapper doesn't really make sense. throw new Error("not supported") } v_compute(xs: Arrayable<number> | Arrayable<Factor>): Arrayable<Color> { const values: Color[] = new Array(xs.length) this._v_compute(xs, values, this.palette, this._colors((c) => c)) return values } get rgba_mapper(): RGBAMapper { const self = this const palette = _convert_palette(this.palette) const colors = this._colors(_convert_color) return { v_compute(xs: Arrayable<number> | Arrayable<Factor>): Uint8Array { const values = new Uint32Array(xs.length) self._v_compute(xs, values, palette, colors) return _uint32_to_rgba(values) }, } } protected _colors<T>(conv: (c: Color) => T): {nan_color: T} { return {nan_color: conv(this.nan_color)} } protected abstract _v_compute<T>(xs: Arrayable<number> | Arrayable<Factor>, values: Arrayable<T>, palette: Arrayable<T>, colors: {nan_color: T}): void } ColorMapper.initClass()
_convert_color
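_convert_color packs a CSS hex color into a single 32-bit RGBA integer by appending an opaque alpha byte before parsing. The same arithmetic transcribed to Python for a quick check; the color2hex fallback for named colors is deliberately skipped here:

```python
# Same packing as _convert_color above, minus named-color handling:
# "#rrggbb" gains an "ff" alpha, then parses as one 32-bit integer.
def convert_color(color: str) -> int:
    if len(color) != 9:  # "#rrggbbaa" is 9 characters
        color = color + "ff"
    return int(color[1:], 16)

assert convert_color("#ff0000") == 0xff0000ff    # opaque red
assert convert_color("#00ff0080") == 0x00ff0080  # half-transparent green
```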
1.py
index = 0 for item in ['ab', 'cd', 'ef']:
print("{0}: {1}".format(index, item)) index += 1
struct_with_anon_union.rs
/* automatically generated by rust-bindgen */ #![allow(dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals)] #[repr(C)] #[derive(Copy, Clone)] pub struct foo { pub bar: foo__bindgen_ty_1, } #[repr(C)] #[derive(Copy, Clone)] pub union foo__bindgen_ty_1 { pub a: ::std::os::raw::c_uint, pub b: ::std::os::raw::c_ushort, _bindgen_union_align: u32, } #[test] fn bindgen_test_layout_foo__bindgen_ty_1() { assert_eq!( ::std::mem::size_of::<foo__bindgen_ty_1>(), 4usize, concat!("Size of: ", stringify!(foo__bindgen_ty_1)) ); assert_eq!( ::std::mem::align_of::<foo__bindgen_ty_1>(), 4usize, concat!("Alignment of ", stringify!(foo__bindgen_ty_1)) ); assert_eq!( unsafe { &(*(::std::ptr::null::<foo__bindgen_ty_1>())).a as *const _ as usize }, 0usize, concat!( "Offset of field: ", stringify!(foo__bindgen_ty_1), "::", stringify!(a) ) ); assert_eq!( unsafe { &(*(::std::ptr::null::<foo__bindgen_ty_1>())).b as *const _ as usize }, 0usize, concat!( "Offset of field: ", stringify!(foo__bindgen_ty_1), "::", stringify!(b) ) ); } impl Default for foo__bindgen_ty_1 { fn default() -> Self { unsafe { ::std::mem::zeroed() } } } #[test] fn
() { assert_eq!( ::std::mem::size_of::<foo>(), 4usize, concat!("Size of: ", stringify!(foo)) ); assert_eq!( ::std::mem::align_of::<foo>(), 4usize, concat!("Alignment of ", stringify!(foo)) ); assert_eq!( unsafe { &(*(::std::ptr::null::<foo>())).bar as *const _ as usize }, 0usize, concat!("Offset of field: ", stringify!(foo), "::", stringify!(bar)) ); } impl Default for foo { fn default() -> Self { unsafe { ::std::mem::zeroed() } } }
bindgen_test_layout_foo
pro_vecbase_ctrl.rs
#[doc = "Register `PRO_VECBASE_CTRL` reader"] pub struct R(crate::R<PRO_VECBASE_CTRL_SPEC>); impl core::ops::Deref for R { type Target = crate::R<PRO_VECBASE_CTRL_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<PRO_VECBASE_CTRL_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<PRO_VECBASE_CTRL_SPEC>) -> Self { R(reader) } } #[doc = "Register `PRO_VECBASE_CTRL` writer"] pub struct W(crate::W<PRO_VECBASE_CTRL_SPEC>); impl core::ops::Deref for W { type Target = crate::W<PRO_VECBASE_CTRL_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl From<crate::W<PRO_VECBASE_CTRL_SPEC>> for W { #[inline(always)] fn from(writer: crate::W<PRO_VECBASE_CTRL_SPEC>) -> Self { W(writer) } } #[doc = "Field `PRO_OUT_VECBASE_SEL` reader - "] pub struct PRO_OUT_VECBASE_SEL_R(crate::FieldReader<u8, u8>); impl PRO_OUT_VECBASE_SEL_R { #[inline(always)] pub(crate) fn new(bits: u8) -> Self { PRO_OUT_VECBASE_SEL_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for PRO_OUT_VECBASE_SEL_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `PRO_OUT_VECBASE_SEL` writer - "] pub struct PRO_OUT_VECBASE_SEL_W<'a> { w: &'a mut W, } impl<'a> PRO_OUT_VECBASE_SEL_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x03) | (value as u32 & 0x03); self.w } } impl R { #[doc = "Bits 0:1"] #[inline(always)] pub fn pro_out_vecbase_sel(&self) -> PRO_OUT_VECBASE_SEL_R { PRO_OUT_VECBASE_SEL_R::new((self.bits & 0x03) as u8) } } impl W { #[doc = "Bits 0:1"] #[inline(always)] pub fn pro_out_vecbase_sel(&mut self) -> PRO_OUT_VECBASE_SEL_W { PRO_OUT_VECBASE_SEL_W { w: self } } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self
} #[doc = "\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pro_vecbase_ctrl](index.html) module"] pub struct PRO_VECBASE_CTRL_SPEC; impl crate::RegisterSpec for PRO_VECBASE_CTRL_SPEC { type Ux = u32; } #[doc = "`read()` method returns [pro_vecbase_ctrl::R](R) reader structure"] impl crate::Readable for PRO_VECBASE_CTRL_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [pro_vecbase_ctrl::W](W) writer structure"] impl crate::Writable for PRO_VECBASE_CTRL_SPEC { type Writer = W; } #[doc = "`reset()` method sets PRO_VECBASE_CTRL to value 0"] impl crate::Resettable for PRO_VECBASE_CTRL_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
{ self.0.bits(bits); self }
c8f4b08529a4_.py
"""empty message Revision ID: c8f4b08529a4 Revises: bbd324935815 Create Date: 2017-05-02 00:04:57.131824 """ # revision identifiers, used by Alembic. revision = 'c8f4b08529a4' down_revision = 'bbd324935815'
def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('entries', sa.Column('user_id', sa.Integer(), nullable=True)) op.create_foreign_key(None, 'entries', 'users', ['user_id'], ['id'], ondelete='cascade') # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_constraint(None, 'entries', type_='foreignkey') op.drop_column('entries', 'user_id') # ### end Alembic commands ###
from alembic import op import sqlalchemy as sa
solution.go
package main import ( "fmt" ) /* Approach 1: Walk haystack; whenever the current character matches the first character of needle, compare the following characters one by one. Return the index of the current position on a full match, otherwise keep walking; return -1 if no match is found. func strStr1(haystack string, needle string) int { // faster 100% less 65.52% if len(needle) == 0 { return 0 } else { if len(haystack) == 0 { return -1 } } left, index := 0, 0 for { if left == len(haystack) || left+index == len(haystack) { return -1 } if haystack[left+index] == needle[index] { if index == len(needle)-1 { return left } index++ } else { index = 0 left++ } } } */ /* Approach 2: Simpler than approach 1: when the first character matches, slice haystack[index:index+len(needle)] and compare it with needle in one step instead of character by character. */ func strStr(haystack string, needle string) int { // faster 100% less 65.52% hlen, nlen := len(haystack), len(needle) // when hlen equals nlen, i == 0 must still be reachable, hence <= for i := 0; i <= hlen-nlen; i++ { if haystack[i:i+nlen] == needle { return
i } } return -1 } func main() { haystack := "mississippi" needle := "issipi" result := strStr(haystack, needle) fmt.Println(result) }
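Approach 2's window comparison ports directly to Python. The inclusive bound (a range end of hlen - nlen + 1 here) is what keeps i == 0 reachable when the two strings have equal length:

```python
# The slicing approach from solution.go, transcribed to Python: compare a
# needle-sized window at each start position.
def str_str(haystack: str, needle: str) -> int:
    hlen, nlen = len(haystack), len(needle)
    for i in range(hlen - nlen + 1):
        if haystack[i:i + nlen] == needle:
            return i
    return -1

print(str_str("mississippi", "issipi"))  # -1: no match
print(str_str("hello", "ll"))            # 2
```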
ast.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // The Rust abstract syntax tree. pub use self::TyParamBound::*; pub use self::UnsafeSource::*; pub use self::ViewPath_::*; pub use self::PathParameters::*; pub use symbol::{Ident, Symbol as Name}; pub use util::ThinVec; use syntax_pos::{Span, DUMMY_SP}; use codemap::{respan, Spanned}; use abi::Abi; use ext::hygiene::{Mark, SyntaxContext}; use print::pprust; use ptr::P; use rustc_data_structures::indexed_vec; use symbol::{Symbol, keywords}; use tokenstream::{ThinTokenStream, TokenStream}; use serialize::{self, Encoder, Decoder}; use std::collections::HashSet; use std::fmt; use std::rc::Rc; use std::u32; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub struct Lifetime { pub id: NodeId, pub span: Span, pub ident: Ident, } impl fmt::Debug for Lifetime { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "lifetime({}: {})", self.id, pprust::lifetime_to_string(self)) } } /// A lifetime definition, e.g. `'a: 'b+'c+'d` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct LifetimeDef { pub attrs: ThinVec<Attribute>, pub lifetime: Lifetime, pub bounds: Vec<Lifetime> } /// A "Path" is essentially Rust's notion of a name. /// /// It's represented as a sequence of identifiers, /// along with a bunch of supporting information. /// /// E.g. `std::cmp::PartialEq` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub struct Path { pub span: Span, /// The segments in the path: the things separated by `::`. /// Global paths begin with `keywords::CrateRoot`. pub segments: Vec<PathSegment>, } impl<'a> PartialEq<&'a str> for Path { fn eq(&self, string: &&'a str) -> bool { self.segments.len() == 1 && self.segments[0].identifier.name == *string } } impl fmt::Debug for Path { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "path({})", pprust::path_to_string(self)) } } impl fmt::Display for Path { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", pprust::path_to_string(self)) } } impl Path { // convert a span and an identifier to the corresponding // 1-segment path pub fn from_ident(s: Span, identifier: Ident) -> Path { Path { span: s, segments: vec![PathSegment::from_ident(identifier, s)], } } pub fn default_to_global(mut self) -> Path { if !self.is_global() && !::parse::token::Ident(self.segments[0].identifier).is_path_segment_keyword() { self.segments.insert(0, PathSegment::crate_root(self.span)); } self } pub fn is_global(&self) -> bool { !self.segments.is_empty() && self.segments[0].identifier.name == keywords::CrateRoot.name() } } /// A segment of a path: an identifier, an optional lifetime, and a set of types. /// /// E.g. `std`, `String` or `Box<T>` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct PathSegment { /// The identifier portion of this path segment. pub identifier: Ident, /// Span of the segment identifier. pub span: Span, /// Type/lifetime parameters attached to this path. They come in /// two flavors: `Path<A,B,C>` and `Path(A,B) -> C`. 
Note that /// this is more than just simple syntactic sugar; the use of /// parens affects the region binding rules, so we preserve the /// distinction. /// The `Option<P<..>>` wrapper is purely a size optimization; /// `None` is used to represent both `Path` and `Path<>`. pub parameters: Option<P<PathParameters>>, } impl PathSegment { pub fn from_ident(ident: Ident, span: Span) -> Self { PathSegment { identifier: ident, span: span, parameters: None } } pub fn crate_root(span: Span) -> Self { PathSegment { identifier: Ident { ctxt: span.ctxt, ..keywords::CrateRoot.ident() }, span: span, parameters: None, } } } /// Parameters of a path segment. /// /// E.g. `<A, B>` as in `Foo<A, B>` or `(A, B)` as in `Foo(A, B)` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum PathParameters { /// The `<'a, A,B,C>` in `foo::bar::baz::<'a, A,B,C>` AngleBracketed(AngleBracketedParameterData), /// The `(A,B)` and `C` in `Foo(A,B) -> C` Parenthesized(ParenthesizedParameterData), } /// A path like `Foo<'a, T>` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Default)] pub struct AngleBracketedParameterData { /// The lifetime parameters for this path segment. pub lifetimes: Vec<Lifetime>, /// The type parameters for this path segment, if present. pub types: Vec<P<Ty>>, /// Bindings (equality constraints) on associated types, if present. /// /// E.g., `Foo<A=Bar>`. pub bindings: Vec<TypeBinding>, } impl Into<Option<P<PathParameters>>> for AngleBracketedParameterData { fn into(self) -> Option<P<PathParameters>> { let empty = self.lifetimes.is_empty() && self.types.is_empty() && self.bindings.is_empty(); if empty { None } else { Some(P(PathParameters::AngleBracketed(self))) } } } /// A path like `Foo(A,B) -> C` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct ParenthesizedParameterData { /// Overall span pub span: Span, /// `(A,B)` pub inputs: Vec<P<Ty>>, /// `C` pub output: Option<P<Ty>>, } #[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash, Debug)] pub struct NodeId(u32); impl NodeId { pub fn new(x: usize) -> NodeId { assert!(x < (u32::MAX as usize)); NodeId(x as u32) } pub fn from_u32(x: u32) -> NodeId { NodeId(x) } pub fn as_usize(&self) -> usize { self.0 as usize } pub fn as_u32(&self) -> u32 { self.0 } pub fn placeholder_from_mark(mark: Mark) -> Self { NodeId(mark.as_u32()) } pub fn placeholder_to_mark(self) -> Mark { Mark::from_u32(self.0) } } impl fmt::Display for NodeId { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
} impl serialize::UseSpecializedEncodable for NodeId { fn default_encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { s.emit_u32(self.0) } } impl serialize::UseSpecializedDecodable for NodeId { fn default_decode<D: Decoder>(d: &mut D) -> Result<NodeId, D::Error> { d.read_u32().map(NodeId) } } impl indexed_vec::Idx for NodeId { fn new(idx: usize) -> Self { NodeId::new(idx) } fn index(self) -> usize { self.as_usize() } } /// Node id used to represent the root of the crate. pub const CRATE_NODE_ID: NodeId = NodeId(0); /// When parsing and doing expansions, we initially give all AST nodes this AST /// node value. Then later, in the renumber pass, we renumber them to have /// small, positive ids. pub const DUMMY_NODE_ID: NodeId = NodeId(!0); /// The AST represents all type param bounds as types. /// typeck::collect::compute_bounds matches these against /// the "special" built-in traits (see middle::lang_items) and /// detects Copy, Send and Sync. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum TyParamBound { TraitTyParamBound(PolyTraitRef, TraitBoundModifier), RegionTyParamBound(Lifetime) } /// A modifier on a bound, currently this is only used for `?Sized`, where the /// modifier is `Maybe`. Negative bounds should also be handled here. #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum TraitBoundModifier { None, Maybe, } pub type TyParamBounds = Vec<TyParamBound>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct TyParam { pub attrs: ThinVec<Attribute>, pub ident: Ident, pub id: NodeId, pub bounds: TyParamBounds, pub default: Option<P<Ty>>, pub span: Span, } /// Represents lifetimes and type parameters attached to a declaration /// of a function, enum, trait, etc. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Generics { pub lifetimes: Vec<LifetimeDef>, pub ty_params: Vec<TyParam>, pub where_clause: WhereClause, pub span: Span, } impl Generics { pub fn is_lt_parameterized(&self) -> bool { !self.lifetimes.is_empty() } pub fn is_type_parameterized(&self) -> bool { !self.ty_params.is_empty() } pub fn is_parameterized(&self) -> bool { self.is_lt_parameterized() || self.is_type_parameterized() } pub fn span_for_name(&self, name: &str) -> Option<Span> { for t in &self.ty_params { if t.ident.name == name { return Some(t.span); } } None } } impl Default for Generics { /// Creates an instance of `Generics`. fn default() -> Generics { Generics { lifetimes: Vec::new(), ty_params: Vec::new(), where_clause: WhereClause { id: DUMMY_NODE_ID, predicates: Vec::new(), }, span: DUMMY_SP, } } } /// A `where` clause in a definition #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct WhereClause { pub id: NodeId, pub predicates: Vec<WherePredicate>, } /// A single predicate in a `where` clause #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum WherePredicate { /// A type binding, e.g. `for<'c> Foo: Send+Clone+'c` BoundPredicate(WhereBoundPredicate), /// A lifetime predicate, e.g. `'a: 'b+'c` RegionPredicate(WhereRegionPredicate), /// An equality predicate (unsupported) EqPredicate(WhereEqPredicate), } /// A type bound. /// /// E.g. 
`for<'c> Foo: Send+Clone+'c` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct WhereBoundPredicate { pub span: Span, /// Any lifetimes from a `for` binding pub bound_lifetimes: Vec<LifetimeDef>, /// The type being bounded pub bounded_ty: P<Ty>, /// Trait and lifetime bounds (`Clone+Send+'static`) pub bounds: TyParamBounds, } /// A lifetime predicate. /// /// E.g. `'a: 'b+'c` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct WhereRegionPredicate { pub span: Span, pub lifetime: Lifetime, pub bounds: Vec<Lifetime>, } /// An equality predicate (unsupported). /// /// E.g. `T=int` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct WhereEqPredicate { pub id: NodeId, pub span: Span, pub lhs_ty: P<Ty>, pub rhs_ty: P<Ty>, } /// The set of MetaItems that define the compilation environment of the crate, /// used to drive conditional compilation pub type CrateConfig = HashSet<(Name, Option<Symbol>)>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Crate { pub module: Mod, pub attrs: Vec<Attribute>, pub span: Span, } /// A spanned compile-time attribute list item. pub type NestedMetaItem = Spanned<NestedMetaItemKind>; /// Possible values inside of compile-time attribute lists. /// /// E.g. the '..' in `#[name(..)]`. #[derive(Clone, Eq, RustcEncodable, RustcDecodable, Hash, Debug, PartialEq)] pub enum NestedMetaItemKind { /// A full MetaItem, for recursive meta items. MetaItem(MetaItem), /// A literal. /// /// E.g. "foo", 64, true Literal(Lit), } /// A spanned compile-time attribute item. /// /// E.g. `#[test]`, `#[derive(..)]` or `#[feature = "foo"]` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct MetaItem { pub name: Name, pub node: MetaItemKind, pub span: Span, } /// A compile-time attribute item. /// /// E.g. `#[test]`, `#[derive(..)]` or `#[feature = "foo"]` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum MetaItemKind { /// Word meta item. /// /// E.g. `test` as in `#[test]` Word, /// List meta item. /// /// E.g. `derive(..)` as in `#[derive(..)]` List(Vec<NestedMetaItem>), /// Name value meta item. /// /// E.g. `feature = "foo"` as in `#[feature = "foo"]` NameValue(Lit) } /// A Block (`{ .. }`). /// /// E.g. `{ .. }` as in `fn foo() { .. }` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Block { /// Statements in a block pub stmts: Vec<Stmt>, pub id: NodeId, /// Distinguishes between `unsafe { ... }` and `{ ... 
}` pub rules: BlockCheckMode, pub span: Span, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub struct Pat { pub id: NodeId, pub node: PatKind, pub span: Span, } impl fmt::Debug for Pat { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "pat({}: {})", self.id, pprust::pat_to_string(self)) } } impl Pat { pub fn walk<F>(&self, it: &mut F) -> bool where F: FnMut(&Pat) -> bool { if !it(self) { return false; } match self.node { PatKind::Ident(_, _, Some(ref p)) => p.walk(it), PatKind::Struct(_, ref fields, _) => { fields.iter().all(|field| field.node.pat.walk(it)) } PatKind::TupleStruct(_, ref s, _) | PatKind::Tuple(ref s, _) => { s.iter().all(|p| p.walk(it)) } PatKind::Box(ref s) | PatKind::Ref(ref s, _) => { s.walk(it) } PatKind::Slice(ref before, ref slice, ref after) => { before.iter().all(|p| p.walk(it)) && slice.iter().all(|p| p.walk(it)) && after.iter().all(|p| p.walk(it)) } PatKind::Wild | PatKind::Lit(_) | PatKind::Range(..) | PatKind::Ident(..) | PatKind::Path(..) | PatKind::Mac(_) => { true } } } } /// A single field in a struct pattern /// /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` /// are treated the same as `x: x, y: ref y, z: ref mut z`, /// except that `is_shorthand` is true #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct FieldPat { /// The identifier for the field pub ident: Ident, /// The pattern the field is destructured to pub pat: P<Pat>, pub is_shorthand: bool, pub attrs: ThinVec<Attribute>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum BindingMode { ByRef(Mutability), ByValue(Mutability), } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum RangeEnd { Included, Excluded, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum PatKind { /// Represents a wildcard pattern (`_`) Wild, /// A `PatKind::Ident` may either be a new bound variable (`ref mut binding @ OPT_SUBPATTERN`), /// or a unit struct/variant pattern, or a const pattern (in the last two cases the third /// field must be `None`). Disambiguation cannot be done with the parser alone, so it happens /// during name resolution. Ident(BindingMode, SpannedIdent, Option<P<Pat>>), /// A struct or struct variant pattern, e.g. `Variant {x, y, ..}`. /// The `bool` is `true` in the presence of a `..`. Struct(Path, Vec<Spanned<FieldPat>>, bool), /// A tuple struct/variant pattern `Variant(x, y, .., z)`. /// If the `..` pattern fragment is present, then `Option<usize>` denotes its position. /// 0 <= position <= subpats.len() TupleStruct(Path, Vec<P<Pat>>, Option<usize>), /// A possibly qualified path pattern. /// Unqualified path patterns `A::B::C` can legally refer to variants, structs, constants /// or associated constants. Qualified path patterns `<A>::B::C`/`<A as Trait>::B::C` can /// only legally refer to associated constants. Path(Option<QSelf>, Path), /// A tuple pattern `(a, b)`. /// If the `..` pattern fragment is present, then `Option<usize>` denotes its position. /// 0 <= position <= subpats.len() Tuple(Vec<P<Pat>>, Option<usize>), /// A `box` pattern Box(P<Pat>), /// A reference pattern, e.g. `&mut (a, b)` Ref(P<Pat>, Mutability), /// A literal Lit(P<Expr>), /// A range pattern, e.g. 
`1...2` or `1..2` Range(P<Expr>, P<Expr>, RangeEnd), /// `[a, b, ..i, y, z]` is represented as: /// `PatKind::Slice(box [a, b], Some(i), box [y, z])` Slice(Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>), /// A macro pattern; pre-expansion Mac(Mac), } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum Mutability { Mutable, Immutable, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum BinOpKind { /// The `+` operator (addition) Add, /// The `-` operator (subtraction) Sub, /// The `*` operator (multiplication) Mul, /// The `/` operator (division) Div, /// The `%` operator (modulus) Rem, /// The `&&` operator (logical and) And, /// The `||` operator (logical or) Or, /// The `^` operator (bitwise xor) BitXor, /// The `&` operator (bitwise and) BitAnd, /// The `|` operator (bitwise or) BitOr, /// The `<<` operator (shift left) Shl, /// The `>>` operator (shift right) Shr, /// The `==` operator (equality) Eq, /// The `<` operator (less than) Lt, /// The `<=` operator (less than or equal to) Le, /// The `!=` operator (not equal to) Ne, /// The `>=` operator (greater than or equal to) Ge, /// The `>` operator (greater than) Gt, } impl BinOpKind { pub fn to_string(&self) -> &'static str { use self::BinOpKind::*; match *self { Add => "+", Sub => "-", Mul => "*", Div => "/", Rem => "%", And => "&&", Or => "||", BitXor => "^", BitAnd => "&", BitOr => "|", Shl => "<<", Shr => ">>", Eq => "==", Lt => "<", Le => "<=", Ne => "!=", Ge => ">=", Gt => ">", } } pub fn lazy(&self) -> bool { match *self { BinOpKind::And | BinOpKind::Or => true, _ => false } } pub fn is_shift(&self) -> bool { match *self { BinOpKind::Shl | BinOpKind::Shr => true, _ => false } } pub fn is_comparison(&self) -> bool { use self::BinOpKind::*; match *self { Eq | Lt | Le | Ne | Gt | Ge => true, And | Or | Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Shl | Shr => false, } } /// Returns `true` if the binary operator takes its arguments by value pub fn is_by_value(&self) -> bool { !self.is_comparison() } } pub type BinOp = Spanned<BinOpKind>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum UnOp { /// The `*` operator for dereferencing Deref, /// The `!` operator for logical inversion Not, /// The `-` operator for negation Neg, } impl UnOp { /// Returns `true` if the unary operator takes its argument by value pub fn is_by_value(u: UnOp) -> bool { match u { UnOp::Neg | UnOp::Not => true, _ => false, } } pub fn to_string(op: UnOp) -> &'static str { match op { UnOp::Deref => "*", UnOp::Not => "!", UnOp::Neg => "-", } } } /// A statement #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub struct Stmt { pub id: NodeId, pub node: StmtKind, pub span: Span, } impl Stmt { pub fn add_trailing_semicolon(mut self) -> Self { self.node = match self.node { StmtKind::Expr(expr) => StmtKind::Semi(expr), StmtKind::Mac(mac) => StmtKind::Mac(mac.map(|(mac, _style, attrs)| { (mac, MacStmtStyle::Semicolon, attrs) })), node => node, }; self } } impl fmt::Debug for Stmt { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "stmt({}: {})", self.id.to_string(), pprust::stmt_to_string(self)) } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub enum StmtKind { /// A local (let) binding. Local(P<Local>), /// An item definition. Item(P<Item>), /// Expr without trailing semi-colon. 
Expr(P<Expr>), Semi(P<Expr>), Mac(P<(Mac, MacStmtStyle, ThinVec<Attribute>)>), } #[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum MacStmtStyle { /// The macro statement had a trailing semicolon, e.g. `foo! { ... };` /// `foo!(...);`, `foo![...];` Semicolon, /// The macro statement had braces; e.g. foo! { ... } Braces, /// The macro statement had parentheses or brackets and no semicolon; e.g. /// `foo!(...)`. All of these will end up being converted into macro /// expressions. NoBraces, } // FIXME (pending discussion of #1697, #2178...): local should really be // a refinement on pat. /// Local represents a `let` statement, e.g., `let <pat>:<ty> = <expr>;` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Local { pub pat: P<Pat>, pub ty: Option<P<Ty>>, /// Initializer expression to set the value, if any pub init: Option<P<Expr>>, pub id: NodeId, pub span: Span, pub attrs: ThinVec<Attribute>, } /// An arm of a 'match'. /// /// E.g. `0...10 => { println!("match!") }` as in /// /// ``` /// match 123 { /// 0...10 => { println!("match!") }, /// _ => { println!("no match!") }, /// } /// ``` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Arm { pub attrs: Vec<Attribute>, pub pats: Vec<P<Pat>>, pub guard: Option<P<Expr>>, pub body: P<Expr>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Field { pub ident: SpannedIdent, pub expr: P<Expr>, pub span: Span, pub is_shorthand: bool, pub attrs: ThinVec<Attribute>, } pub type SpannedIdent = Spanned<Ident>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum BlockCheckMode { Default, Unsafe(UnsafeSource), } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum UnsafeSource { CompilerGenerated, UserProvided, } /// An expression #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash,)] pub struct Expr { pub id: NodeId, pub node: ExprKind, pub span: Span, pub attrs: ThinVec<Attribute> } impl fmt::Debug for Expr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "expr({}: {})", self.id, pprust::expr_to_string(self)) } } /// Limit types of a range (inclusive or exclusive) #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum RangeLimits { /// Inclusive at the beginning, exclusive at the end HalfOpen, /// Inclusive at the beginning and end Closed, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum ExprKind { /// A `box x` expression. Box(P<Expr>), /// First expr is the place; second expr is the value. InPlace(P<Expr>, P<Expr>), /// An array (`[a, b, c, d]`) Array(Vec<P<Expr>>), /// A function call /// /// The first field resolves to the function itself, /// and the second field is the list of arguments Call(P<Expr>, Vec<P<Expr>>), /// A method call (`x.foo::<'static, Bar, Baz>(a, b, c, d)`) /// /// The `PathSegment` represents the method name and its generic arguments /// (within the angle brackets). /// The first element of the vector of `Expr`s is the expression that evaluates /// to the object on which the method is being called on (the receiver), /// and the remaining elements are the rest of the arguments. /// Thus, `x.foo::<Bar, Baz>(a, b, c, d)` is represented as /// `ExprKind::MethodCall(PathSegment { foo, [Bar, Baz] }, [x, a, b, c, d])`. 
MethodCall(PathSegment, Vec<P<Expr>>), /// A tuple (`(a, b, c ,d)`) Tup(Vec<P<Expr>>), /// A binary operation (For example: `a + b`, `a * b`) Binary(BinOp, P<Expr>, P<Expr>), /// A unary operation (For example: `!x`, `*x`) Unary(UnOp, P<Expr>), /// A literal (For example: `1`, `"foo"`) Lit(P<Lit>), /// A cast (`foo as f64`) Cast(P<Expr>, P<Ty>), Type(P<Expr>, P<Ty>), /// An `if` block, with an optional else block /// /// `if expr { block } else { expr }` If(P<Expr>, P<Block>, Option<P<Expr>>), /// An `if let` expression with an optional else block /// /// `if let pat = expr { block } else { expr }` /// /// This is desugared to a `match` expression. IfLet(P<Pat>, P<Expr>, P<Block>, Option<P<Expr>>), /// A while loop, with an optional label /// /// `'label: while expr { block }` While(P<Expr>, P<Block>, Option<SpannedIdent>), /// A while-let loop, with an optional label /// /// `'label: while let pat = expr { block }` /// /// This is desugared to a combination of `loop` and `match` expressions. WhileLet(P<Pat>, P<Expr>, P<Block>, Option<SpannedIdent>), /// A for loop, with an optional label /// /// `'label: for pat in expr { block }` /// /// This is desugared to a combination of `loop` and `match` expressions. ForLoop(P<Pat>, P<Expr>, P<Block>, Option<SpannedIdent>), /// Conditionless loop (can be exited with break, continue, or return) /// /// `'label: loop { block }` Loop(P<Block>, Option<SpannedIdent>), /// A `match` block. Match(P<Expr>, Vec<Arm>), /// A closure (for example, `move |a, b, c| a + b + c`) /// /// The final span is the span of the argument block `|...|` Closure(CaptureBy, P<FnDecl>, P<Expr>, Span), /// A block (`{ ... }`) Block(P<Block>), /// A catch block (`catch { ... }`) Catch(P<Block>), /// An assignment (`a = foo()`) Assign(P<Expr>, P<Expr>), /// An assignment with an operator /// /// For example, `a += 1`. AssignOp(BinOp, P<Expr>, P<Expr>), /// Access of a named struct field (`obj.foo`) Field(P<Expr>, SpannedIdent), /// Access of an unnamed field of a struct or tuple-struct /// /// For example, `foo.0`. TupField(P<Expr>, Spanned<usize>), /// An indexing operation (`foo[2]`) Index(P<Expr>, P<Expr>), /// A range (`1..2`, `1..`, `..2`, `1...2`, `1...`, `...2`) Range(Option<P<Expr>>, Option<P<Expr>>, RangeLimits), /// Variable reference, possibly containing `::` and/or type /// parameters, e.g. foo::bar::<baz>. /// /// Optionally "qualified", /// E.g. `<Vec<T> as SomeTrait>::SomeType`. Path(Option<QSelf>, Path), /// A referencing operation (`&a` or `&mut a`) AddrOf(Mutability, P<Expr>), /// A `break`, with an optional label to break, and an optional expression Break(Option<SpannedIdent>, Option<P<Expr>>), /// A `continue`, with an optional label Continue(Option<SpannedIdent>), /// A `return`, with an optional value to be returned Ret(Option<P<Expr>>), /// Output of the `asm!()` macro InlineAsm(P<InlineAsm>), /// A macro invocation; pre-expansion Mac(Mac), /// A struct literal expression. /// /// For example, `Foo {x: 1, y: 2}`, or /// `Foo {x: 1, .. base}`, where `base` is the `Option<Expr>`. Struct(Path, Vec<Field>, Option<P<Expr>>), /// An array literal constructed from one repeated element. /// /// For example, `[1; 5]`. The first expression is the element /// to be repeated; the second is the number of times to repeat it. Repeat(P<Expr>, P<Expr>), /// No-op: used solely so we can pretty-print faithfully Paren(P<Expr>), /// `expr?` Try(P<Expr>), } /// The explicit Self type in a "qualified path". 
The actual /// path, including the trait and the associated item, is stored /// separately. `position` represents the index of the associated /// item qualified with this Self type. /// /// ```ignore (only-for-syntax-highlight) /// <Vec<T> as a::b::Trait>::AssociatedItem /// ^~~~~ ~~~~~~~~~~~~~~^ /// ty position = 3 /// /// <Vec<T>>::AssociatedItem /// ^~~~~ ^ /// ty position = 0 /// ``` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct QSelf { pub ty: P<Ty>, pub position: usize } /// A capture clause #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum CaptureBy { Value, Ref, } pub type Mac = Spanned<Mac_>; /// Represents a macro invocation. The Path indicates which macro /// is being invoked, and the vector of token-trees contains the source /// of the macro invocation. /// /// NB: the additional ident for a macro_rules-style macro is actually /// stored in the enclosing item. Oog. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Mac_ { pub path: Path, pub tts: ThinTokenStream, } impl Mac_ { pub fn stream(&self) -> TokenStream { self.tts.clone().into() } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct MacroDef { pub tokens: ThinTokenStream, pub legacy: bool, } impl MacroDef { pub fn stream(&self) -> TokenStream { self.tokens.clone().into() } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum StrStyle { /// A regular string, like `"foo"` Cooked, /// A raw string, like `r##"foo"##` /// /// The uint is the number of `#` symbols used Raw(usize) } /// A literal pub type Lit = Spanned<LitKind>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum LitIntType { Signed(IntTy), Unsigned(UintTy), Unsuffixed, } /// Literal kind. /// /// E.g. `"foo"`, `42`, `12.34` or `bool` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum LitKind { /// A string literal (`"foo"`) Str(Symbol, StrStyle), /// A byte string (`b"foo"`) ByteStr(Rc<Vec<u8>>), /// A byte char (`b'f'`) Byte(u8), /// A character literal (`'a'`) Char(char), /// An integer literal (`1`) Int(u128, LitIntType), /// A float literal (`1f64` or `1E10f64`) Float(Symbol, FloatTy), /// A float literal without a suffix (`1.0 or 1.0E10`) FloatUnsuffixed(Symbol), /// A boolean literal Bool(bool), } impl LitKind { /// Returns true if this literal is a string and false otherwise. pub fn is_str(&self) -> bool { match *self { LitKind::Str(..) => true, _ => false, } } /// Returns true if this literal has no suffix. Note: this will return true /// for literals with prefixes such as raw strings and byte strings. pub fn is_unsuffixed(&self) -> bool { match *self { // unsuffixed variants LitKind::Str(..) | LitKind::ByteStr(..) | LitKind::Byte(..) | LitKind::Char(..) | LitKind::Int(_, LitIntType::Unsuffixed) | LitKind::FloatUnsuffixed(..) | LitKind::Bool(..) => true, // suffixed variants LitKind::Int(_, LitIntType::Signed(..)) | LitKind::Int(_, LitIntType::Unsigned(..)) | LitKind::Float(..) => false, } } /// Returns true if this literal has a suffix. pub fn is_suffixed(&self) -> bool { !self.is_unsuffixed() } } // NB: If you change this, you'll probably want to change the corresponding // type structure in middle/ty.rs as well. 
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct MutTy { pub ty: P<Ty>, pub mutbl: Mutability, } /// Represents a method's signature in a trait declaration, /// or in an implementation. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct MethodSig { pub unsafety: Unsafety, pub constness: Spanned<Constness>, pub abi: Abi, pub decl: P<FnDecl>, pub generics: Generics, } /// Represents an item declaration within a trait declaration, /// possibly including a default implementation. A trait item is /// either required (meaning it doesn't have an implementation, just a /// signature) or provided (meaning it has a default implementation). #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct TraitItem { pub id: NodeId, pub ident: Ident, pub attrs: Vec<Attribute>, pub node: TraitItemKind, pub span: Span, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum TraitItemKind { Const(P<Ty>, Option<P<Expr>>), Method(MethodSig, Option<P<Block>>), Type(TyParamBounds, Option<P<Ty>>), Macro(Mac), } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct ImplItem { pub id: NodeId, pub ident: Ident, pub vis: Visibility, pub defaultness: Defaultness, pub attrs: Vec<Attribute>, pub node: ImplItemKind, pub span: Span, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum ImplItemKind { Const(P<Ty>, P<Expr>), Method(MethodSig, P<Block>), Type(P<Ty>), Macro(Mac), } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub enum IntTy { Is, I8, I16, I32, I64, I128, } impl fmt::Debug for IntTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, f) } } impl fmt::Display for IntTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.ty_to_string()) } } impl IntTy { pub fn ty_to_string(&self) -> &'static str { match *self { IntTy::Is => "isize", IntTy::I8 => "i8", IntTy::I16 => "i16", IntTy::I32 => "i32", IntTy::I64 => "i64", IntTy::I128 => "i128", } } pub fn val_to_string(&self, val: i128) -> String { // cast to a u128 so we can correctly print INT128_MIN. All integral types // are parsed as u128, so we wouldn't want to print an extra negative // sign. 
format!("{}{}", val as u128, self.ty_to_string()) } pub fn bit_width(&self) -> Option<usize> { Some(match *self { IntTy::Is => return None, IntTy::I8 => 8, IntTy::I16 => 16, IntTy::I32 => 32, IntTy::I64 => 64, IntTy::I128 => 128, }) } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub enum UintTy { Us, U8, U16, U32, U64, U128, } impl UintTy { pub fn ty_to_string(&self) -> &'static str { match *self { UintTy::Us => "usize", UintTy::U8 => "u8", UintTy::U16 => "u16", UintTy::U32 => "u32", UintTy::U64 => "u64", UintTy::U128 => "u128", } } pub fn val_to_string(&self, val: u128) -> String { format!("{}{}", val, self.ty_to_string()) } pub fn bit_width(&self) -> Option<usize> { Some(match *self { UintTy::Us => return None, UintTy::U8 => 8, UintTy::U16 => 16, UintTy::U32 => 32, UintTy::U64 => 64, UintTy::U128 => 128, }) } } impl fmt::Debug for UintTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, f) } } impl fmt::Display for UintTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.ty_to_string()) } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub enum FloatTy { F32, F64, } impl fmt::Debug for FloatTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(self, f) } } impl fmt::Display for FloatTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.ty_to_string()) } } impl FloatTy { pub fn ty_to_string(&self) -> &'static str { match *self { FloatTy::F32 => "f32", FloatTy::F64 => "f64", } } pub fn bit_width(&self) -> usize { match *self { FloatTy::F32 => 32, FloatTy::F64 => 64, } } } // Bind a type to an associated type: `A=Foo`. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct TypeBinding { pub id: NodeId, pub ident: Ident, pub ty: P<Ty>, pub span: Span, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub struct Ty { pub id: NodeId, pub node: TyKind, pub span: Span, } impl fmt::Debug for Ty { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "type({})", pprust::ty_to_string(self)) } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct BareFnTy { pub unsafety: Unsafety, pub abi: Abi, pub lifetimes: Vec<LifetimeDef>, pub decl: P<FnDecl> } /// The different kinds of types recognized by the compiler #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum TyKind { /// A variable-length slice (`[T]`) Slice(P<Ty>), /// A fixed length array (`[T; n]`) Array(P<Ty>, P<Expr>), /// A raw pointer (`*const T` or `*mut T`) Ptr(MutTy), /// A reference (`&'a T` or `&'a mut T`) Rptr(Option<Lifetime>, MutTy), /// A bare function (e.g. `fn(usize) -> bool`) BareFn(P<BareFnTy>), /// The never type (`!`) Never, /// A tuple (`(A, B, C, D,...)`) Tup(Vec<P<Ty>> ), /// A path (`module::module::...::Type`), optionally /// "qualified", e.g. `<Vec<T> as SomeTrait>::SomeType`. /// /// Type parameters are stored in the Path itself Path(Option<QSelf>, Path), /// A trait object type `Bound1 + Bound2 + Bound3` /// where `Bound` is a trait or a lifetime. TraitObject(TyParamBounds), /// An `impl Bound1 + Bound2 + Bound3` type /// where `Bound` is a trait or a lifetime. ImplTrait(TyParamBounds), /// No-op; kept solely so that we can pretty-print faithfully Paren(P<Ty>), /// Unused for now Typeof(P<Expr>), /// TyKind::Infer means the type should be inferred instead of it having been /// specified. 
This can appear anywhere in a type. Infer, /// Inferred type of a `self` or `&self` argument in a method. ImplicitSelf, // A macro in the type position. Mac(Mac), /// Placeholder for a kind that has failed to be defined. Err, } /// Inline assembly dialect. /// /// E.g. `"intel"` as in `asm!("mov eax, 2" : "={eax}"(result) : : : "intel")`` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum AsmDialect { Att, Intel, } /// Inline assembly. /// /// E.g. `"={eax}"(result)` as in `asm!("mov eax, 2" : "={eax}"(result) : : : "intel")`` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct InlineAsmOutput { pub constraint: Symbol, pub expr: P<Expr>, pub is_rw: bool, pub is_indirect: bool, } /// Inline assembly. /// /// E.g. `asm!("NOP");` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct InlineAsm { pub asm: Symbol, pub asm_str_style: StrStyle, pub outputs: Vec<InlineAsmOutput>, pub inputs: Vec<(Symbol, P<Expr>)>, pub clobbers: Vec<Symbol>, pub volatile: bool, pub alignstack: bool, pub dialect: AsmDialect, pub ctxt: SyntaxContext, } /// An argument in a function header. /// /// E.g. `bar: usize` as in `fn foo(bar: usize)` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Arg { pub ty: P<Ty>, pub pat: P<Pat>, pub id: NodeId, } /// Alternative representation for `Arg`s describing `self` parameter of methods. /// /// E.g. `&mut self` as in `fn foo(&mut self)` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum SelfKind { /// `self`, `mut self` Value(Mutability), /// `&'lt self`, `&'lt mut self` Region(Option<Lifetime>, Mutability), /// `self: TYPE`, `mut self: TYPE` Explicit(P<Ty>, Mutability), } pub type ExplicitSelf = Spanned<SelfKind>; impl Arg { pub fn to_self(&self) -> Option<ExplicitSelf> { if let PatKind::Ident(BindingMode::ByValue(mutbl), ident, _) = self.pat.node { if ident.node.name == keywords::SelfValue.name() { return match self.ty.node { TyKind::ImplicitSelf => Some(respan(self.pat.span, SelfKind::Value(mutbl))), TyKind::Rptr(lt, MutTy{ref ty, mutbl}) if ty.node == TyKind::ImplicitSelf => { Some(respan(self.pat.span, SelfKind::Region(lt, mutbl))) } _ => Some(respan(self.pat.span.to(self.ty.span), SelfKind::Explicit(self.ty.clone(), mutbl))), } } } None } pub fn is_self(&self) -> bool { if let PatKind::Ident(_, ident, _) = self.pat.node { ident.node.name == keywords::SelfValue.name() } else { false } } pub fn from_self(eself: ExplicitSelf, eself_ident: SpannedIdent) -> Arg { let span = eself.span.to(eself_ident.span); let infer_ty = P(Ty { id: DUMMY_NODE_ID, node: TyKind::ImplicitSelf, span: span, }); let arg = |mutbl, ty| Arg { pat: P(Pat { id: DUMMY_NODE_ID, node: PatKind::Ident(BindingMode::ByValue(mutbl), eself_ident, None), span: span, }), ty: ty, id: DUMMY_NODE_ID, }; match eself.node { SelfKind::Explicit(ty, mutbl) => arg(mutbl, ty), SelfKind::Value(mutbl) => arg(mutbl, infer_ty), SelfKind::Region(lt, mutbl) => arg(Mutability::Immutable, P(Ty { id: DUMMY_NODE_ID, node: TyKind::Rptr(lt, MutTy { ty: infer_ty, mutbl: mutbl }), span: span, })), } } } /// Header (not the body) of a function declaration. /// /// E.g. 
`fn foo(bar: baz)` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct FnDecl { pub inputs: Vec<Arg>, pub output: FunctionRetTy, pub variadic: bool } impl FnDecl { pub fn get_self(&self) -> Option<ExplicitSelf> { self.inputs.get(0).and_then(Arg::to_self) } pub fn has_self(&self) -> bool { self.inputs.get(0).map(Arg::is_self).unwrap_or(false) } } #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Unsafety { Unsafe, Normal, } #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Constness { Const, NotConst, } #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Defaultness { Default, Final, } impl fmt::Display for Unsafety { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Display::fmt(match *self { Unsafety::Normal => "normal", Unsafety::Unsafe => "unsafe", }, f) } } #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub enum ImplPolarity { /// `impl Trait for Type` Positive, /// `impl !Trait for Type` Negative, } impl fmt::Debug for ImplPolarity { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { ImplPolarity::Positive => "positive".fmt(f), ImplPolarity::Negative => "negative".fmt(f), } } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum FunctionRetTy { /// Return type is not specified. /// /// Functions default to `()` and /// closures default to inference. Span points to where return /// type would be inserted. Default(Span), /// Everything else Ty(P<Ty>), } impl FunctionRetTy { pub fn span(&self) -> Span { match *self { FunctionRetTy::Default(span) => span, FunctionRetTy::Ty(ref ty) => ty.span, } } } /// Module declaration. /// /// E.g. `mod foo;` or `mod foo { .. }` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Mod { /// A span from the first token past `{` to the last token before `}`. /// For `mod foo;`, the inner span ranges from the first token /// to the last token in the external file. pub inner: Span, pub items: Vec<P<Item>>, } /// Foreign module declaration. /// /// E.g. `extern { .. }` or `extern "C" { .. }` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct ForeignMod { pub abi: Abi, pub items: Vec<ForeignItem>, } /// Global inline assembly /// /// aka module-level assembly or file-scoped assembly #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub struct GlobalAsm { pub asm: Symbol, pub ctxt: SyntaxContext, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct EnumDef { pub variants: Vec<Variant>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Variant_ { pub name: Ident, pub attrs: Vec<Attribute>, pub data: VariantData, /// Explicit discriminant, e.g. `Foo = 1` pub disr_expr: Option<P<Expr>>, } pub type Variant = Spanned<Variant_>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub struct PathListItem_ { pub name: Ident, /// renamed in list, e.g.
`use foo::{bar as baz};` pub rename: Option<Ident>, pub id: NodeId, } pub type PathListItem = Spanned<PathListItem_>; pub type ViewPath = Spanned<ViewPath_>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum ViewPath_ { /// `foo::bar::baz as quux` /// /// or just /// /// `foo::bar::baz` (with `as baz` implicitly on the right) ViewPathSimple(Ident, Path), /// `foo::bar::*` ViewPathGlob(Path), /// `foo::bar::{a,b,c}` ViewPathList(Path, Vec<PathListItem>) } impl ViewPath_ { pub fn path(&self) -> &Path { match *self { ViewPathSimple(_, ref path) | ViewPathGlob (ref path) | ViewPathList(ref path, _) => path } } } /// Distinguishes between Attributes that decorate items and Attributes that /// are contained as statements within items. These two cases need to be /// distinguished for pretty-printing. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum AttrStyle { Outer, Inner, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub struct AttrId(pub usize); /// Meta-data associated with an item /// Doc-comments are promoted to attributes that have is_sugared_doc = true #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Attribute { pub id: AttrId, pub style: AttrStyle, pub path: Path, pub tokens: TokenStream, pub is_sugared_doc: bool, pub span: Span, } /// TraitRefs appear in impls. /// /// resolve maps each TraitRef's ref_id to its defining trait; that's all /// that the ref_id is for. The impl_id maps to the "self type" of this impl. /// If this impl is an ItemKind::Impl, the impl_id is redundant (it could be the /// same as the impl's node id). #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct TraitRef { pub path: Path, pub ref_id: NodeId, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct PolyTraitRef { /// The `'a` in `<'a> Foo<&'a T>` pub bound_lifetimes: Vec<LifetimeDef>, /// The `Foo<&'a T>` in `<'a> Foo<&'a T>` pub trait_ref: TraitRef, pub span: Span, } impl PolyTraitRef { pub fn new(lifetimes: Vec<LifetimeDef>, path: Path, span: Span) -> Self { PolyTraitRef { bound_lifetimes: lifetimes, trait_ref: TraitRef { path: path, ref_id: DUMMY_NODE_ID }, span: span, } } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Visibility { Public, Crate(Span), Restricted { path: P<Path>, id: NodeId }, Inherited, } /// Field of a struct. /// /// E.g. `bar: usize` as in `struct Foo { bar: usize }` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct StructField { pub span: Span, pub ident: Option<Ident>, pub vis: Visibility, pub id: NodeId, pub ty: P<Ty>, pub attrs: Vec<Attribute>, } /// Fields and Ids of enum variants and structs /// /// For enum variants: `NodeId` represents both an Id of the variant itself (relevant for all /// variant kinds) and an Id of the variant's constructor (not relevant for `Struct`-variants). /// One shared Id can be successfully used for these two purposes. /// Id of the whole enum lives in `Item`. /// /// For structs: `NodeId` represents an Id of the structure's constructor, so it is not actually /// used for `Struct`-structs (but is still present). Structures don't have an analogue of "Id of /// the variant itself" from enum variants. /// Id of the whole struct lives in `Item`.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum VariantData { /// Struct variant. /// /// E.g. `Bar { .. }` as in `enum Foo { Bar { .. } }` Struct(Vec<StructField>, NodeId), /// Tuple variant. /// /// E.g. `Bar(..)` as in `enum Foo { Bar(..) }` Tuple(Vec<StructField>, NodeId), /// Unit variant. /// /// E.g. `Bar = ..` as in `enum Foo { Bar = .. }` Unit(NodeId), } impl VariantData { pub fn fields(&self) -> &[StructField] { match *self { VariantData::Struct(ref fields, _) | VariantData::Tuple(ref fields, _) => fields, _ => &[], } } pub fn id(&self) -> NodeId { match *self { VariantData::Struct(_, id) | VariantData::Tuple(_, id) | VariantData::Unit(id) => id } } pub fn is_struct(&self) -> bool { if let VariantData::Struct(..) = *self { true } else { false } } pub fn is_tuple(&self) -> bool { if let VariantData::Tuple(..) = *self { true } else { false } } pub fn is_unit(&self) -> bool { if let VariantData::Unit(..) = *self { true } else { false } } } /// An item /// /// The name might be a dummy name in case of anonymous items #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Item { pub ident: Ident, pub attrs: Vec<Attribute>, pub id: NodeId, pub node: ItemKind, pub vis: Visibility, pub span: Span, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum ItemKind { /// An `extern crate` item, with optional original crate name. /// /// E.g. `extern crate foo` or `extern crate foo_bar as foo` ExternCrate(Option<Name>), /// A use declaration (`use` or `pub use`) item. /// /// E.g. `use foo;`, `use foo::bar;` or `use foo::bar as FooBar;` Use(P<ViewPath>), /// A static item (`static` or `pub static`). /// /// E.g. `static FOO: i32 = 42;` or `static FOO: &'static str = "bar";` Static(P<Ty>, Mutability, P<Expr>), /// A constant item (`const` or `pub const`). /// /// E.g. `const FOO: i32 = 42;` Const(P<Ty>, P<Expr>), /// A function declaration (`fn` or `pub fn`). /// /// E.g. `fn foo(bar: usize) -> usize { .. }` Fn(P<FnDecl>, Unsafety, Spanned<Constness>, Abi, Generics, P<Block>), /// A module declaration (`mod` or `pub mod`). /// /// E.g. `mod foo;` or `mod foo { .. }` Mod(Mod), /// An external module (`extern` or `pub extern`). /// /// E.g. `extern {}` or `extern "C" {}` ForeignMod(ForeignMod), /// Module-level inline assembly (from `global_asm!()`) GlobalAsm(P<GlobalAsm>), /// A type alias (`type` or `pub type`). /// /// E.g. `type Foo = Bar<u8>;` Ty(P<Ty>, Generics), /// An enum definition (`enum` or `pub enum`). /// /// E.g. `enum Foo<A, B> { C<A>, D<B> }` Enum(EnumDef, Generics), /// A struct definition (`struct` or `pub struct`). /// /// E.g. `struct Foo<A> { x: A }` Struct(VariantData, Generics), /// A union definition (`union` or `pub union`). /// /// E.g. `union Foo<A, B> { x: A, y: B }` Union(VariantData, Generics), /// A Trait declaration (`trait` or `pub trait`). /// /// E.g. `trait Foo { .. }` or `trait Foo<T> { .. }` Trait(Unsafety, Generics, TyParamBounds, Vec<TraitItem>), /// Default trait implementation. /// /// E.g. `impl Trait for .. {}` or `impl<T> Trait<T> for .. {}` DefaultImpl(Unsafety, TraitRef), /// An implementation. /// /// E.g. `impl<A> Foo<A> { .. }` or `impl<A> Trait for Foo<A> { .. }` Impl(Unsafety, ImplPolarity, Defaultness, Generics, Option<TraitRef>, // (optional) trait this impl implements P<Ty>, // self Vec<ImplItem>), /// A macro invocation. /// /// E.g. `macro_rules! foo { .. }` or `foo!(..)` Mac(Mac), /// A macro definition.
MacroDef(MacroDef), } impl ItemKind { pub fn descriptive_variant(&self) -> &str { match *self { ItemKind::ExternCrate(..) => "extern crate", ItemKind::Use(..) => "use", ItemKind::Static(..) => "static item", ItemKind::Const(..) => "constant item", ItemKind::Fn(..) => "function", ItemKind::Mod(..) => "module", ItemKind::ForeignMod(..) => "foreign module", ItemKind::GlobalAsm(..) => "global asm", ItemKind::Ty(..) => "type alias", ItemKind::Enum(..) => "enum", ItemKind::Struct(..) => "struct", ItemKind::Union(..) => "union", ItemKind::Trait(..) => "trait", ItemKind::Mac(..) | ItemKind::MacroDef(..) | ItemKind::Impl(..) | ItemKind::DefaultImpl(..) => "item" } } } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct ForeignItem { pub ident: Ident, pub attrs: Vec<Attribute>, pub node: ForeignItemKind, pub id: NodeId, pub span: Span, pub vis: Visibility, } /// An item within an `extern` block #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum ForeignItemKind { /// A foreign function Fn(P<FnDecl>, Generics), /// A foreign static item (`static ext: u8`), with optional mutability /// (the boolean is true when mutable) Static(P<Ty>, bool), } impl ForeignItemKind { pub fn descriptive_variant(&self) -> &str { match *self { ForeignItemKind::Fn(..) => "foreign function", ForeignItemKind::Static(..) => "foreign static item" } } } #[cfg(test)] mod tests { use serialize; use super::*; // are ASTs encodable? #[test] fn check_asts_encodable() { fn assert_encodable<T: serialize::Encodable>() {} assert_encodable::<Crate>(); } }
{ fmt::Display::fmt(&self.0, f) }
image.py
""":mod:`wand.image` --- Image objects ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Opens and manipulates images. Image objects can be used in :keyword:`with` statement, and these resources will be automatically managed (even if any error happened):: with Image(filename='pikachu.png') as i: print('width =', i.width) print('height =', i.height) """ import collections import ctypes import functools import numbers import weakref from . import compat from .api import MagickPixelPacket, libc, libmagick, library from .color import Color from .compat import (binary, binary_type, encode_filename, file_types, string_type, text, xrange) from .exceptions import MissingDelegateError, WandException from .resource import DestroyedResourceError, Resource from .font import Font __all__ = ('ALPHA_CHANNEL_TYPES', 'CHANNELS', 'COLORSPACE_TYPES', 'COMPARE_METRICS', 'COMPOSITE_OPERATORS', 'COMPRESSION_TYPES', 'EVALUATE_OPS', 'FILTER_TYPES', 'GRAVITY_TYPES', 'IMAGE_TYPES', 'ORIENTATION_TYPES', 'UNIT_TYPES', 'FUNCTION_TYPES', 'BaseImage', 'ChannelDepthDict', 'ChannelImageDict', 'ClosedImageError', 'HistogramDict', 'Image', 'ImageProperty', 'Iterator', 'Metadata', 'OptionDict', 'manipulative') #: (:class:`tuple`) The list of filter types. #: #: - ``'undefined'`` #: - ``'point'`` #: - ``'box'`` #: - ``'triangle'`` #: - ``'hermite'`` #: - ``'hanning'`` #: - ``'hamming'`` #: - ``'blackman'`` #: - ``'gaussian'`` #: - ``'quadratic'`` #: - ``'cubic'`` #: - ``'catrom'`` #: - ``'mitchell'`` #: - ``'jinc'`` #: - ``'sinc'`` #: - ``'sincfast'`` #: - ``'kaiser'`` #: - ``'welsh'`` #: - ``'parzen'`` #: - ``'bohman'`` #: - ``'bartlett'`` #: - ``'lagrange'`` #: - ``'lanczos'`` #: - ``'lanczossharp'`` #: - ``'lanczos2'`` #: - ``'lanczos2sharp'`` #: - ``'robidoux'`` #: - ``'robidouxsharp'`` #: - ``'cosine'`` #: - ``'spline'`` #: - ``'sentinel'`` #: #: .. seealso:: #: #: `ImageMagick Resize Filters`__ #: Demonstrates the results of resampling images using the various #: resize filters and blur settings available in ImageMagick. #: #: __ http://www.imagemagick.org/Usage/resize/ FILTER_TYPES = ('undefined', 'point', 'box', 'triangle', 'hermite', 'hanning', 'hamming', 'blackman', 'gaussian', 'quadratic', 'cubic', 'catrom', 'mitchell', 'jinc', 'sinc', 'sincfast', 'kaiser', 'welsh', 'parzen', 'bohman', 'bartlett', 'lagrange', 'lanczos', 'lanczossharp', 'lanczos2', 'lanczos2sharp', 'robidoux', 'robidouxsharp', 'cosine', 'spline', 'sentinel') #: (:class:`tuple`) The list of compare metric types #: #: - ``'undefined'`` #: - ``'absolute'`` #: - ``'mean_absolute'`` #: - ``'mean_error_per_pixel'`` #: - ``'mean_squared'`` #: - ``'normalized_cross_correlation'`` #: - ``'peak_absolute'`` #: - ``'peak_signal_to_noise_ratio'`` #: - ``'perceptual_hash'`` #: - ``'root_mean_square'`` #: .. seealso:: #: #: `ImageMagick Compare Operations`__ #: #: __ http://www.imagemagick.org/Usage/compare/ #: #: .. versionadded:: 0.4.3 COMPARE_METRICS = ('undefined', 'absolute', 'mean_absolute', 'mean_error_per_pixel', 'mean_squared', 'normalized_cross_correlation', 'peak_absolute', 'peak_signal_to_noise_ratio', 'perceptual_hash', 'root_mean_square') #: (:class:`tuple`) The list of composition operators #: #: - ``'undefined'`` #: - ``'no'`` #: - ``'add'`` #: - ``'atop'`` #: - ``'blend'`` #: - ``'bumpmap'`` #: - ``'change_mask'``
#: - ``'clear'`` #: - ``'color_burn'`` #: - ``'color_dodge'`` #: - ``'colorize'`` #: - ``'copy_black'`` #: - ``'copy_blue'`` #: - ``'copy'`` #: - ``'copy_cyan'`` #: - ``'copy_green'`` #: - ``'copy_magenta'`` #: - ``'copy_opacity'`` #: - ``'copy_red'`` #: - ``'copy_yellow'`` #: - ``'darken'`` #: - ``'dst_atop'`` #: - ``'dst'`` #: - ``'dst_in'`` #: - ``'dst_out'`` #: - ``'dst_over'`` #: - ``'difference'`` #: - ``'displace'`` #: - ``'dissolve'`` #: - ``'exclusion'`` #: - ``'hard_light'`` #: - ``'hue'`` #: - ``'in'`` #: - ``'lighten'`` #: - ``'linear_light'`` #: - ``'luminize'`` #: - ``'minus'`` #: - ``'modulate'`` #: - ``'multiply'`` #: - ``'out'`` #: - ``'over'`` #: - ``'overlay'`` #: - ``'plus'`` #: - ``'replace'`` #: - ``'saturate'`` #: - ``'screen'`` #: - ``'soft_light'`` #: - ``'src_atop'`` #: - ``'src'`` #: - ``'src_in'`` #: - ``'src_out'`` #: - ``'src_over'`` #: - ``'subtract'`` #: - ``'threshold'`` #: - ``'xor'`` #: - ``'divide'`` #: #: .. versionchanged:: 0.3.0 #: Renamed from :const:`COMPOSITE_OPS` to :const:`COMPOSITE_OPERATORS`. #: #: .. seealso:: #: #: `Compositing Images`__ ImageMagick v6 Examples #: Image composition is the technique of combining images that have, #: or do not have, transparency or an alpha channel. #: This is usually performed using the IM :program:`composite` command. #: It may also be performed as either part of a larger sequence of #: operations or internally by other image operators. #: #: `ImageMagick Composition Operators`__ #: Demonstrates the results of applying the various #: composition operators. #: #: __ http://www.imagemagick.org/Usage/compose/ #: __ http://www.rubblewebs.co.uk/imagemagick/operators/compose.php COMPOSITE_OPERATORS = ( 'undefined', 'no', 'add', 'atop', 'blend', 'bumpmap', 'change_mask', 'clear', 'color_burn', 'color_dodge', 'colorize', 'copy_black', 'copy_blue', 'copy', 'copy_cyan', 'copy_green', 'copy_magenta', 'copy_opacity', 'copy_red', 'copy_yellow', 'darken', 'dst_atop', 'dst', 'dst_in', 'dst_out', 'dst_over', 'difference', 'displace', 'dissolve', 'exclusion', 'hard_light', 'hue', 'in', 'lighten', 'linear_light', 'luminize', 'minus', 'modulate', 'multiply', 'out', 'over', 'overlay', 'plus', 'replace', 'saturate', 'screen', 'soft_light', 'src_atop', 'src', 'src_in', 'src_out', 'src_over', 'subtract', 'threshold', 'xor', 'divide' ) #: (:class:`dict`) The dictionary of channel types. #: #: - ``'undefined'`` #: - ``'red'`` #: - ``'gray'`` #: - ``'cyan'`` #: - ``'green'`` #: - ``'magenta'`` #: - ``'blue'`` #: - ``'yellow'`` #: - ``'alpha'`` #: - ``'opacity'`` #: - ``'black'`` #: - ``'index'`` #: - ``'composite_channels'`` #: - ``'all_channels'`` #: - ``'true_alpha'`` #: - ``'rgb_channels'`` #: - ``'gray_channels'`` #: - ``'sync_channels'`` #: - ``'default_channels'`` #: #: ..
seealso:: #: #: `ImageMagick Color Channels`__ #: Lists the various channel types with descriptions of each #: #: __ http://www.imagemagick.org/Magick++/Enumerations.html#ChannelType CHANNELS = dict(undefined=0, red=1, gray=1, cyan=1, green=2, magenta=2, blue=4, yellow=4, alpha=8, opacity=8, black=32, index=32, composite_channels=47, all_channels=134217727, true_alpha=64, rgb_channels=128, gray_channels=128, sync_channels=256, default_channels=134217719) #: (:class:`tuple`) The list of evaluation operators. #: #: - ``'undefined'`` #: - ``'add'`` #: - ``'and'`` #: - ``'divide'`` #: - ``'leftshift'`` #: - ``'max'`` #: - ``'min'`` #: - ``'multiply'`` #: - ``'or'`` #: - ``'rightshift'`` #: - ``'set'`` #: - ``'subtract'`` #: - ``'xor'`` #: - ``'pow'`` #: - ``'log'`` #: - ``'threshold'`` #: - ``'thresholdblack'`` #: - ``'thresholdwhite'`` #: - ``'gaussiannoise'`` #: - ``'impulsenoise'`` #: - ``'laplaciannoise'`` #: - ``'multiplicativenoise'`` #: - ``'poissonnoise'`` #: - ``'uniformnoise'`` #: - ``'cosine'`` #: - ``'sine'`` #: - ``'addmodulus'`` #: - ``'mean'`` #: - ``'abs'`` #: - ``'exponential'`` #: - ``'median'`` #: - ``'sum'`` #: - ``'rootmeansquare'`` #: #: .. seealso:: #: #: `ImageMagick Image Evaluation Operators`__ #: Describes the MagickEvaluateImageChannel method and lists the #: various evaluation operators #: #: __ http://www.magickwand.org/MagickEvaluateImage.html EVALUATE_OPS = ('undefined', 'add', 'and', 'divide', 'leftshift', 'max', 'min', 'multiply', 'or', 'rightshift', 'set', 'subtract', 'xor', 'pow', 'log', 'threshold', 'thresholdblack', 'thresholdwhite', 'gaussiannoise', 'impulsenoise', 'laplaciannoise', 'multiplicativenoise', 'poissonnoise', 'uniformnoise', 'cosine', 'sine', 'addmodulus', 'mean', 'abs', 'exponential', 'median', 'sum', 'rootmeansquare') #: (:class:`tuple`) The list of colorspaces. #: #: - ``'undefined'`` #: - ``'rgb'`` #: - ``'gray'`` #: - ``'transparent'`` #: - ``'ohta'`` #: - ``'lab'`` #: - ``'xyz'`` #: - ``'ycbcr'`` #: - ``'ycc'`` #: - ``'yiq'`` #: - ``'ypbpr'`` #: - ``'yuv'`` #: - ``'cmyk'`` #: - ``'srgb'`` #: - ``'hsb'`` #: - ``'hsl'`` #: - ``'hwb'`` #: - ``'rec601luma'`` #: - ``'rec601ycbcr'`` #: - ``'rec709luma'`` #: - ``'rec709ycbcr'`` #: - ``'log'`` #: - ``'cmy'`` #: - ``'luv'`` #: - ``'hcl'`` #: - ``'lch'`` #: - ``'lms'`` #: - ``'lchab'`` #: - ``'lchuv'`` #: - ``'scrgb'`` #: - ``'hsi'`` #: - ``'hsv'`` #: - ``'hclp'`` #: - ``'ydbdr'`` #: #: .. seealso:: #: #: `ImageMagick Color Management`__ #: Describes the ImageMagick color management operations #: #: __ http://www.imagemagick.org/script/color-management.php #: #: .. versionadded:: 0.3.4 COLORSPACE_TYPES = ('undefined', 'rgb', 'gray', 'transparent', 'ohta', 'lab', 'xyz', 'ycbcr', 'ycc', 'yiq', 'ypbpr', 'yuv', 'cmyk', 'srgb', 'hsb', 'hsl', 'hwb', 'rec601luma', 'rec601ycbcr', 'rec709luma', 'rec709ycbcr', 'log', 'cmy', 'luv', 'hcl', 'lch', 'lms', 'lchab', 'lchuv', 'scrgb', 'hsi', 'hsv', 'hclp', 'ydbdr') #: (:class:`tuple`) The list of alpha channel types #: #: - ``'undefined'`` #: - ``'activate'`` #: - ``'background'`` #: - ``'copy'`` #: - ``'deactivate'`` #: - ``'extract'`` #: - ``'opaque'`` #: - ``'reset'`` #: - ``'set'`` #: - ``'shape'`` #: - ``'transparent'`` #: - ``'flatten'`` #: - ``'remove'`` #: #: .. seealso:: #: `ImageMagick Image Channel`__ #: Describes the SetImageAlphaChannel method which can be used #: to modify the alpha channel.
Also describes AlphaChannelType #: #: __ http://www.imagemagick.org/api/channel.php#SetImageAlphaChannel ALPHA_CHANNEL_TYPES = ('undefined', 'activate', 'background', 'copy', 'deactivate', 'extract', 'opaque', 'reset', 'set', 'shape', 'transparent', 'flatten', 'remove') #: (:class:`tuple`) The list of image types #: #: - ``'undefined'`` #: - ``'bilevel'`` #: - ``'grayscale'`` #: - ``'grayscalematte'`` #: - ``'palette'`` #: - ``'palettematte'`` #: - ``'truecolor'`` #: - ``'truecolormatte'`` #: - ``'colorseparation'`` #: - ``'colorseparationmatte'`` #: - ``'optimize'`` #: - ``'palettebilevelmatte'`` #: #: .. seealso:: #: #: `ImageMagick Image Types`__ #: Describes the MagickSetImageType method which can be used #: to set the type of an image #: #: __ http://www.imagemagick.org/api/magick-image.php#MagickSetImageType IMAGE_TYPES = ('undefined', 'bilevel', 'grayscale', 'grayscalematte', 'palette', 'palettematte', 'truecolor', 'truecolormatte', 'colorseparation', 'colorseparationmatte', 'optimize', 'palettebilevelmatte') #: (:class:`tuple`) The list of resolution unit types. #: #: - ``'undefined'`` #: - ``'pixelsperinch'`` #: - ``'pixelspercentimeter'`` #: #: .. seealso:: #: #: `ImageMagick Image Units`__ #: Describes the MagickSetImageUnits method which can be used #: to set image units of resolution #: #: __ http://www.imagemagick.org/api/magick-image.php#MagickSetImageUnits UNIT_TYPES = 'undefined', 'pixelsperinch', 'pixelspercentimeter' #: (:class:`tuple`) The list of :attr:`~BaseImage.gravity` types. #: #: .. versionadded:: 0.3.0 GRAVITY_TYPES = ('forget', 'north_west', 'north', 'north_east', 'west', 'center', 'east', 'south_west', 'south', 'south_east', 'static') #: (:class:`tuple`) The list of :attr:`~BaseImage.orientation` types. #: #: .. versionadded:: 0.3.0 ORIENTATION_TYPES = ('undefined', 'top_left', 'top_right', 'bottom_right', 'bottom_left', 'left_top', 'right_top', 'right_bottom', 'left_bottom') #: (:class:`collections.Set`) The set of available :attr:`~BaseImage.options`. #: #: .. versionadded:: 0.3.0 #: #: .. versionchanged:: 0.3.4 #: Added ``'jpeg:sampling-factor'`` option. #: #: .. versionchanged:: 0.3.9 #: Added ``'pdf:use-cropbox'`` option. OPTIONS = frozenset(['fill', 'jpeg:sampling-factor', 'pdf:use-cropbox']) #: (:class:`tuple`) The list of :attr:`Image.compression` types. #: #: .. versionadded:: 0.3.6 COMPRESSION_TYPES = ( 'undefined', 'b44a', 'b44', 'bzip', 'dxt1', 'dxt3', 'dxt5', 'fax', 'group4', 'jbig1', # ISO/IEC std 11544 / ITU-T rec T.82 'jbig2', # ISO/IEC std 14492 / ITU-T rec T.88 'jpeg2000', # ISO/IEC std 15444-1 'jpeg', 'losslessjpeg', 'lzma', # Lempel-Ziv-Markov chain algorithm 'lzw', 'no', 'piz', 'pxr24', 'rle', 'zip', 'zips' ) #: (:class:`tuple`) The list of :attr:`Image.function` types. #: #: - ``'undefined'`` #: - ``'polynomial'`` #: - ``'sinusoid'`` #: - ``'arcsin'`` #: - ``'arctan'`` FUNCTION_TYPES = ('undefined', 'polynomial', 'sinusoid', 'arcsin', 'arctan') #: (:class:`tuple`) The list of :meth:`Image.distort` methods. #: #: - ``'undefined'`` #: - ``'affine'`` #: - ``'affine_projection'`` #: - ``'scale_rotate_translate'`` #: - ``'perspective'`` #: - ``'perspective_projection'`` #: - ``'bilinear_forward'`` #: - ``'bilinear_reverse'`` #: - ``'polynomial'`` #: - ``'arc'`` #: - ``'polar'`` #: - ``'depolar'`` #: - ``'cylinder_2_plane'`` #: - ``'plane_2_cylinder'`` #: - ``'barrel'`` #: - ``'barrel_inverse'`` #: - ``'shepards'`` #: - ``'resize'`` #: - ``'sentinel'`` #: #: ..
versionadded:: 0.4.1 DISTORTION_METHODS = ( 'undefined', 'affine', 'affine_projection', 'scale_rotate_translate', 'perspective', 'perspective_projection', 'bilinear_forward', 'bilinear_reverse', 'polynomial', 'arc', 'polar', 'depolar', 'cylinder_2_plane', 'plane_2_cylinder', 'barrel', 'barrel_inverse', 'shepards', 'resize', 'sentinel' ) #: (:class:`tuple`) The list of :attr:`~BaseImage.virtual_pixel` types. #: - ``'undefined'`` #: - ``'background'`` #: - ``'constant'`` #: - ``'dither'`` #: - ``'edge'`` #: - ``'mirror'`` #: - ``'random'`` #: - ``'tile'`` #: - ``'transparent'`` #: - ``'mask'`` #: - ``'black'`` #: - ``'gray'`` #: - ``'white'`` #: - ``'horizontal_tile'`` #: - ``'vertical_tile'`` #: - ``'horizontal_tile_edge'`` #: - ``'vertical_tile_edge'`` #: - ``'checker_tile'`` #: #: .. versionadded:: 0.4.1 VIRTUAL_PIXEL_METHOD = ('undefined', 'background', 'constant', 'dither', 'edge', 'mirror', 'random', 'tile', 'transparent', 'mask', 'black', 'gray', 'white', 'horizontal_tile', 'vertical_tile', 'horizontal_tile_edge', 'vertical_tile_edge', 'checker_tile') #: (:class:`tuple`) The list of :attr:`~BaseImage.layer_method` types. #: - ``'undefined'`` #: - ``'coalesce'`` #: - ``'compareany'`` #: - ``'compareclear'`` #: - ``'compareoverlay'`` #: - ``'dispose'`` #: - ``'optimize'`` #: - ``'optimizeimage'`` #: - ``'optimizeplus'`` #: - ``'optimizetrans'`` #: - ``'removedups'`` #: - ``'removezero'`` #: - ``'composite'`` #: - ``'merge'`` #: - ``'flatten'`` #: - ``'mosaic'`` #: - ``'trimbounds'`` #: .. versionadded:: 0.4.3 IMAGE_LAYER_METHOD = ('undefined', 'coalesce', 'compareany', 'compareclear', 'compareoverlay', 'dispose', 'optimize', 'optimizeimage', 'optimizeplus', 'optimizetrans', 'removedups', 'removezero', 'composite', 'merge', 'flatten', 'mosaic', 'trimbounds') def manipulative(function): """Mark an operation as manipulating the image itself instead of returning a new one.""" @functools.wraps(function) def wrapped(self, *args, **kwargs): result = function(self, *args, **kwargs) self.dirty = True return result return wrapped class BaseImage(Resource): """The abstract base of :class:`Image` (container) and :class:`~wand.sequence.SingleImage`. That means that most of the operations defined in this abstract class are available for both :class:`Image` and :class:`~wand.sequence.SingleImage`. .. versionadded:: 0.3.0 """ #: (:class:`OptionDict`) The mapping of internal option settings. #: #: .. versionadded:: 0.3.0 #: #: .. versionchanged:: 0.3.4 #: Added ``'jpeg:sampling-factor'`` option. #: #: .. versionchanged:: 0.3.9 #: Added ``'pdf:use-cropbox'`` option. options = None #: (:class:`collections.Sequence`) The list of #: :class:`~wand.sequence.SingleImage`\ s that the image contains. #: #: .. versionadded:: 0.3.0 sequence = None #: (:class:`bool`) Whether the image is changed or not. dirty = None c_is_resource = library.IsMagickWand c_destroy_resource = library.DestroyMagickWand c_get_exception = library.MagickGetException c_clear_exception = library.MagickClearException __slots__ = '_wand', def __init__(self, wand): self.wand = wand self.channel_images = ChannelImageDict(self) self.channel_depths = ChannelDepthDict(self) self.options = OptionDict(self) self.dirty = False @property def wand(self): """Internal pointer to the MagickWand instance. It may raise :exc:`ClosedImageError` when the instance has been destroyed already.
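For example, the pointer becomes inaccessible once the image has been destroyed, e.g. by leaving a :keyword:`with` block (a minimal sketch; ``input.png`` is a hypothetical filename):: with Image(filename='input.png') as img: pass img.wand # raises ClosedImageError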
""" try: return self.resource except DestroyedResourceError: raise ClosedImageError(repr(self) + ' is closed already') @wand.setter def wand(self, wand): try: self.resource = wand except TypeError: raise TypeError(repr(wand) + ' is not a MagickWand instance') @wand.deleter def wand(self): del self.resource def clone(self): """Clones the image. It is equivalent to call :class:`Image` with ``image`` parameter. :: with img.clone() as cloned: # manipulate the cloned image pass :returns: the cloned new image :rtype: :class:`Image` .. versionadded:: 0.1.1 """ return Image(image=self) def __len__(self): return self.height def __iter__(self): return Iterator(image=self) def __getitem__(self, idx): if (not isinstance(idx, string_type) and isinstance(idx, collections.Iterable)): idx = tuple(idx) d = len(idx) if not (1 <= d <= 2): raise ValueError('index cannot be {0}-dimensional'.format(d)) elif d == 2: x, y = idx x_slice = isinstance(x, slice) y_slice = isinstance(y, slice) if x_slice and not y_slice: y = slice(y, y + 1) elif not x_slice and y_slice: x = slice(x, x + 1) elif not (x_slice or y_slice): if not (isinstance(x, numbers.Integral) and isinstance(y, numbers.Integral)): raise TypeError('x and y must be integral, not ' + repr((x, y))) if x < 0: x += self.width if y < 0: y += self.height if x >= self.width: raise IndexError('x must be less than width') elif y >= self.height: raise IndexError('y must be less than height') elif x < 0: raise IndexError('x cannot be less than 0') elif y < 0: raise IndexError('y cannot be less than 0') with iter(self) as iterator: iterator.seek(y) return iterator.next(x) if not (x.step is None and y.step is None): raise ValueError('slicing with step is unsupported') elif (x.start is None and x.stop is None and y.start is None and y.stop is None): return self.clone() cloned = self.clone() try: cloned.crop(x.start, y.start, x.stop, y.stop) except ValueError as e: raise IndexError(str(e)) return cloned else: return self[idx[0]] elif isinstance(idx, numbers.Integral): if idx < 0: idx += self.height elif idx >= self.height: raise IndexError('index must be less than height, but got ' + repr(idx)) elif idx < 0: raise IndexError('index cannot be less than zero, but got ' + repr(idx)) with iter(self) as iterator: iterator.seek(idx) return iterator.next() elif isinstance(idx, slice): return self[:, idx] raise TypeError('unsupported index type: ' + repr(idx)) def __eq__(self, other): if isinstance(other, type(self)): return self.signature == other.signature return False def __ne__(self, other): return not (self == other) def __hash__(self): return hash(self.signature) @property def animation(self): """(:class:`bool`) Whether the image is animation or not. It doesn't only mean that the image has two or more images (frames), but all frames are even the same size. It's about image format, not content. It's :const:`False` even if :mimetype:`image/ico` consits of two or more images of the same size. For example, it's :const:`False` for :mimetype:`image/jpeg`, :mimetype:`image/gif`, :mimetype:`image/ico`. If :mimetype:`image/gif` has two or more frames, it's :const:`True`. If :mimetype:`image/gif` has only one frame, it's :const:`False`. .. versionadded:: 0.3.0 .. versionchanged:: 0.3.8 Became to accept :mimetype:`image/x-gif` as well. """ return False @property def gravity(self): """(:class:`basestring`) The text placement gravity used when annotating with text. It's a string from :const:`GRAVITY_TYPES` list. It also can be set. 
""" gravity_index = library.MagickGetGravity(self.wand) if not gravity_index: self.raise_exception() return GRAVITY_TYPES[gravity_index] @gravity.setter @manipulative def gravity(self, value): if not isinstance(value, string_type): raise TypeError('expected a string, not ' + repr(value)) if value not in GRAVITY_TYPES: raise ValueError('expected a string from GRAVITY_TYPES, not ' + repr(value)) library.MagickSetGravity(self.wand, GRAVITY_TYPES.index(value)) @property def font_path(self): """(:class:`basestring`) The path of the current font. It also can be set. """ return text(library.MagickGetFont(self.wand)) @font_path.setter @manipulative def font_path(self, font): font = binary(font) if library.MagickSetFont(self.wand, font) is False: raise ValueError('font is invalid') @property def font_size(self): """(:class:`numbers.Real`) The font size. It also can be set.""" return library.MagickGetPointsize(self.wand) @font_size.setter @manipulative def font_size(self, size): if not isinstance(size, numbers.Real): raise TypeError('expected a numbers.Real, but got ' + repr(size)) elif size < 0.0: raise ValueError('cannot be less then 0.0, but got ' + repr(size)) elif library.MagickSetPointsize(self.wand, size) is False: raise ValueError('unexpected error is occur') @property def font_antialias(self): return bool(library.MagickGetAntialias(self.wand)) @font_antialias.setter @manipulative def font_antialias(self, antialias): if not isinstance(antialias, bool): raise TypeError('font_antialias must be a bool, not ' + repr(antialias)) library.MagickSetAntialias(self.wand, antialias) @property def font(self): """(:class:`wand.font.Font`) The current font options.""" return Font( path=text(self.font_path), size=self.font_size, color=self.font_color, antialias=self.font_antialias ) @font.setter @manipulative def font(self, font): if not isinstance(font, Font): raise TypeError('font must be a wand.font.Font, not ' + repr(font)) self.font_path = font.path self.font_size = font.size self.font_color = font.color self.font_antialias = font.antialias @property def page(self): """The dimensions and offset of this Wand's page as a 4-tuple: ``(width, height, x, y)``. Note that since it is based on the virtual canvas, it may not equal the dimensions of an image. See the ImageMagick documentation on the virtual canvas for more information. .. versionadded:: 0.4.3 """ w = ctypes.c_uint() h = ctypes.c_uint() x = ctypes.c_int() y = ctypes.c_int() r = library.MagickGetImagePage(self.wand, w, h, x, y) if not r: self.raise_exception() return int(w.value), int(h.value), int(x.value), int(y.value) @page.setter @manipulative def page(self, newpage): if isinstance(newpage, collections.Sequence): w, h, x, y = newpage else: raise TypeError("page layout must be 4-tuple") r = library.MagickSetImagePage(self.wand, w, h, x, y) if not r: self.raise_exception() @property def page_width(self): """(:class:`numbers.Integral`) The width of the page for this wand. .. versionadded:: 0.4.3 """ return self.page[0] @page_width.setter @manipulative def page_width(self, width): newpage = list(self.page) newpage[0] = width self.page = newpage @property def page_height(self): """(:class:`numbers.Integral`) The height of the page for this wand. .. versionadded:: 0.4.3 """ return self.page[1] @page_height.setter @manipulative def page_height(self, height): newpage = list(self.page) newpage[1] = height self.page = newpage @property def page_x(self): """(:class:`numbers.Integral`) The X-offset of the page for this wand. .. 
versionadded:: 0.4.3 """ return self.page[2] @page_x.setter @manipulative def page_x(self, x): newpage = list(self.page) newpage[2] = x self.page = newpage @property def page_y(self): """(:class:`numbers.Integral`) The Y-offset of the page for this wand. .. versionadded:: 0.4.3 """ return self.page[3] @page_y.setter @manipulative def page_y(self, y): newpage = list(self.page) newpage[3] = y self.page = newpage @property def width(self): """(:class:`numbers.Integral`) The width of this image.""" return library.MagickGetImageWidth(self.wand) @width.setter @manipulative def width(self, width): if width is not None and not isinstance(width, numbers.Integral): raise TypeError('width must be an integral, not ' + repr(width)) library.MagickSetSize(self.wand, width, self.height) @property def height(self): """(:class:`numbers.Integral`) The height of this image.""" return library.MagickGetImageHeight(self.wand) @height.setter @manipulative def height(self, height): if height is not None and not isinstance(height, numbers.Integral): raise TypeError('height must be an integral, not ' + repr(height)) library.MagickSetSize(self.wand, self.width, height) @property def orientation(self): """(:class:`basestring`) The image orientation. It's a string from :const:`ORIENTATION_TYPES` list. It also can be set. .. versionadded:: 0.3.0 """ orientation_index = library.MagickGetImageOrientation(self.wand) return ORIENTATION_TYPES[orientation_index] @orientation.setter @manipulative def orientation(self, value): if not isinstance(value, string_type): raise TypeError('expected a string, not ' + repr(value)) if value not in ORIENTATION_TYPES: raise ValueError('expected a string from ORIENTATION_TYPES, not ' + repr(value)) index = ORIENTATION_TYPES.index(value) library.MagickSetImageOrientation(self.wand, index) @property def font_color(self): return Color(self.options['fill']) @font_color.setter @manipulative def font_color(self, color): if not isinstance(color, Color): raise TypeError('font_color must be a wand.color.Color, not ' + repr(color)) self.options['fill'] = color.string @manipulative def caption(self, text, left=0, top=0, width=None, height=None, font=None, gravity=None): """Writes a caption ``text`` at the given position. :param text: text to write :type text: :class:`basestring` :param left: x offset in pixels :type left: :class:`numbers.Integral` :param top: y offset in pixels :type top: :class:`numbers.Integral` :param width: width of caption in pixels. default is :attr:`width` of the image :type width: :class:`numbers.Integral` :param height: height of caption in pixels. default is :attr:`height` of the image :type height: :class:`numbers.Integral` :param font: font to use. default is :attr:`font` of the image :type font: :class:`wand.font.Font` :param gravity: text placement gravity. uses the current :attr:`gravity` setting of the image by default :type gravity: :class:`basestring` .. versionadded:: 0.3.0 """
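# A minimal usage sketch (the font file name is hypothetical): # img.caption('Hello', font=Font('arial.ttf', 24), gravity='center') # writes 'Hello' centered over the whole image.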
if not isinstance(left, numbers.Integral): raise TypeError('left must be an integer, not ' + repr(left)) elif not isinstance(top, numbers.Integral): raise TypeError('top must be an integer, not ' + repr(top)) elif width is not None and not isinstance(width, numbers.Integral): raise TypeError('width must be an integer, not ' + repr(width)) elif height is not None and not isinstance(height, numbers.Integral): raise TypeError('height must be an integer, not ' + repr(height)) elif font is not None and not isinstance(font, Font): raise TypeError('font must be a wand.font.Font, not ' + repr(font)) elif gravity is not None and compat.text(gravity) not in GRAVITY_TYPES: raise ValueError('invalid gravity value') if width is None: width = self.width - left if height is None: height = self.height - top if not font: try: font = self.font except TypeError: raise TypeError('font must be specified or existing in image') with Image() as textboard: library.MagickSetSize(textboard.wand, width, height) textboard.font = font textboard.gravity = gravity or self.gravity with Color('transparent') as background_color: library.MagickSetBackgroundColor(textboard.wand, background_color.resource) textboard.read(filename=b'caption:' + text.encode('utf-8')) self.composite(textboard, left, top) @property def resolution(self): """(:class:`tuple`) Resolution of this image. .. versionadded:: 0.3.0 """ x = ctypes.c_double() y = ctypes.c_double() r = library.MagickGetImageResolution(self.wand, x, y) if not r: self.raise_exception() return int(x.value), int(y.value) @resolution.setter @manipulative def resolution(self, geometry): if isinstance(geometry, collections.Sequence): x, y = geometry elif isinstance(geometry, numbers.Integral): x, y = geometry, geometry else: raise TypeError('resolution must be a (x, y) pair or an integer ' 'to be used for both x and y') if self.size == (0, 0): r = library.MagickSetResolution(self.wand, x, y) else: r = library.MagickSetImageResolution(self.wand, x, y) if not r: self.raise_exception() @property def size(self): """(:class:`tuple`) The pair of (:attr:`width`, :attr:`height`).""" return self.width, self.height @property def units(self): """(:class:`basestring`) The resolution units of this image.""" r = library.MagickGetImageUnits(self.wand) return UNIT_TYPES[r] @units.setter @manipulative def units(self, units): if not isinstance(units, string_type) or units not in UNIT_TYPES: raise TypeError('Unit value must be a string from wand.images.' 'UNIT_TYPES, not ' + repr(units)) r = library.MagickSetImageUnits(self.wand, UNIT_TYPES.index(units)) if not r: self.raise_exception() @property def virtual_pixel(self): """(:class:`basestring`) The virtual pixel of image. This can also be set with a value from :const:`VIRTUAL_PIXEL_METHOD` .. versionadded:: 0.4.1 """ method_index = library.MagickGetImageVirtualPixelMethod(self.wand) return VIRTUAL_PIXEL_METHOD[method_index] @virtual_pixel.setter def virtual_pixel(self, method): if method not in VIRTUAL_PIXEL_METHOD: raise ValueError('expected method from VIRTUAL_PIXEL_METHOD,' ' not ' + repr(method)) library.MagickSetImageVirtualPixelMethod( self.wand, VIRTUAL_PIXEL_METHOD.index(method) ) @property def colorspace(self): """(:class:`basestring`) The image colorspace. Defines image colorspace as in :const:`COLORSPACE_TYPES` enumeration. It may raise :exc:`ValueError` when the colorspace is unknown. .. versionadded:: 0.3.4 """
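# For example, reading img.colorspace on a typical PNG or JPEG usually # yields 'srgb'; assigning img.colorspace = 'gray' delegates the # conversion to MagickSetImageColorspace below.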
colorspace_type_index = library.MagickGetImageColorspace(self.wand) if not colorspace_type_index: self.raise_exception() return COLORSPACE_TYPES[colorspace_type_index] @colorspace.setter @manipulative def colorspace(self, colorspace_type): if (not isinstance(colorspace_type, string_type) or colorspace_type not in COLORSPACE_TYPES): raise TypeError('Colorspace value must be a string from ' 'COLORSPACE_TYPES, not ' + repr(colorspace_type)) r = library.MagickSetImageColorspace( self.wand, COLORSPACE_TYPES.index(colorspace_type) ) if not r: self.raise_exception() @property def depth(self): """(:class:`numbers.Integral`) The depth of this image. .. versionadded:: 0.2.1 """ return library.MagickGetImageDepth(self.wand) @depth.setter @manipulative def depth(self, depth): r = library.MagickSetImageDepth(self.wand, depth) if not r: self.raise_exception() @property def type(self): """(:class:`basestring`) The image type. Defines image type as in :const:`IMAGE_TYPES` enumeration. It may raise :exc:`ValueError` when the type is unknown. .. versionadded:: 0.2.2 """ image_type_index = library.MagickGetImageType(self.wand) if not image_type_index: self.raise_exception() return IMAGE_TYPES[image_type_index] @type.setter @manipulative def type(self, image_type): if (not isinstance(image_type, string_type) or image_type not in IMAGE_TYPES): raise TypeError('Type value must be a string from IMAGE_TYPES' ', not ' + repr(image_type)) r = library.MagickSetImageType(self.wand, IMAGE_TYPES.index(image_type)) if not r: self.raise_exception() @property def compression_quality(self): """(:class:`numbers.Integral`) Compression quality of this image. .. versionadded:: 0.2.0 """ return library.MagickGetImageCompressionQuality(self.wand) @compression_quality.setter @manipulative def compression_quality(self, quality): """Set compression quality for the image. :param quality: new compression quality setting :type quality: :class:`numbers.Integral` """ if not isinstance(quality, numbers.Integral): raise TypeError('compression quality must be a natural ' 'number, not ' + repr(quality)) r = library.MagickSetImageCompressionQuality(self.wand, quality) if not r: raise ValueError('Unable to set compression quality to ' + repr(quality)) @property def signature(self): """(:class:`str`) The SHA-256 message digest for the image pixel stream. .. versionadded:: 0.1.9 """ signature = library.MagickGetImageSignature(self.wand) return text(signature.value) @property def alpha_channel(self): """(:class:`bool`) Get state of image alpha channel. It can also be used to enable/disable alpha channel, but with different behavior for new, copied, or existing images. Behavior of setting :attr:`alpha_channel` is defined with the following values: - ``'activate'``, ``'on'``, or :const:`True` will enable an image's alpha channel. Existing alpha data is preserved. - ``'deactivate'``, ``'off'``, or :const:`False` will disable an image's alpha channel. Any data on the alpha will be preserved. - ``'associate'`` & ``'disassociate'`` toggle alpha channel flag in certain image-file specifications. - ``'set'`` enables and resets any data in an image's alpha channel. - ``'opaque'`` enables alpha/matte channel, and forces full opaque image. - ``'transparent'`` enables alpha/matte channel, and forces full transparent image. - ``'extract'`` copies data in alpha channel across all other channels, and disables alpha channel. - ``'copy'`` calculates the gray-scale of RGB channels, and applies it to alpha channel.
- ``'shape'`` is identical to ``'copy'``, but will color the resulting image with the value defined with :attr:`background_color`. - ``'remove'`` will composite :attr:`background_color` value. - ``'background'`` replaces full-transparent color with background color. .. versionadded:: 0.2.1 .. versionchanged:: 0.4.1 Support for additional setting values. However :attr:`Image.alpha_channel` will continue to return :class:`bool` if the current alpha/matte state is enabled. """ return bool(library.MagickGetImageAlphaChannel(self.wand)) @alpha_channel.setter @manipulative def alpha_channel(self, alpha_type): # Map common aliases for ``'deactivate'`` if alpha_type is False or alpha_type == 'off': alpha_type = 'deactivate' # Map common aliases for ``'activate'`` elif alpha_type is True or alpha_type == 'on': alpha_type = 'activate' if alpha_type in ALPHA_CHANNEL_TYPES: alpha_index = ALPHA_CHANNEL_TYPES.index(alpha_type) library.MagickSetImageAlphaChannel(self.wand, alpha_index) self.raise_exception() else: raise ValueError('expecting string from ALPHA_CHANNEL_TYPES, ' 'not ' + repr(alpha_type)) @property def background_color(self): """(:class:`wand.color.Color`) The image background color. It can also be set to change the background color. .. versionadded:: 0.1.9 """ pixel = library.NewPixelWand() result = library.MagickGetImageBackgroundColor(self.wand, pixel) if result: size = ctypes.sizeof(MagickPixelPacket) buffer = ctypes.create_string_buffer(size) library.PixelGetMagickColor(pixel, buffer) return Color(raw=buffer) self.raise_exception() @background_color.setter @manipulative def background_color(self, color): if not isinstance(color, Color): raise TypeError('color must be a wand.color.Color object, not ' + repr(color)) with color: result = library.MagickSetImageBackgroundColor(self.wand, color.resource) if not result: self.raise_exception() @property def matte_color(self): """(:class:`wand.color.Color`) The color value of the matte channel. This can also be set. .. versionadded:: 0.4.1 """ pixel = library.NewPixelWand() result = library.MagickGetImageMatteColor(self.wand, pixel) if result: pixel_size = ctypes.sizeof(MagickPixelPacket) pixel_buffer = ctypes.create_string_buffer(pixel_size) library.PixelGetMagickColor(pixel, pixel_buffer) return Color(raw=pixel_buffer) self.raise_exception() @matte_color.setter @manipulative def matte_color(self, color): if not isinstance(color, Color): raise TypeError('color must be a wand.color.Color object, not ' + repr(color)) with color: result = library.MagickSetImageMatteColor(self.wand, color.resource) if not result: self.raise_exception() @property def quantum_range(self): """(:class:`int`) The maximum value of a color channel that is supported by the ImageMagick library. .. versionadded:: 0.2.0 """ result = ctypes.c_size_t() library.MagickGetQuantumRange(ctypes.byref(result)) return result.value @property def histogram(self): """(:class:`HistogramDict`) The mapping that represents the histogram. Keys are :class:`~wand.color.Color` objects, and values are the number of pixels. .. versionadded:: 0.3.0 """ return HistogramDict(self) @manipulative def distort(self, method, arguments, best_fit=False): """Distorts an image using various distorting methods. :param method: Distortion method name from :const:`DISTORTION_METHODS` :type method: :class:`basestring` :param arguments: List of distorting float arguments unique to distortion method :type arguments: :class:`collections.Sequence` :param best_fit: Attempt to resize the resulting image to fit the distortion. Defaults to False :type best_fit: :class:`bool` .. versionadded:: 0.4.1 """
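# For example (hypothetical numbers): the one-argument form of the # 'scale_rotate_translate' method is just an angle, so # img.distort('scale_rotate_translate', (45,)) rotates the image # 45 degrees about its center.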
if method not in DISTORTION_METHODS: raise ValueError('expected string from DISTORTION_METHODS, not ' + repr(method)) if not isinstance(arguments, collections.Sequence): raise TypeError('expected sequence of doubles, not ' + repr(arguments)) argc = len(arguments) argv = (ctypes.c_double * argc)(*arguments) library.MagickDistortImage(self.wand, DISTORTION_METHODS.index(method), argc, argv, bool(best_fit)) self.raise_exception() @manipulative def crop(self, left=0, top=0, right=None, bottom=None, width=None, height=None, reset_coords=True, gravity=None): """Crops the image in-place. .. sourcecode:: text +--------------------------------------------------+ | ^ ^ | | | | | | top | | | | | | | v | | | <-- left --> +-------------------+ bottom | | | ^ | | | | | <-- width --|---> | | | | | height | | | | | | | | | | | v | | | | +-------------------+ v | | <--------------- right ----------> | +--------------------------------------------------+ :param left: x-offset of the cropped image. default is 0 :type left: :class:`numbers.Integral` :param top: y-offset of the cropped image. default is 0 :type top: :class:`numbers.Integral` :param right: second x-offset of the cropped image. default is the :attr:`width` of the image. this parameter and the ``width`` parameter are mutually exclusive :type right: :class:`numbers.Integral` :param bottom: second y-offset of the cropped image. default is the :attr:`height` of the image. this parameter and the ``height`` parameter are mutually exclusive :type bottom: :class:`numbers.Integral` :param width: the :attr:`width` of the cropped image. default is the :attr:`width` of the image. this parameter and the ``right`` parameter are mutually exclusive :type width: :class:`numbers.Integral` :param height: the :attr:`height` of the cropped image. default is the :attr:`height` of the image. this parameter and the ``bottom`` parameter are mutually exclusive :type height: :class:`numbers.Integral` :param reset_coords: optional flag. If set, after the crop, the coordinate frame will be relocated to the upper-left corner of the new image. Default is `True`. :type reset_coords: :class:`bool` :param gravity: optional flag. If set, will calculate the :attr:`top` and :attr:`left` attributes. This requires both :attr:`width` and :attr:`height` parameters to be included. :type gravity: :const:`GRAVITY_TYPES` :raises ValueError: when one or more arguments are invalid .. note:: If you want to crop the image but not in-place, use the slicing operator. .. versionchanged:: 0.4.1 Added ``gravity`` option. Using ``gravity`` along with ``width`` & ``height`` to auto-adjust ``left`` & ``top`` attributes. .. versionchanged:: 0.1.8 Made to raise :exc:`~exceptions.ValueError` instead of :exc:`~exceptions.IndexError` for invalid ``width``/``height`` arguments. .. versionadded:: 0.1.7 """ if not (right is None or width is None): raise TypeError('parameters right and width are mutually ' 'exclusive; use one at a time') elif not (bottom is None or height is None): raise TypeError('parameters bottom and height are mutually ' 'exclusive; use one at a time') # Define left & top if gravity is given.
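# For example (hypothetical numbers): on a 100x100 image, # crop(width=60, height=60, gravity='center') computes # top = int(100 / 2) - int(60 / 2) = 20 and likewise left = 20, # selecting a centered 60x60 region.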
if gravity: if width is None or height is None: raise TypeError( 'both width and height must be defined with gravity' ) if gravity not in GRAVITY_TYPES: raise ValueError('expected a string from GRAVITY_TYPES, not ' + repr(gravity)) # Set `top` based on given gravity if gravity in ('north_west', 'north', 'north_east'): top = 0 elif gravity in ('west', 'center', 'east'): top = int(self.height / 2) - int(height / 2) elif gravity in ('south_west', 'south', 'south_east'): top = self.height - height # Set `left` based on given gravity if gravity in ('north_west', 'west', 'south_west'): left = 0 elif gravity in ('north', 'center', 'south'): left = int(self.width / 2) - int(width / 2) elif gravity in ('north_east', 'east', 'south_east'): left = self.width - width def abs_(n, m, null=None): if n is None: return m if null is None else null elif not isinstance(n, numbers.Integral): raise TypeError('expected integer, not ' + repr(n)) elif n > m: raise ValueError(repr(n) + ' > ' + repr(m)) return m + n if n < 0 else n left = abs_(left, self.width, 0) top = abs_(top, self.height, 0) if width is None: right = abs_(right, self.width) width = right - left if height is None: bottom = abs_(bottom, self.height) height = bottom - top if width < 1: raise ValueError('image width cannot be zero') elif height < 1: raise ValueError('image height cannot be zero') elif (left == top == 0 and width == self.width and height == self.height): return if self.animation: self.wand = library.MagickCoalesceImages(self.wand) library.MagickSetLastIterator(self.wand) n = library.MagickGetIteratorIndex(self.wand) library.MagickResetIterator(self.wand) for i in xrange(0, n + 1): library.MagickSetIteratorIndex(self.wand, i) library.MagickCropImage(self.wand, width, height, left, top) if reset_coords: library.MagickResetImagePage(self.wand, None) else: library.MagickCropImage(self.wand, width, height, left, top) self.raise_exception() if reset_coords: self.reset_coords() def reset_coords(self): """Reset the coordinate frame of the image so that the upper-left corner is (0, 0) again (crop and rotate operations change it). .. versionadded:: 0.2.0 """ library.MagickResetImagePage(self.wand, None) @manipulative def resize(self, width=None, height=None, filter='undefined', blur=1): """Resizes the image. :param width: the width in the scaled image. default is the original width :type width: :class:`numbers.Integral` :param height: the height in the scaled image. default is the original height :type height: :class:`numbers.Integral` :param filter: a filter type to use for resizing. choose one in :const:`FILTER_TYPES`. default is ``'undefined'`` which means IM will try to guess the best one to use :type filter: :class:`basestring`, :class:`numbers.Integral` :param blur: the blur factor where > 1 is blurry, < 1 is sharp. default is 1 :type blur: :class:`numbers.Real` .. versionchanged:: 0.2.1 The default value of ``filter`` has changed from ``'triangle'`` to ``'undefined'`` instead. .. versionchanged:: 0.1.8 The ``blur`` parameter changed to take :class:`numbers.Real` instead of :class:`numbers.Rational`. .. versionadded:: 0.1.1 """
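# For example: img.resize(200, 150, filter='lanczos') scales the image # to 200x150 pixels using the Lanczos filter; the filter names come # from FILTER_TYPES.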
if width is None: width = self.width if height is None: height = self.height if not isinstance(width, numbers.Integral): raise TypeError('width must be a natural number, not ' + repr(width)) elif not isinstance(height, numbers.Integral): raise TypeError('height must be a natural number, not ' + repr(height)) elif width < 1: raise ValueError('width must be a natural number, not ' + repr(width)) elif height < 1: raise ValueError('height must be a natural number, not ' + repr(height)) elif not isinstance(blur, numbers.Real): raise TypeError('blur must be numbers.Real, not ' + repr(blur)) elif not isinstance(filter, (string_type, numbers.Integral)): raise TypeError('filter must be one string defined in wand.image.' 'FILTER_TYPES or an integer, not ' + repr(filter)) if isinstance(filter, string_type): try: filter = FILTER_TYPES.index(filter) except ValueError: raise ValueError(repr(filter) + ' is an invalid filter type; ' 'choose one in ' + repr(FILTER_TYPES)) elif (isinstance(filter, numbers.Integral) and not (0 <= filter < len(FILTER_TYPES))): raise ValueError(repr(filter) + ' is an invalid filter type') blur = ctypes.c_double(float(blur)) if self.animation: self.wand = library.MagickCoalesceImages(self.wand) library.MagickSetLastIterator(self.wand) n = library.MagickGetIteratorIndex(self.wand) library.MagickResetIterator(self.wand) for i in xrange(n + 1): library.MagickSetIteratorIndex(self.wand, i) library.MagickResizeImage(self.wand, width, height, filter, blur) library.MagickSetSize(self.wand, width, height) else: r = library.MagickResizeImage(self.wand, width, height, filter, blur) library.MagickSetSize(self.wand, width, height) if not r: self.raise_exception() @manipulative def sample(self, width=None, height=None): """Resizes the image by sampling the pixels. It's basically quicker than :meth:`resize()`, with lower quality as a tradeoff. :param width: the width in the scaled image. default is the original width :type width: :class:`numbers.Integral` :param height: the height in the scaled image. default is the original height :type height: :class:`numbers.Integral` .. versionadded:: 0.3.4 """ if width is None: width = self.width if height is None: height = self.height if not isinstance(width, numbers.Integral): raise TypeError('width must be a natural number, not ' + repr(width)) elif not isinstance(height, numbers.Integral): raise TypeError('height must be a natural number, not ' + repr(height)) elif width < 1: raise ValueError('width must be a natural number, not ' + repr(width)) elif height < 1: raise ValueError('height must be a natural number, not ' + repr(height)) if self.animation: self.wand = library.MagickCoalesceImages(self.wand) library.MagickSetLastIterator(self.wand) n = library.MagickGetIteratorIndex(self.wand) library.MagickResetIterator(self.wand) for i in xrange(n + 1): library.MagickSetIteratorIndex(self.wand, i) library.MagickSampleImage(self.wand, width, height) library.MagickSetSize(self.wand, width, height) else: r = library.MagickSampleImage(self.wand, width, height) library.MagickSetSize(self.wand, width, height) if not r: self.raise_exception() @manipulative def transform(self, crop='', resize=''): """Transforms the image using :c:func:`MagickTransformImage`, which is a convenience function accepting geometry strings to perform cropping and resizing. Cropping is performed first, followed by resizing. Either or both arguments may be omitted or given an empty string, in which case the corresponding action will not be performed.
        Geometry specification strings are defined as follows: a geometry
        string consists of a size followed by an optional offset.  The size
        is specified by one of the options below, where **bold** terms are
        replaced with appropriate integer values:

        **scale**\ ``%``
           Height and width both scaled by specified percentage.

        **scale-x**\ ``%x``\ \ **scale-y**\ ``%``
           Height and width individually scaled by specified percentages.
           Only one % symbol is needed.

        **width**
           Width given, height automagically selected to preserve aspect
           ratio.

        ``x``\ \ **height**
           Height given, width automagically selected to preserve aspect
           ratio.

        **width**\ ``x``\ **height**
           Maximum values of width and height given; aspect ratio preserved.

        **width**\ ``x``\ **height**\ ``!``
           Width and height emphatically given; original aspect ratio
           ignored.

        **width**\ ``x``\ **height**\ ``>``
           Shrinks images with dimension(s) larger than the corresponding
           width and/or height dimension(s).

        **width**\ ``x``\ **height**\ ``<``
           Enlarges images with dimensions smaller than the corresponding
           width and/or height dimension(s).

        **area**\ ``@``
           Resize image to have the specified area in pixels.  Aspect ratio
           is preserved.

        The offset, which only applies to the cropping geometry string, is
        given by ``{+-}``\ **x**\ ``{+-}``\ **y**\ , that is, one plus or
        minus sign followed by an **x** offset, followed by another plus or
        minus sign, followed by a **y** offset.  Offsets are in pixels from
        the upper left corner of the image.  Negative offsets will cause the
        corresponding number of pixels to be removed from the right or
        bottom edge of the image, meaning the cropped size will be the
        computed size minus the absolute value of the offset.

        For example, if you want to crop your image to 300x300 pixels and
        then scale it by 2x for a final size of 600x600 pixels, you can
        call::

            image.transform('300x300', '200%')

        This method is a fairly thin wrapper for the C API, and does not
        perform any additional checking of the parameters except insofar as
        verifying that they are of the correct type.  Thus, like the C API
        function, the method is very permissive in terms of what it accepts
        for geometry strings; unrecognized strings and trailing characters
        will be ignored rather than raising an error.

        :param crop: A geometry string defining a subregion of the image
                     to crop to
        :type crop: :class:`basestring`
        :param resize: A geometry string defining the final size of the image
        :type resize: :class:`basestring`

        .. seealso::

           `ImageMagick Geometry Specifications`__
              Cropping and resizing geometry for the ``transform`` method
              are specified according to ImageMagick's geometry string
              format.  The ImageMagick documentation provides more
              information about geometry strings.

              __ http://www.imagemagick.org/script/command-line-processing.php#geometry

        .. versionadded:: 0.2.2

        """  # noqa
        # Check that the values given are the correct types.  ctypes will do
        # this automatically, but we can make the error message more friendly
        # here.
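        # For reference, typical calls look like the following (the values
        # are illustrative only, not part of the original module):
        #
        #     img.transform(crop='100x100+10+10')  # 100x100 crop at (10, 10)
        #     img.transform(resize='300x300>')     # shrink only if larger
        #     img.transform('300x300', '200%')     # crop, then scale by 2x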
        if not isinstance(crop, string_type):
            raise TypeError("crop must be a string, not " + repr(crop))
        if not isinstance(resize, string_type):
            raise TypeError("resize must be a string, not " + repr(resize))
        # Also verify that only ASCII characters are included
        try:
            crop = crop.encode('ascii')
        except UnicodeEncodeError:
            raise ValueError('crop must only contain ascii-encodable '
                             'characters.')
        try:
            resize = resize.encode('ascii')
        except UnicodeEncodeError:
            raise ValueError('resize must only contain ascii-encodable '
                             'characters.')
        if self.animation:
            new_wand = library.MagickCoalesceImages(self.wand)
            length = len(self.sequence)
            for i in xrange(length):
                library.MagickSetIteratorIndex(new_wand, i)
                if i:
                    library.MagickAddImage(
                        new_wand,
                        library.MagickTransformImage(new_wand, crop, resize)
                    )
                else:
                    new_wand = library.MagickTransformImage(new_wand, crop,
                                                            resize)
            self.sequence.instances = []
        else:
            new_wand = library.MagickTransformImage(self.wand, crop, resize)
        if not new_wand:
            self.raise_exception()
        self.wand = new_wand

    @manipulative
    def liquid_rescale(self, width, height, delta_x=0, rigidity=0):
        """Rescales the image with `seam carving`_, also known as
        image retargeting, content-aware resizing, or liquid rescaling.

        :param width: the width in the scaled image
        :type width: :class:`numbers.Integral`
        :param height: the height in the scaled image
        :type height: :class:`numbers.Integral`
        :param delta_x: maximum seam transversal step.  0 means straight
                        seams.  default is 0
        :type delta_x: :class:`numbers.Real`
        :param rigidity: introduce a bias for non-straight seams.
                         default is 0
        :type rigidity: :class:`numbers.Real`
        :raises wand.exceptions.MissingDelegateError:
           when ImageMagick isn't configured with the ``--with-lqr`` option.

        .. note::

           This feature requires ImageMagick to be configured with the
           ``--with-lqr`` option.  Otherwise it will raise
           :exc:`~wand.exceptions.MissingDelegateError`.

        .. seealso::

           `Seam carving`_ --- Wikipedia
              The article which explains what seam carving is on Wikipedia.

        .. _Seam carving: http://en.wikipedia.org/wiki/Seam_carving

        """
        if not isinstance(width, numbers.Integral):
            raise TypeError('width must be an integer, not ' + repr(width))
        elif not isinstance(height, numbers.Integral):
            raise TypeError('height must be an integer, not ' + repr(height))
        elif not isinstance(delta_x, numbers.Real):
            raise TypeError('delta_x must be a float, not ' + repr(delta_x))
        elif not isinstance(rigidity, numbers.Real):
            raise TypeError('rigidity must be a float, not ' +
                            repr(rigidity))
        library.MagickLiquidRescaleImage(self.wand, int(width), int(height),
                                         float(delta_x), float(rigidity))
        try:
            self.raise_exception()
        except MissingDelegateError as e:
            raise MissingDelegateError(
                str(e) + '\n\nYour ImageMagick installation seems unable to '
                'load liblqr: liblqr may not be installed, or ImageMagick '
                'may not have been compiled with liblqr support.'
            )

    @manipulative
    def rotate(self, degree, background=None, reset_coords=True):
        """Rotates the image right.  It takes a ``background`` color
        for ``degree`` that isn't a multiple of 90.

        :param degree: a degree to rotate.  multiples of 360 affect nothing
        :type degree: :class:`numbers.Real`
        :param background: an optional background color.
                           default is transparent
        :type background: :class:`wand.color.Color`
        :param reset_coords: optional flag.  If set, after the rotation, the
                             coordinate frame will be relocated to the
                             upper-left corner of the new image.
                             By default is `True`.
        :type reset_coords: :class:`bool`

        .. versionadded:: 0.2.0
           The ``reset_coords`` parameter.

        ..
versionadded:: 0.1.8 """ if background is None: background = Color('transparent') elif not isinstance(background, Color): raise TypeError('background must be a wand.color.Color instance, ' 'not ' + repr(background)) if not isinstance(degree, numbers.Real): raise TypeError('degree must be a numbers.Real value, not ' + repr(degree)) with background: if self.animation: self.wand = library.MagickCoalesceImages(self.wand) library.MagickSetLastIterator(self.wand) n = library.MagickGetIteratorIndex(self.wand) library.MagickResetIterator(self.wand) for i in range(0, n + 1): library.MagickSetIteratorIndex(self.wand, i) library.MagickRotateImage(self.wand, background.resource, degree) if reset_coords: library.MagickResetImagePage(self.wand, None) else: result = library.MagickRotateImage(self.wand, background.resource, degree) if not result: self.raise_exception() if reset_coords: self.reset_coords() @manipulative def evaluate(self, operator=None, value=0.0, channel=None): """Apply arithmetic, relational, or logical expression to an image. Percent values must be calculated against the quantum range of the image:: fifty_percent = img.quantum_range * 0.5 img.evaluate(operator='set', value=fifty_percent) :param operator: Type of operation to calculate :type operator: :const:`EVALUATE_OPS` :param value: Number to calculate with ``operator`` :type value: :class:`numbers.Real` :param channel: Optional channel to apply operation on. :type channel: :const:`CHANNELS` :raises TypeError: When ``value`` is not numeric. :raises ValueError: When ``operator``, or ``channel`` are not defined in constants. .. versionadded:: 0.4.1 """ if operator not in EVALUATE_OPS: raise ValueError('expected value from EVALUATE_OPS, not ' + repr(operator)) if not isinstance(value, numbers.Real): raise TypeError('value must be real number, not ' + repr(value)) if channel: if channel not in CHANNELS: raise ValueError('expected value from CHANNELS, not ' + repr(channel)) library.MagickEvaluateImageChannel(self.wand, CHANNELS[channel], EVALUATE_OPS.index(operator), value) else: library.MagickEvaluateImage(self.wand, EVALUATE_OPS.index(operator), value) self.raise_exception() @manipulative def flip(self): """Creates a vertical mirror image by reflecting the pixels around the central x-axis. It manipulates the image in place. .. versionadded:: 0.3.0 """ result = library.MagickFlipImage(self.wand) if not result: self.raise_exception() @manipulative def flop(self): """Creates a horizontal mirror image by reflecting the pixels around the central y-axis. It manipulates the image in place. .. versionadded:: 0.3.0 """ result = library.MagickFlopImage(self.wand) if not result: self.raise_exception() @manipulative def frame(self, matte=None, width=1, height=1, inner_bevel=0, outer_bevel=0): """Creates a bordered frame around image. Inner & outer bevel can simulate a 3D effect. :param matte: color of the frame :type matte: :class:`wand.color.Color` :param width: total size of frame on x-axis :type width: :class:`numbers.Integral` :param height: total size of frame on y-axis :type height: :class:`numbers.Integral` :param inner_bevel: inset shadow length :type inner_bevel: :class:`numbers.Real` :param outer_bevel: outset highlight length :type outer_bevel: :class:`numbers.Real` .. 
        versionadded:: 0.4.1

        """
        if matte is None:
            matte = Color('gray')
        if not isinstance(matte, Color):
            raise TypeError('Expecting instance of Color for matte, not ' +
                            repr(matte))
        if not isinstance(width, numbers.Integral):
            raise TypeError('Expecting integer for width, not ' +
                            repr(width))
        if not isinstance(height, numbers.Integral):
            raise TypeError('Expecting integer for height, not ' +
                            repr(height))
        if not isinstance(inner_bevel, numbers.Real):
            raise TypeError('Expecting real number, not ' +
                            repr(inner_bevel))
        if not isinstance(outer_bevel, numbers.Real):
            raise TypeError('Expecting real number, not ' +
                            repr(outer_bevel))
        with matte:
            library.MagickFrameImage(self.wand, matte.resource,
                                     width, height,
                                     inner_bevel, outer_bevel)

    @manipulative
    def function(self, function, arguments, channel=None):
        """Apply an arithmetic, relational, or logical expression to an
        image.  Defaults to the entire image, but the effect can be isolated
        to a single color channel by passing a :const:`CHANNELS` value to
        the ``channel`` parameter.

        .. note::

           Support for function methods added in the following versions
           of ImageMagick.

           - ``'polynomial'`` >= 6.4.8-8
           - ``'sinusoid'`` >= 6.4.8-8
           - ``'arcsin'`` >= 6.5.3-1
           - ``'arctan'`` >= 6.5.3-1

        :param function: a string listed in :const:`FUNCTION_TYPES`
        :type function: :class:`basestring`
        :param arguments: a sequence of doubles to apply against ``function``
        :type arguments: :class:`collections.Sequence`
        :param channel: optional :const:`CHANNELS`, defaults all
        :type channel: :class:`basestring`
        :raises ValueError: when a ``function``, or ``channel`` is not
                            defined in their respective constants
        :raises TypeError: if ``arguments`` is not a sequence

        .. versionadded:: 0.4.1

        """
        if function not in FUNCTION_TYPES:
            raise ValueError('expected string from FUNCTION_TYPES, not ' +
                             repr(function))
        if not isinstance(arguments, collections.Sequence):
            raise TypeError('expecting sequence of arguments, not ' +
                            repr(arguments))
        argc = len(arguments)
        argv = (ctypes.c_double * argc)(*arguments)
        index = FUNCTION_TYPES.index(function)
        if channel is None:
            library.MagickFunctionImage(self.wand, index, argc, argv)
        elif channel in CHANNELS:
            library.MagickFunctionImageChannel(self.wand, CHANNELS[channel],
                                               index, argc, argv)
        else:
            raise ValueError('expected string from CHANNELS, not ' +
                             repr(channel))
        self.raise_exception()

    @manipulative
    def fx(self, expression, channel=None):
        """Manipulate each pixel of an image by given expression.

        FX preserves the current wand instance, and returns a new instance
        of :class:`Image` containing the affected pixels.  Defaults to the
        entire image, but the effect can be isolated to a single color
        channel by passing a :const:`CHANNELS` value to the ``channel``
        parameter.

        .. seealso:: The anatomy of FX expressions can be found at
                     http://www.imagemagick.org/script/fx.php

        :param expression: The entire FX expression to apply
        :type expression: :class:`basestring`
        :param channel: Optional channel to target.
        :type channel: :const:`CHANNELS`
        :returns: A new instance of an image with expression applied
        :rtype: :class:`Image`

        ..
versionadded:: 0.4.1 """ if not isinstance(expression, string_type): raise TypeError('expected basestring for expression, not' + repr(expression)) c_expression = binary(expression) if channel is None: new_wand = library.MagickFxImage(self.wand, c_expression) elif channel in CHANNELS: new_wand = library.MagickFxImageChannel(self.wand, CHANNELS[channel], c_expression) else: raise ValueError('expected string from CHANNELS, not ' + repr(channel)) if new_wand: return Image(image=BaseImage(new_wand)) self.raise_exception() @manipulative def transparentize(self, transparency): """Makes the image transparent by subtracting some percentage of the black color channel. The ``transparency`` parameter specifies the percentage. :param transparency: the percentage fade that should be performed on the image, from 0.0 to 1.0 :type transparency: :class:`numbers.Real` .. versionadded:: 0.2.0 """ if transparency: t = ctypes.c_double(float(self.quantum_range * float(transparency))) if t.value > self.quantum_range or t.value < 0: raise ValueError('transparency must be a numbers.Real value ' + 'between 0.0 and 1.0') # Set the wand to image zero, in case there are multiple images # in it library.MagickSetIteratorIndex(self.wand, 0) # Change the pixel representation of the image # to RGB with an alpha channel library.MagickSetImageType(self.wand, IMAGE_TYPES.index('truecolormatte')) # Perform the black channel subtraction library.MagickEvaluateImageChannel(self.wand, CHANNELS['opacity'], EVALUATE_OPS.index('subtract'), t) self.raise_exception() @manipulative def transparent_color(self, color, alpha, fuzz=0, invert=False): """Makes the color ``color`` a transparent color with a tolerance of fuzz. The ``alpha`` parameter specify the transparency level and the parameter ``fuzz`` specify the tolerance. :param color: The color that should be made transparent on the image, color object :type color: :class:`wand.color.Color` :param alpha: the level of transparency: 1.0 is fully opaque and 0.0 is fully transparent. :type alpha: :class:`numbers.Real` :param fuzz: By default target must match a particular pixel color exactly. However, in many cases two colors may differ by a small amount. The fuzz member of image defines how much tolerance is acceptable to consider two colors as the same. For example, set fuzz to 10 and the color red at intensities of 100 and 102 respectively are now interpreted as the same color for the color. :type fuzz: :class:`numbers.Integral` :param invert: Boolean to tell to paint the inverse selection. :type invert: :class:`bool` .. versionadded:: 0.3.0 """ if not isinstance(alpha, numbers.Real): raise TypeError('alpha must be an float, not ' + repr(alpha)) elif not isinstance(fuzz, numbers.Integral): raise TypeError('fuzz must be an integer, not ' + repr(fuzz)) elif not isinstance(color, Color): raise TypeError('color must be a wand.color.Color object, not ' + repr(color)) library.MagickTransparentPaintImage(self.wand, color.resource, alpha, fuzz, invert) self.raise_exception() def compare(self, image, metric='undefined'): """Compares an image to a reconstructed image. :param image: The reference image :type image: :class:`wand.image.Image` :param metric: The metric type to use for comparing. 
        :type metric: :class:`basestring`
        :returns: The difference image (:class:`wand.image.Image`), and the
                  computed distortion between the images
                  (:class:`numbers.Real`)
        :rtype: :class:`tuple`

        .. versionadded:: 0.4.3

        """
        if not isinstance(metric, string_type):
            raise TypeError('metric must be a string, not ' + repr(metric))
        metric = COMPARE_METRICS.index(metric)
        distortion = ctypes.c_double()
        compared_image = library.MagickCompareImages(
            self.wand, image.wand, metric, ctypes.byref(distortion)
        )
        return Image(BaseImage(compared_image)), distortion.value

    @manipulative
    def composite(self, image, left, top):
        """Places the supplied ``image`` over the current image, with the
        top left corner of ``image`` at coordinates ``left``, ``top`` of the
        current image.  The dimensions of the current image are not changed.

        :param image: the image placed over the current image
        :type image: :class:`wand.image.Image`
        :param left: the x-coordinate where `image` will be placed
        :type left: :class:`numbers.Integral`
        :param top: the y-coordinate where `image` will be placed
        :type top: :class:`numbers.Integral`

        .. versionadded:: 0.2.0

        """
        if not isinstance(left, numbers.Integral):
            raise TypeError('left must be an integer, not ' + repr(left))
        elif not isinstance(top, numbers.Integral):
            raise TypeError('top must be an integer, not ' + repr(top))
        op = COMPOSITE_OPERATORS.index('over')
        library.MagickCompositeImage(self.wand, image.wand, op,
                                     int(left), int(top))
        self.raise_exception()

    @manipulative
    def composite_channel(self, channel, image, operator, left=0, top=0):
        """Composite two images using the particular ``channel``.

        :param channel: the channel type.  available values can be found
                        in the :const:`CHANNELS` mapping
        :param image: the composited source image.
                      (the receiver image becomes the destination)
        :type image: :class:`Image`
        :param operator: the operator that affects how the composite
                         is applied to the image.  available values
                         can be found in the :const:`COMPOSITE_OPERATORS`
                         list
        :param left: the column offset of the composited source image
        :type left: :class:`numbers.Integral`
        :param top: the row offset of the composited source image
        :type top: :class:`numbers.Integral`
        :raises ValueError: when the given ``channel`` or
                            ``operator`` is invalid

        .. versionadded:: 0.3.0

        """
        if not isinstance(channel, string_type):
            raise TypeError('channel must be a string, not ' +
                            repr(channel))
        elif not isinstance(operator, string_type):
            raise TypeError('operator must be a string, not ' +
                            repr(operator))
        elif not isinstance(left, numbers.Integral):
            raise TypeError('left must be an integer, not ' + repr(left))
        elif not isinstance(top, numbers.Integral):
            raise TypeError('top must be an integer, not ' + repr(top))
        try:
            ch_const = CHANNELS[channel]
        except KeyError:
            raise ValueError(repr(channel) + ' is an invalid channel type'
                             '; see wand.image.CHANNELS dictionary')
        try:
            op = COMPOSITE_OPERATORS.index(operator)
        except ValueError:
            raise ValueError(repr(operator) + ' is an invalid composite '
                             'operator type; see wand.image.COMPOSITE_'
                             'OPERATORS list')
        library.MagickCompositeImageChannel(self.wand, ch_const, image.wand,
                                            op, int(left), int(top))
        self.raise_exception()

    @manipulative
    def equalize(self):
        """Equalizes the image histogram

        .. versionadded:: 0.3.10

        """
        result = library.MagickEqualizeImage(self.wand)
        if not result:
            self.raise_exception()

    @manipulative
    def modulate(self, brightness=100.0, saturation=100.0, hue=100.0):
        """Changes the brightness, saturation and hue of an image.
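
        For example, a modest brightness boost with saturation and hue left
        unchanged might look like this (the values are illustrative only)::

            img.modulate(brightness=120.0, saturation=100.0, hue=100.0)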
        We modulate the image with the given ``brightness``, ``saturation``
        and ``hue``.

        :param brightness: percentage of brightness
        :type brightness: :class:`numbers.Real`
        :param saturation: percentage of saturation
        :type saturation: :class:`numbers.Real`
        :param hue: percentage of hue rotation
        :type hue: :class:`numbers.Real`
        :raises ValueError: when one or more arguments are invalid

        .. versionadded:: 0.3.4

        """
        if not isinstance(brightness, numbers.Real):
            raise TypeError('brightness has to be a numbers.Real, not ' +
                            repr(brightness))
        elif not isinstance(saturation, numbers.Real):
            raise TypeError('saturation has to be a numbers.Real, not ' +
                            repr(saturation))
        elif not isinstance(hue, numbers.Real):
            raise TypeError('hue has to be a numbers.Real, not ' + repr(hue))
        r = library.MagickModulateImage(
            self.wand, brightness, saturation, hue
        )
        if not r:
            self.raise_exception()

    @manipulative
    def merge_layers(self, method):
        """Composes all the image layers from the current given image onward
        to produce a single image of the merged layers.

        The initial canvas's size depends on the given ImageLayerMethod, and
        is initialized using the first image's background color.  The images
        are then composited onto that image in sequence using the given
        composition that has been assigned to each individual image.

        The method must be set with a value from :const:`IMAGE_LAYER_METHOD`
        that is acceptable to this operation.  (See ImageMagick documentation
        for more details.)

        :param method: the method of selecting the size of the initial
                       canvas.
        :type method: :class:`basestring`

        .. versionadded:: 0.4.3

        """
        if method not in ['merge', 'flatten', 'mosaic']:
            raise TypeError('method must be one of: merge, flatten, mosaic')
        m = IMAGE_LAYER_METHOD.index(method)
        r = library.MagickMergeImageLayers(self.wand, m)
        if not r:
            self.raise_exception()
        self.wand = r

    @manipulative
    def threshold(self, threshold=0.5, channel=None):
        """Changes the value of individual pixels based on the intensity of
        each pixel compared to threshold.  The result is a high-contrast,
        two color image.  It manipulates the image in place.

        :param threshold: threshold as a factor of quantum
        :type threshold: :class:`numbers.Real`
        :param channel: the channel type.  available values can be found
                        in the :const:`CHANNELS` mapping.  If ``None``,
                        threshold all channels.
        :type channel: :class:`basestring`

        .. versionadded:: 0.3.10

        """
        if not isinstance(threshold, numbers.Real):
            raise TypeError('threshold has to be a numbers.Real, not ' +
                            repr(threshold))
        if channel:
            try:
                ch_const = CHANNELS[channel]
            except KeyError:
                raise ValueError(repr(channel) + ' is an invalid channel '
                                 'type; see wand.image.CHANNELS dictionary')
            r = library.MagickThresholdImageChannel(
                self.wand, ch_const,
                threshold * self.quantum_range
            )
        else:
            r = library.MagickThresholdImage(
                self.wand, threshold * self.quantum_range
            )
        if not r:
            self.raise_exception()

    def negate(self, grayscale=False, channel=None):
        """Negate the colors in the reference image.

        :param grayscale: if set, only negate grayscale pixels in the image.
        :type grayscale: :class:`bool`
        :param channel: the channel type.  available values can be found
                        in the :const:`CHANNELS` mapping.  If ``None``,
                        negate all channels.
        :type channel: :class:`basestring`

        .. versionadded:: 0.3.8

        """
        if channel:
            try:
                ch_const = CHANNELS[channel]
            except KeyError:
                raise ValueError(repr(channel) + ' is an invalid channel '
                                 'type; see wand.image.CHANNELS dictionary')
            r = library.MagickNegateImageChannel(self.wand, ch_const,
                                                 grayscale)
        else:
            r = library.MagickNegateImage(self.wand, grayscale)
        if not r:
            self.raise_exception()

    @manipulative
    def gaussian_blur(self, radius, sigma):
        """Blurs the image.  We convolve the image with a gaussian operator
        of the given ``radius`` and standard deviation (``sigma``).
        For reasonable results, the ``radius`` should be larger than
        ``sigma``.  Use a ``radius`` of 0 and this method selects a
        suitable ``radius`` for you.

        :param radius: the radius of the gaussian, in pixels,
                       not counting the center pixel
        :type radius: :class:`numbers.Real`
        :param sigma: the standard deviation of the gaussian, in pixels
        :type sigma: :class:`numbers.Real`

        .. versionadded:: 0.3.3

        """
        if not isinstance(radius, numbers.Real):
            raise TypeError('radius has to be a numbers.Real, not ' +
                            repr(radius))
        elif not isinstance(sigma, numbers.Real):
            raise TypeError('sigma has to be a numbers.Real, not ' +
                            repr(sigma))
        r = library.MagickGaussianBlurImage(self.wand, radius, sigma)
        if not r:
            self.raise_exception()

    @manipulative
    def unsharp_mask(self, radius, sigma, amount, threshold):
        """Sharpens the image using unsharp mask filter.  We convolve the
        image with a Gaussian operator of the given ``radius`` and standard
        deviation (``sigma``).  For reasonable results, ``radius`` should be
        larger than ``sigma``.  Use a radius of 0 and :meth:`unsharp_mask()`
        selects a suitable radius for you.

        :param radius: the radius of the Gaussian, in pixels,
                       not counting the center pixel
        :type radius: :class:`numbers.Real`
        :param sigma: the standard deviation of the Gaussian, in pixels
        :type sigma: :class:`numbers.Real`
        :param amount: the percentage of the difference between the original
                       and the blur image that is added back into the
                       original
        :type amount: :class:`numbers.Real`
        :param threshold: the threshold in pixels needed to apply
                          the difference amount
        :type threshold: :class:`numbers.Real`

        .. versionadded:: 0.3.4

        """
        if not isinstance(radius, numbers.Real):
            raise TypeError('radius has to be a numbers.Real, not ' +
                            repr(radius))
        elif not isinstance(sigma, numbers.Real):
            raise TypeError('sigma has to be a numbers.Real, not ' +
                            repr(sigma))
        elif not isinstance(amount, numbers.Real):
            raise TypeError('amount has to be a numbers.Real, not ' +
                            repr(amount))
        elif not isinstance(threshold, numbers.Real):
            raise TypeError('threshold has to be a numbers.Real, not ' +
                            repr(threshold))
        r = library.MagickUnsharpMaskImage(self.wand, radius, sigma,
                                           amount, threshold)
        if not r:
            self.raise_exception()

    @manipulative
    def watermark(self, image, transparency=0.0, left=0, top=0):
        """Transparentizes the supplied ``image`` and places it over the
        current image, with the top left corner of ``image`` at coordinates
        ``left``, ``top`` of the current image.  The dimensions of the
        current image are not changed.

        :param image: the image placed over the current image
        :type image: :class:`wand.image.Image`
        :param transparency: the percentage fade that should be performed on
                             the image, from 0.0 to 1.0
        :type transparency: :class:`numbers.Real`
        :param left: the x-coordinate where `image` will be placed
        :type left: :class:`numbers.Integral`
        :param top: the y-coordinate where `image` will be placed
        :type top: :class:`numbers.Integral`

        ..
versionadded:: 0.2.0 """ with image.clone() as watermark_image: watermark_image.transparentize(transparency) self.composite(watermark_image, left=left, top=top) self.raise_exception() @manipulative def quantize(self, number_colors, colorspace_type, treedepth, dither, measure_error): """`quantize` analyzes the colors within a sequence of images and chooses a fixed number of colors to represent the image. The goal of the algorithm is to minimize the color difference between the input and output image while minimizing the processing time. :param number_colors: the number of colors. :type number_colors: :class:`numbers.Integral` :param colorspace_type: colorspace_type. available value can be found in the :const:`COLORSPACE_TYPES` :type colorspace_type: :class:`basestring` :param treedepth: normally, this integer value is zero or one. a zero or one tells :meth:`quantize` to choose a optimal tree depth of ``log4(number_colors)``. a tree of this depth generally allows the best representation of the reference image with the least amount of memory and the fastest computational speed. in some cases, such as an image with low color dispersion (a few number of colors), a value other than ``log4(number_colors)`` is required. to expand the color tree completely, use a value of 8 :type treedepth: :class:`numbers.Integral` :param dither: a value other than zero distributes the difference between an original image and the corresponding color reduced algorithm to neighboring pixels along a Hilbert curve :type dither: :class:`bool` :param measure_error: a value other than zero measures the difference between the original and quantized images. this difference is the total quantization error. The error is computed by summing over all pixels in an image the distance squared in RGB space between each reference pixel value and its quantized value :type measure_error: :class:`bool` .. versionadded:: 0.4.2 """ if not isinstance(number_colors, numbers.Integral): raise TypeError('number_colors must be integral, ' 'not ' + repr(number_colors)) if not isinstance(colorspace_type, string_type) \ or colorspace_type not in COLORSPACE_TYPES: raise TypeError('Colorspace value must be a string from ' 'COLORSPACE_TYPES, not ' + repr(colorspace_type)) if not isinstance(treedepth, numbers.Integral): raise TypeError('treedepth must be integral, ' 'not ' + repr(treedepth)) if not isinstance(dither, bool): raise TypeError('dither must be a bool, not ' + repr(dither)) if not isinstance(measure_error, bool): raise TypeError('measure_error must be a bool, not ' + repr(measure_error)) r = library.MagickQuantizeImage( self.wand, number_colors, COLORSPACE_TYPES.index(colorspace_type), treedepth, dither, measure_error ) if not r: self.raise_exception() @manipulative def transform_colorspace(self, colorspace_type): """Transform image's colorspace. :param colorspace_type: colorspace_type. available value can be found in the :const:`COLORSPACE_TYPES` :type colorspace_type: :class:`basestring` .. 
versionadded:: 0.4.2 """ if not isinstance(colorspace_type, string_type) \ or colorspace_type not in COLORSPACE_TYPES: raise TypeError('Colorspace value must be a string from ' 'COLORSPACE_TYPES, not ' + repr(colorspace_type)) r = library.MagickTransformImageColorspace( self.wand, COLORSPACE_TYPES.index(colorspace_type) ) if not r: self.raise_exception() def __repr__(self, extra_format=' ({self.width}x{self.height})'): cls = type(self) typename = '{0}.{1}'.format( cls.__module__, getattr(cls, '__qualname__', cls.__name__) ) if getattr(self, 'c_resource', None) is None: return '<{0}: (closed)>'.format(typename) sig = self.signature if not sig: return '<{0}: (empty)>'.format(typename) return '<{0}: {1}{2}>'.format( typename, sig[:7], extra_format.format(self=self) ) class Image(BaseImage): """An image object. :param image: makes an exact copy of the ``image`` :type image: :class:`Image` :param blob: opens an image of the ``blob`` byte array :type blob: :class:`bytes` :param file: opens an image of the ``file`` object :type file: file object :param filename: opens an image of the ``filename`` string :type filename: :class:`basestring` :param format: forces filename to buffer. ``format`` to help imagemagick detect the file format. Used only in ``blob`` or ``file`` cases :type format: :class:`basestring` :param width: the width of new blank image or an image loaded from raw data. :type width: :class:`numbers.Integral` :param height: the height of new blank imgage or an image loaded from raw data. :type height: :class:`numbers.Integral` :param depth: the depth used when loading raw data. :type depth: :class:`numbers.Integral` :param background: an optional background color. default is transparent :type background: :class:`wand.color.Color` :param resolution: set a resolution value (dpi), useful for vectorial formats (like pdf) :type resolution: :class:`collections.Sequence`, :Class:`numbers.Integral` .. versionadded:: 0.1.5 The ``file`` parameter. .. versionadded:: 0.1.1 The ``blob`` parameter. .. versionadded:: 0.2.1 The ``format`` parameter. .. versionadded:: 0.2.2 The ``width``, ``height``, ``background`` parameters. .. versionadded:: 0.3.0 The ``resolution`` parameter. .. versionadded:: 0.4.2 The ``depth`` parameter. .. versionchanged:: 0.4.2 The ``depth``, ``width`` and ``height`` parameters can be used with the ``filename``, ``file`` and ``blob`` parameters to load raw pixel data. .. describe:: [left:right, top:bottom] Crops the image by its ``left``, ``right``, ``top`` and ``bottom``, and then returns the cropped one. :: with img[100:200, 150:300] as cropped: # manipulated the cropped image pass Like other subscriptable objects, default is 0 or its width/height:: img[:, :] #--> just clone img[:100, 200:] #--> equivalent to img[0:100, 200:img.height] Negative integers count from the end (width/height):: img[-70:-50, -20:-10] #--> equivalent to img[width-70:width-50, height-20:height-10] :returns: the cropped image :rtype: :class:`Image` .. versionadded:: 0.1.2 """ #: (:class:`Metadata`) The metadata mapping of the image. Read only. #: #: .. versionadded:: 0.3.0 metadata = None #: (:class:`ChannelImageDict`) The mapping of separated channels #: from the image. :: #: #: with image.channel_images['red'] as red_image: #: display(red_image) channel_images = None #: (:class:`ChannelDepthDict`) The mapping of channels to their depth. #: Read only. #: #: .. 
        versionadded:: 0.3.0
    channel_depths = None

    def __init__(self, image=None, blob=None, file=None, filename=None,
                 format=None, width=None, height=None, depth=None,
                 background=None, resolution=None):
        new_args = width, height, background, depth
        open_args = blob, file, filename
        if any(a is not None for a in new_args) and image is not None:
            raise TypeError("blank image parameters can't be used with "
                            'image parameter')
        if sum(a is not None for a in open_args + (image,)) > 1:
            raise TypeError('image, blob, file and filename parameters are '
                            'mutually exclusive; use only one at a time')
        if format is not None:
            if not isinstance(format, string_type):
                raise TypeError('format must be a string, not ' +
                                repr(format))
            if not any(a is not None for a in open_args):
                raise TypeError('format can only be used with the blob, '
                                'file or filename parameter')
        if depth not in [None, 8, 16, 32]:
            raise ValueError('Depth must be 8, 16 or 32')
        with self.allocate():
            if image is None:
                wand = library.NewMagickWand()
                super(Image, self).__init__(wand)
            if image is not None:
                if not isinstance(image, BaseImage):
                    raise TypeError('image must be a wand.image.Image '
                                    'instance, not ' + repr(image))
                wand = library.CloneMagickWand(image.wand)
                super(Image, self).__init__(wand)
            elif any(a is not None for a in open_args):
                if format:
                    format = binary(format)
                with Color('transparent') as bg:  # FIXME: parameterize this
                    result = library.MagickSetBackgroundColor(self.wand,
                                                              bg.resource)
                    if not result:
                        self.raise_exception()
                # allow setting the width, height and depth
                # (needed for loading raw data)
                if width is not None and height is not None:
                    if not isinstance(width, numbers.Integral) or width < 1:
                        raise TypeError('width must be a natural number, '
                                        'not ' + repr(width))
                    if not isinstance(height, numbers.Integral) or height < 1:
                        raise TypeError('height must be a natural number, '
                                        'not ' + repr(height))
                    library.MagickSetSize(self.wand, width, height)
                if depth is not None:
                    library.MagickSetDepth(self.wand, depth)
                if format:
                    library.MagickSetFormat(self.wand, format)
                    if not filename:
                        library.MagickSetFilename(self.wand,
                                                  b'buffer.' + format)
                if file is not None:
                    self.read(file=file, resolution=resolution)
                elif blob is not None:
                    self.read(blob=blob, resolution=resolution)
                elif filename is not None:
                    self.read(filename=filename, resolution=resolution)
                # clear the wand format, otherwise any subsequent call to
                # MagickGetImageBlob will silently change the image to this
                # format again.
                library.MagickSetFormat(self.wand, binary(""))
            elif width is not None and height is not None:
                self.blank(width, height, background)
                if depth:
                    r = library.MagickSetImageDepth(self.wand, depth)
                    if not r:
                        self.raise_exception()
            self.metadata = Metadata(self)
            from .sequence import Sequence
            self.sequence = Sequence(self)
        self.raise_exception()

    def destroy(self):
        """Manually remove :class:`~.sequence.SingleImage`'s in the
        :class:`~.sequence.Sequence`, allowing it to be properly garbage
        collected after using a ``with Image()`` context manager.

        """
        for i in range(0, len(self.sequence)):
            self.sequence.pop()
        super(Image, self).destroy()

    def read(self, file=None, filename=None, blob=None, resolution=None):
        """Read new image into Image() object.
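
        All of the following are equivalent ways to load an image (a
        sketch only; ``pikachu.png`` stands in for any real file)::

            img.read(filename='pikachu.png')
            img.read(file=open('pikachu.png', 'rb'))
            img.read(blob=open('pikachu.png', 'rb').read())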
:param blob: reads an image from the ``blob`` byte array :type blob: :class:`bytes` :param file: reads an image from the ``file`` object :type file: file object :param filename: reads an image from the ``filename`` string :type filename: :class:`basestring` :param resolution: set a resolution value (DPI), useful for vectorial formats (like PDF) :type resolution: :class:`collections.Sequence`, :class:`numbers.Integral` .. versionadded:: 0.3.0 """ r = None # Resolution must be set after image reading. if resolution is not None: if (isinstance(resolution, collections.Sequence) and len(resolution) == 2): library.MagickSetResolution(self.wand, *resolution) elif isinstance(resolution, numbers.Integral): library.MagickSetResolution(self.wand, resolution, resolution) else: raise TypeError('resolution must be a (x, y) pair or an ' 'integer of the same x/y') if file is not None: if (isinstance(file, file_types) and hasattr(libc, 'fdopen') and hasattr(file, 'mode')): fd = libc.fdopen(file.fileno(), file.mode) r = library.MagickReadImageFile(self.wand, fd) elif not callable(getattr(file, 'read', None)): raise TypeError('file must be a readable file object' ', but the given object does not ' 'have read() method') else: blob = file.read() file = None if blob is not None: if not isinstance(blob, collections.Iterable): raise TypeError('blob must be iterable, not ' + repr(blob)) if not isinstance(blob, binary_type): blob = b''.join(blob) r = library.MagickReadImageBlob(self.wand, blob, len(blob)) elif filename is not None: filename = encode_filename(filename) r = library.MagickReadImage(self.wand, filename) if not r: self.raise_exception() def close(self): """Closes the image explicitly. If you use the image object in :keyword:`with` statement, it was called implicitly so don't have to call it. .. note:: It has the same functionality of :attr:`destroy()` method. """ self.destroy() def clear(self): """Clears resources associated with the image, leaving the image blank, and ready to be used with new image. .. versionadded:: 0.3.0 """ library.ClearMagickWand(self.wand) def level(self, black=0.0, white=None, gamma=1.0, channel=None): """Adjusts the levels of an image by scaling the colors falling between specified black and white points to the full available quantum range. If only ``black`` is given, ``white`` will be adjusted inward. :param black: Black point, as a percentage of the system's quantum range. Defaults to 0. :type black: :class:`numbers.Real` :param white: White point, as a percentage of the system's quantum range. Defaults to 1.0. :type white: :class:`numbers.Real` :param gamma: Optional gamma adjustment. Values > 1.0 lighten the image's midtones while values < 1.0 darken them. :type gamma: :class:`numbers.Real` :param channel: The channel type. Available values can be found in the :const:`CHANNELS` mapping. If ``None``, normalize all channels. :type channel: :const:`CHANNELS` .. 
versionadded:: 0.4.1 """ if not isinstance(black, numbers.Real): raise TypeError('expecting real number, not' + repr(black)) # If white is not given, mimic CLI behavior by reducing top point if white is None: white = 1.0 - black if not isinstance(white, numbers.Real): raise TypeError('expecting real number, not' + repr(white)) if not isinstance(gamma, numbers.Real): raise TypeError('expecting real number, not' + repr(gamma)) bp = float(self.quantum_range * black) wp = float(self.quantum_range * white) if channel: try: ch_const = CHANNELS[channel] except KeyError: raise ValueError(repr(channel) + ' is an invalid channel type' '; see wand.image.CHANNELS dictionary') library.MagickLevelImageChannel(self.wand, ch_const, bp, gamma, wp) else: library.MagickLevelImage(self.wand, bp, gamma, wp) self.raise_exception() @property def format(self): """(:class:`basestring`) The image format. If you want to convert the image format, just reset this property:: assert isinstance(img, wand.image.Image) img.format = 'png' It may raise :exc:`ValueError` when the format is unsupported. .. seealso:: `ImageMagick Image Formats`__ ImageMagick uses an ASCII string known as *magick* (e.g. ``GIF``) to identify file formats, algorithms acting as formats, built-in patterns, and embedded profile types. __ http://www.imagemagick.org/script/formats.php .. versionadded:: 0.1.6 """ fmt = library.MagickGetImageFormat(self.wand) if bool(fmt): return text(fmt.value) self.raise_exception() @format.setter def format(self, fmt): if not isinstance(fmt, string_type): raise TypeError("format must be a string like 'png' or 'jpeg'" ', not ' + repr(fmt)) fmt = fmt.strip() r = library.MagickSetImageFormat(self.wand, binary(fmt.upper())) if not r: raise ValueError(repr(fmt) + ' is unsupported format') r = library.MagickSetFilename(self.wand, b'buffer.' + binary(fmt.lower())) if not r: self.raise_exception() @property def mimetype(self): """(:class:`basestring`) The MIME type of the image e.g. ``'image/jpeg'``, ``'image/png'``. .. versionadded:: 0.1.7 """ rp = libmagick.MagickToMime(binary(self.format)) if not bool(rp): self.raise_exception() mimetype = rp.value return text(mimetype) @property def animation(self): return (self.mimetype in ('image/gif', 'image/x-gif') and len(self.sequence) > 1) @property def compression(self): """(:class:`basestring`) The type of image compression. It's a string from :const:`COMPRESSION_TYPES` list. It also can be set. .. versionadded:: 0.3.6 """ compression_index = library.MagickGetImageCompression(self.wand) return COMPRESSION_TYPES[compression_index] @compression.setter def compression(self, value): if not isinstance(value, string_type): raise TypeError('expected a string, not ' + repr(value)) if value not in COMPRESSION_TYPES: raise ValueError('expected a string from COMPRESSION_TYPES, not ' + repr(value)) library.MagickSetImageCompression( self.wand, COMPRESSION_TYPES.index(value) ) def blank(self, width, height, background=None): """Creates blank image. :param width: the width of new blank image. :type width: :class:`numbers.Integral` :param height: the height of new blank imgage. :type height: :class:`numbers.Integral` :param background: an optional background color. default is transparent :type background: :class:`wand.color.Color` :returns: blank image :rtype: :class:`Image` .. 
versionadded:: 0.3.0 """ if not isinstance(width, numbers.Integral) or width < 1: raise TypeError('width must be a natural number, not ' + repr(width)) if not isinstance(height, numbers.Integral) or height < 1: raise TypeError('height must be a natural number, not ' + repr(height)) if background is not None and not isinstance(background, Color): raise TypeError('background must be a wand.color.Color ' 'instance, not ' + repr(background)) if background is None: background = Color('transparent') with background: r = library.MagickNewImage(self.wand, width, height, background.resource) if not r: self.raise_exception() return self def convert(self, format): """Converts the image format with the original image maintained. It returns a converted image instance which is new. :: with img.convert('png') as converted: converted.save(filename='converted.png') :param format: image format to convert to :type format: :class:`basestring` :returns: a converted image :rtype: :class:`Image` :raises ValueError: when the given ``format`` is unsupported .. versionadded:: 0.1.6 """ cloned = self.clone() cloned.format = format return cloned def save(self, file=None, filename=None): """Saves the image into the ``file`` or ``filename``. It takes only one argument at a time. :param file: a file object to write to :type file: file object :param filename: a filename string to write to :type filename: :class:`basestring` .. versionadded:: 0.1.5 The ``file`` parameter. .. versionadded:: 0.1.1 """ if file is None and filename is None: raise TypeError('expected an argument') elif file is not None and filename is not None: raise TypeError('expected only one argument; but two passed') elif file is not None: if isinstance(file, string_type): raise TypeError('file must be a writable file object, ' 'but {0!r} is a string; did you want ' '.save(filename={0!r})?'.format(file)) elif isinstance(file, file_types) and hasattr(libc, 'fdopen'): fd = libc.fdopen(file.fileno(), file.mode) if len(self.sequence) > 1: r = library.MagickWriteImagesFile(self.wand, fd) else: r = library.MagickWriteImageFile(self.wand, fd) libc.fflush(fd) if not r: self.raise_exception() else: if not callable(getattr(file, 'write', None)): raise TypeError('file must be a writable file object, ' 'but it does not have write() method: ' + repr(file)) file.write(self.make_blob()) else: if not isinstance(filename, string_type): raise TypeError('filename must be a string, not ' + repr(filename)) filename = encode_filename(filename) if len(self.sequence) > 1: r = library.MagickWriteImages(self.wand, filename, True) else: r = library.MagickWriteImage(self.wand, filename) if not r: self.raise_exception() def make_blob(self, format=None): """Makes the binary string of the image. :param format: the image format to write e.g. ``'png'``, ``'jpeg'``. it is omittable :type format: :class:`basestring` :returns: a blob (bytes) string :rtype: :class:`bytes` :raises ValueError: when ``format`` is invalid .. versionchanged:: 0.1.6 Removed a side effect that changes the image :attr:`format` silently. .. versionadded:: 0.1.5 The ``format`` parameter became optional. .. 
        versionadded:: 0.1.1

        """
        if format is not None:
            with self.convert(format) as converted:
                return converted.make_blob()
        library.MagickResetIterator(self.wand)
        length = ctypes.c_size_t()
        blob_p = None
        if len(self.sequence) > 1:
            blob_p = library.MagickGetImagesBlob(self.wand,
                                                 ctypes.byref(length))
        else:
            blob_p = library.MagickGetImageBlob(self.wand,
                                                ctypes.byref(length))
        if blob_p and length.value:
            blob = ctypes.string_at(blob_p, length.value)
            library.MagickRelinquishMemory(blob_p)
            return blob
        self.raise_exception()

    def strip(self):
        """Strips an image of all profiles and comments.

        .. versionadded:: 0.2.0

        """
        result = library.MagickStripImage(self.wand)
        if not result:
            self.raise_exception()

    def trim(self, color=None, fuzz=0):
        """Remove solid border from image.  Uses top left pixel as a guide
        by default, or you can also specify the ``color`` to remove.

        :param color: the border color to remove.
                      if it's omitted top left pixel is used by default
        :type color: :class:`~wand.color.Color`
        :param fuzz: Defines how much tolerance is acceptable to consider
                     two colors as the same.
        :type fuzz: :class:`numbers.Integral`

        .. versionadded:: 0.3.0
           Optional ``color`` and ``fuzz`` parameters.

        .. versionadded:: 0.2.1

        """
        with color or self[0, 0] as color:
            self.border(color, 1, 1)
        result = library.MagickTrimImage(self.wand, fuzz)
        if not result:
            self.raise_exception()

    @manipulative
    def transpose(self):
        """Creates a vertical mirror image by reflecting the pixels around
        the central x-axis while rotating them 90-degrees.

        .. versionadded:: 0.4.1

        """
        result = library.MagickTransposeImage(self.wand)
        if not result:
            self.raise_exception()

    @manipulative
    def transverse(self):
        """Creates a horizontal mirror image by reflecting the pixels around
        the central y-axis while rotating them 270-degrees.

        .. versionadded:: 0.4.1

        """
        result = library.MagickTransverseImage(self.wand)
        if not result:
            self.raise_exception()

    @manipulative
    def _auto_orient(self):
        """Fallback for :attr:`auto_orient()` method
        (which wraps :c:func:`MagickAutoOrientImage`),
        fixes orientation by checking EXIF data.

        .. versionadded:: 0.4.1

        """
        exif_orientation = self.metadata.get('exif:orientation')
        if not exif_orientation:
            return

        orientation_type = ORIENTATION_TYPES[int(exif_orientation)]

        fn_lookup = {
            'undefined': None,
            'top_left': None,
            'top_right': self.flop,
            'bottom_right': functools.partial(self.rotate, degree=180.0),
            'bottom_left': self.flip,
            'left_top': self.transpose,
            'right_top': functools.partial(self.rotate, degree=90.0),
            'right_bottom': self.transverse,
            'left_bottom': functools.partial(self.rotate, degree=270.0)
        }

        fn = fn_lookup.get(orientation_type)

        if not fn:
            return

        fn()
        self.orientation = 'top_left'

    @manipulative
    def auto_orient(self):
        """Adjusts an image so that its orientation is suitable for viewing
        (i.e. top-left orientation).  If available, it uses
        :c:func:`MagickAutoOrientImage` (added in ImageMagick 6.8.9+); with
        an older ImageMagick library it falls back to the
        :meth:`_auto_orient()` method.

        .. versionadded:: 0.4.1

        """
        try:
            result = library.MagickAutoOrientImage(self.wand)
            if not result:
                self.raise_exception()
        except AttributeError:
            self._auto_orient()

    def border(self, color, width, height):
        """Surrounds the image with a border.

        :param color: the border color pixel wand
        :type color: :class:`~wand.color.Color`
        :param width: the border width
        :type width: :class:`numbers.Integral`
        :param height: the border height
        :type height: :class:`numbers.Integral`

        .. versionadded:: 0.3.0

        """
        if not isinstance(color, Color):
            raise TypeError('color must be a wand.color.Color object, not ' +
                            repr(color))
        with color:
            result = library.MagickBorderImage(self.wand, color.resource,
                                               width, height)
        if not result:
            self.raise_exception()

    @manipulative
    def contrast_stretch(self, black_point=0.0, white_point=None,
                         channel=None):
        """Enhance contrast of image by adjusting the span of the available
        colors.

        If only ``black_point`` is given, match the CLI behavior by assuming
        the ``white_point`` has the same delta percentage off the top,
        e.g. a contrast stretch of 15% is calculated as ``black_point`` =
        0.15 and ``white_point`` = 0.85.

        :param black_point: black point between 0.0 and 1.0.  default is 0.0
        :type black_point: :class:`numbers.Real`
        :param white_point: white point between 0.0 and 1.0.
                            default value of 1.0 minus ``black_point``
        :type white_point: :class:`numbers.Real`
        :param channel: optional color channel to apply contrast stretch
        :type channel: :const:`CHANNELS`
        :raises ValueError: if ``channel`` is not in :const:`CHANNELS`

        .. versionadded:: 0.4.1

        """
        if not isinstance(black_point, numbers.Real):
            raise TypeError('expecting float, not ' + repr(black_point))
        if not (white_point is None or
                isinstance(white_point, numbers.Real)):
            raise TypeError('expecting float, not ' + repr(white_point))
        # If only black-point is given, match CLI behavior by
        # calculating white point
        if white_point is None:
            white_point = 1.0 - black_point
        contrast_range = float(self.width * self.height)
        black_point *= contrast_range
        white_point *= contrast_range
        if channel in CHANNELS:
            library.MagickContrastStretchImageChannel(self.wand,
                                                      CHANNELS[channel],
                                                      black_point,
                                                      white_point)
        elif channel is None:
            library.MagickContrastStretchImage(self.wand,
                                               black_point,
                                               white_point)
        else:
            raise ValueError(repr(channel) + ' is an invalid channel type'
                             '; see wand.image.CHANNELS dictionary')
        self.raise_exception()

    @manipulative
    def gamma(self, adjustment_value, channel=None):
        """Gamma correct image.  Specific color channels can be corrected
        individually.  Typical values range between 0.8 and 2.3.

        :param adjustment_value: value to adjust gamma level
        :type adjustment_value: :class:`numbers.Real`
        :param channel: optional channel to apply gamma correction
        :type channel: :class:`basestring`
        :raises TypeError: if ``adjustment_value`` is not
                           a :class:`numbers.Real`
        :raises ValueError: if ``channel`` is not in :const:`CHANNELS`

        .. versionadded:: 0.4.1

        """
        if not isinstance(adjustment_value, numbers.Real):
            raise TypeError('expecting float, not ' + repr(adjustment_value))
        if channel in CHANNELS:
            library.MagickGammaImageChannel(self.wand,
                                            CHANNELS[channel],
                                            adjustment_value)
        elif channel is None:
            library.MagickGammaImage(self.wand, adjustment_value)
        else:
            raise ValueError(repr(channel) + ' is an invalid channel type'
                             '; see wand.image.CHANNELS dictionary')
        self.raise_exception()

    @manipulative
    def linear_stretch(self, black_point=0.0, white_point=1.0):
        """Enhance saturation intensity of an image.

        :param black_point: Black point between 0.0 and 1.0.  Default 0.0
        :type black_point: :class:`numbers.Real`
        :param white_point: White point between 0.0 and 1.0.  Default 1.0
        :type white_point: :class:`numbers.Real`

        ..
versionadded:: 0.4.1 """ if not isinstance(black_point, numbers.Real): raise TypeError('expecting float, not ' + repr(black_point)) if not isinstance(white_point, numbers.Real): raise TypeError('expecting float, not ' + repr(white_point)) linear_range = float(self.width * self.height) library.MagickLinearStretchImage(self.wand, linear_range * black_point, linear_range * white_point) def normalize(self, channel=None): """Normalize color channels. :param channel: the channel type. available values can be found in the :const:`CHANNELS` mapping. If ``None``, normalize all channels. :type channel: :class:`basestring` """ if channel: try: ch_const = CHANNELS[channel] except KeyError: raise ValueError(repr(channel) + ' is an invalid channel type' '; see wand.image.CHANNELS dictionary') r = library.MagickNormalizeImageChannel(self.wand, ch_const) else: r = library.MagickNormalizeImage(self.wand) if not r: self.raise_exception() def _repr_png_(self): with self.convert('png') as cloned: return cloned.make_blob() def __repr__(self): return super(Image, self).__repr__( extra_format=' {self.format!r} ({self.width}x{self.height})' ) class Iterator(Resource, collections.Iterator): """Row iterator for :class:`Image`. It shouldn't be instantiated directly; instead, it can be acquired through :class:`Image` instance:: assert isinstance(image, wand.image.Image) iterator = iter(image) It doesn't iterate every pixel, but rows. For example:: for row in image: for col in row: assert isinstance(col, wand.color.Color) print(col) Every row is a :class:`collections.Sequence` which consists of one or more :class:`wand.color.Color` values. :param image: the image to get an iterator :type image: :class:`Image` .. versionadded:: 0.1.3 """ c_is_resource = library.IsPixelIterator c_destroy_resource = library.DestroyPixelIterator c_get_exception = library.PixelGetIteratorException c_clear_exception = library.PixelClearIteratorException def __init__(self, image=None, iterator=None): if image is not None and iterator is not None: raise TypeError('it takes only one argument at a time') with self.allocate(): if image is not None: if not isinstance(image, Image): raise TypeError('expected a wand.image.Image instance, ' 'not ' + repr(image)) self.resource = library.NewPixelIterator(image.wand) self.height = image.height else: if not isinstance(iterator, Iterator): raise TypeError('expected a wand.image.Iterator instance, ' 'not ' + repr(iterator)) self.resource = library.ClonePixelIterator(iterator.resource) self.height = iterator.height self.raise_exception() self.cursor = 0 def __iter__(self): return self def seek(self, y): if not isinstance(y, numbers.Integral): raise TypeError('expected an integer, but got ' + repr(y)) elif y < 0: raise ValueError('cannot be less than 0, but got ' + repr(y)) elif y > self.height: raise ValueError('canot be greater than height') self.cursor = y if y == 0: library.PixelSetFirstIteratorRow(self.resource) else: if not library.PixelSetIteratorRow(self.resource, y - 1): self.raise_exception() def __next__(self, x=None): if self.cursor >= self.height: self.destroy() raise StopIteration() self.cursor += 1 width = ctypes.c_size_t() pixels = library.PixelGetNextIteratorRow(self.resource, ctypes.byref(width)) get_color = library.PixelGetMagickColor struct_size = ctypes.sizeof(MagickPixelPacket) if x is None: r_pixels = [None] * width.value for x in xrange(width.value): pc = pixels[x] packet_buffer = ctypes.create_string_buffer(struct_size) get_color(pc, packet_buffer) r_pixels[x] = 
Color(raw=packet_buffer) return r_pixels packet_buffer = ctypes.create_string_buffer(struct_size) get_color(pixels[x], packet_buffer) return Color(raw=packet_buffer) next = __next__ # Python 2 compatibility def clone(self): """Clones the same iterator. """ return type(self)(iterator=self) class ImageProperty(object): """The mixin class to maintain a weak reference to the parent :class:`Image` object. .. versionadded:: 0.3.0 """ def __init__(self, image): if not isinstance(image, BaseImage): raise TypeError('expected a wand.image.BaseImage instance, ' 'not ' + repr(image)) self._image = weakref.ref(image) @property def image(self): """(:class:`Image`) The parent image. It ensures that the parent :class:`Image`, which is held in a weak reference, still exists. Returns the dereferenced :class:`Image` if it does exist, or raises a :exc:`ClosedImageError` otherwise. :exc: `ClosedImageError` when the parent Image has been destroyed """ # Dereference our weakref and check that the parent Image stil exists image = self._image() if image is not None: return image raise ClosedImageError( 'parent Image of {0!r} has been destroyed'.format(self) ) class OptionDict(ImageProperty, collections.MutableMapping): """Mutable mapping of the image internal options. See available options in :const:`OPTIONS` constant. .. versionadded:: 0.3.0 """ def __iter__(self): return iter(OPTIONS) def __len__(self): return len(OPTIONS) def __getitem__(self, key): if not isinstance(key, string_type): raise TypeError('option name must be a string, not ' + repr(key)) if key not in OPTIONS: raise ValueError('invalid option: ' + repr(key)) image = self.image return text(library.MagickGetOption(image.wand, binary(key))) def __setitem__(self, key, value): if not isinstance(key, string_type): raise TypeError('option name must be a string, not ' + repr(key)) if not isinstance(value, string_type): raise TypeError('option value must be a string, not ' + repr(value)) if key not in OPTIONS: raise ValueError('invalid option: ' + repr(key)) image = self.image library.MagickSetOption(image.wand, binary(key), binary(value)) def __delitem__(self, key): self[key] = '' class Metadata(ImageProperty, collections.Mapping): """Class that implements dict-like read-only access to image metadata like EXIF or IPTC headers. :param image: an image instance :type image: :class:`Image` .. note:: You don't have to use this by yourself. Use :attr:`Image.metadata` property instead. .. versionadded:: 0.3.0 """ def __init__(self, image): if not isinstance(image, Image): raise TypeError('expected a wand.image.Image instance, ' 'not ' + repr(image)) super(Metadata, self).__init__(image) def __getitem__(self, k): """ :param k: Metadata header name string. 
        :type k: :class:`basestring`
        :returns: a header value string
        :rtype: :class:`str`

        """
        image = self.image
        if not isinstance(k, string_type):
            raise TypeError('k must be a string, not ' + repr(k))
        v = library.MagickGetImageProperty(image.wand, binary(k))
        if not v:
            raise KeyError(k)
        value = v.value
        return text(value)

    def __iter__(self):
        image = self.image
        num = ctypes.c_size_t()
        props_p = library.MagickGetImageProperties(image.wand, b'', num)
        props = [text(props_p[i]) for i in xrange(num.value)]
        library.MagickRelinquishMemory(props_p)
        return iter(props)

    def __len__(self):
        image = self.image
        num = ctypes.c_size_t()
        props_p = library.MagickGetImageProperties(image.wand, b'', num)
        library.MagickRelinquishMemory(props_p)
        return num.value


class ChannelImageDict(ImageProperty, collections.Mapping):
    """The mapping table of separated images of the particular channel
    from the image.

    :param image: an image instance
    :type image: :class:`Image`

    .. note::

       You don't have to use this by yourself.
       Use :attr:`Image.channel_images` property instead.

    .. versionadded:: 0.3.0

    """

    def __iter__(self):
        return iter(CHANNELS)

    def __len__(self):
        return len(CHANNELS)

    def __getitem__(self, channel):
        c = CHANNELS[channel]
        img = self.image.clone()
        succeeded = library.MagickSeparateImageChannel(img.wand, c)
        if not succeeded:
            try:
                img.raise_exception()
            except WandException:
                img.close()
                raise
        return img


class ChannelDepthDict(ImageProperty, collections.Mapping):
    """The mapping table of channels to their depth.

    :param image: an image instance
    :type image: :class:`Image`

    .. note::

       You don't have to use this by yourself.
       Use :attr:`Image.channel_depths` property instead.

    .. versionadded:: 0.3.0

    """

    def __iter__(self):
        return iter(CHANNELS)

    def __len__(self):
        return len(CHANNELS)

    def __getitem__(self, channel):
        c = CHANNELS[channel]
        depth = library.MagickGetImageChannelDepth(self.image.wand, c)
        return int(depth)


class HistogramDict(collections.Mapping):
    """Specialized mapping object to represent color histogram.
    Keys are colors, and values are the number of pixels.

    :param image: the image to get its histogram
    :type image: :class:`BaseImage`

    .. versionadded:: 0.3.0

    """

    def __init__(self, image):
        self.size = ctypes.c_size_t()
        self.pixels = library.MagickGetImageHistogram(
            image.wand,
            ctypes.byref(self.size)
        )
        self.counts = None

    def __len__(self):
        if self.counts is None:
            return self.size.value
        return len(self.counts)

    def __iter__(self):
        if self.counts is None:
            pixels = self.pixels
            string = library.PixelGetColorAsString
            return (Color(string(pixels[i]).value)
                    for i in xrange(self.size.value))
        return iter(Color(string=c) for c in self.counts)

    def __getitem__(self, color):
        if self.counts is None:
            string = library.PixelGetColorAsNormalizedString
            pixels = self.pixels
            count = library.PixelGetColorCount
            self.counts = dict(
                (text(string(pixels[i]).value), count(pixels[i]))
                for i in xrange(self.size.value)
            )
            del self.size, self.pixels
        return self.counts[color.normalized_string]


class ClosedImageError(DestroyedResourceError):
    """An error that is raised when some code tries to access an already
    closed image.

    """
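A brief usage sketch of the row-iteration protocol the Iterator class above implements, assuming wand is installed; 'photo.png' is a placeholder path, not a file shipped with the library:

from wand.image import Image

with Image(filename='photo.png') as img:   # placeholder file
    it = iter(img)        # a wand.image.Iterator over rows
    it.seek(5)            # move the cursor to row 5 (validated by seek() above)
    row = next(it)        # a sequence of wand.color.Color values
    print(len(row), row[0])

seek() plus next() mirrors the cursor bookkeeping in __next__: once the cursor reaches the image height, the iterator destroys its resource and raises StopIteration.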
reflective_key.ts
/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {stringify} from '../util/stringify'; import {resolveForwardRef} from './forward_ref'; /** * A unique object used for retrieving items from the {@link ReflectiveInjector}. * * Keys have: * - a system-wide unique `id`. * - a `token`. * * `Key` is used internally by {@link ReflectiveInjector} because its system-wide unique `id` allows * the * injector to store created objects in a more efficient way. * * `Key` should not be created directly. {@link ReflectiveInjector} creates keys automatically when * resolving * providers. * * @deprecated No replacement * @publicApi */ export class ReflectiveKey { public readonly displayName: string; /** * Private */ constructor(public token: Object, public id: number) { if (!token) { throw new Error('Token must be defined!'); } this.displayName = stringify(this.token); } /** * Retrieves a `Key` for a token. */ static get(token: Object): ReflectiveKey { return _globalKeyRegistry.get(resolveForwardRef(token)); } /** * @returns the number of keys registered in the system. */ static get numberOfKeys(): number { return _globalKeyRegistry.numberOfKeys; } } export class
{ private _allKeys = new Map<Object, ReflectiveKey>(); get(token: Object): ReflectiveKey { if (token instanceof ReflectiveKey) return token; if (this._allKeys.has(token)) { return this._allKeys.get(token) !; } const newKey = new ReflectiveKey(token, ReflectiveKey.numberOfKeys); this._allKeys.set(token, newKey); return newKey; } get numberOfKeys(): number { return this._allKeys.size; } } const _globalKeyRegistry = new KeyRegistry();
KeyRegistry
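For illustration, the token-interning pattern KeyRegistry implements can be sketched in a few lines of Python; the Key and KeyRegistry names here are stand-ins for this sketch, not Angular API:

class Key:
    def __init__(self, token, id):
        self.token, self.id = token, id

class KeyRegistry:
    def __init__(self):
        self._all = {}            # token -> Key
    def get(self, token):
        if isinstance(token, Key):
            return token
        if token not in self._all:
            # ids are dense: the next id is simply the current registry size
            self._all[token] = Key(token, len(self._all))
        return self._all[token]

registry = KeyRegistry()
assert registry.get('ServiceA').id == 0
assert registry.get('ServiceB').id == 1
assert registry.get('ServiceA') is registry.get('ServiceA')   # interned

Because ids are dense and assigned in registration order, the injector can store resolved objects indexed by id, which is the "more efficient way" the comment above refers to.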
page.go
package metascraper import ( "bytes" "golang.org/x/net/html" "regexp" "strings" ) var lineFeedReplacer = regexp.MustCompile(`[\n\r]+`) var whitespaceReplacer = regexp.MustCompile(`\s+`) // Page represents an HTML document with metadata. type Page struct { URL string // The web page's URL. HTML string // The web page's raw HTML. Text string // The text content in the body of the web page, sans markup. // Series of more than one line feed are replaced by a single newline. // Series of more than one space are replaced by a single space. Title string // The title of the web page, as given in the head's title element. MetaReader *MetaReader // A TokenReader for extracting metadata from the document's head. SchemaReader *SchemaReader // A TokenReader for extracting schema.org metadata from the document's body. PageReader *PageReader // A TokenReader for extracting the page's title and text content. } // Readers gets a ReaderList aggregating all the TokenReaders associated with // this page. Client code could add or remove token readers while reusing the // Read method by embedding the Page struct and overriding this method with one // that populates the ReaderList differently. // TODO: Write an example that shows how to extend the Page struct with additional // token readers. func (p *Page) Readers() ReaderList { return ReaderList{ Readers: []TokenReader{ p.PageReader, p.MetaReader, p.SchemaReader, }, } } // MetaData gets the metadata found in this page's head. func (p *Page) MetaData() []*Meta { return p.MetaReader.items } // SchemaData gets the schema.org metadata found in this page's body. func (p *Page) SchemaData() []*ItemScope { return p.SchemaReader.items } // Read populates the Page struct with content and metadata from the given // byte array, which the caller is responsible for assuring is well-formed HTML. func (p *Page) Read(text []byte) error { data := bytes.NewReader(text) z := html.NewTokenizer(data) readers := p.Readers() for { tt := z.Next() text := z.Text() switch tt { case html.ErrorToken: readers.Done() // Returning io.EOF indicates success. return z.Err() case html.TextToken: readers.HandleText(text) case html.StartTagToken: tagName, hasAttr := z.TagName() tn := string(tagName) attrs := AttrMap(hasAttr, z) readers.HandleStart(tn, attrs, z) case html.EndTagToken: tagName, _ := z.TagName() tn := string(tagName) readers.HandleEnd(tn, z) case html.SelfClosingTagToken: tagName, hasAttr := z.TagName() tn := string(tagName) attrs := AttrMap(hasAttr, z) readers.HandleStart(tn, attrs, z) readers.HandleEnd(tn, z) } } } // AttrMap parses the attributes of the current element into a friendly map. // It only makes sense to call this while processing a start or self closing tag token. func AttrMap(hasAttr bool, z *html.Tokenizer) map[string]string { attrs := make(map[string]string) if !hasAttr { return attrs } for { k, v, more := z.TagAttr() attrs[string(k)] = string(v) if !more { break } } return attrs } // PageReader implements the TokenReader interface; it maintains the necessary // state for extracting the body text and page title from a token stream. type PageReader struct { page *Page inTitle bool inBody bool inScript bool text []byte } func (r *PageReader) HandleStart(tn string, attrs map[string]string, z *html.Tokenizer) { switch tn { case "title": r.inTitle = true case "body": r.inBody = true case "script":
} } func (r *PageReader) HandleEnd(tn string, z *html.Tokenizer) { switch tn { case "title": r.inTitle = false case "body": r.inBody = false case "script": r.inScript = false } } func (r *PageReader) HandleText(text []byte) { if r.inTitle { r.page.Title = string(text) } else if r.inBody && !r.inScript { r.text = append(r.text, text...) } } func (r *PageReader) Done() { r.text = lineFeedReplacer.ReplaceAll(r.text, []byte("\n")) r.text = whitespaceReplacer.ReplaceAll(r.text, []byte{' '}) r.page.Text = strings.TrimSpace(string(r.text)) }
r.inScript = true
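The PageReader above is a small state machine: inTitle/inBody/inScript flags toggled by tag events decide which text tokens are kept. The same idea, sketched with Python's stdlib html.parser rather than golang.org/x/net/html:

from html.parser import HTMLParser

class TextExtractor(HTMLParser):
    def __init__(self):
        super().__init__()
        self.in_title = self.in_body = self.in_script = False
        self.title, self.chunks = '', []
    def handle_starttag(self, tag, attrs):
        if tag == 'title': self.in_title = True
        elif tag == 'body': self.in_body = True
        elif tag == 'script': self.in_script = True
    def handle_endtag(self, tag):
        if tag == 'title': self.in_title = False
        elif tag == 'body': self.in_body = False
        elif tag == 'script': self.in_script = False
    def handle_data(self, data):
        if self.in_title:
            self.title = data
        elif self.in_body and not self.in_script:   # body text, minus scripts
            self.chunks.append(data)

p = TextExtractor()
p.feed('<html><head><title>Hi</title></head>'
       '<body>text<script>var x;</script>more</body></html>')
assert p.title == 'Hi' and ''.join(p.chunks) == 'textmore'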
threshold_par.py
import numpy as np import cv2 import matplotlib.pyplot as plt import matplotlib.image as mpimg import glob def abs_sobel_threshold(img, orientation='x', kernel_size=3, threshold=(0, 255)): """ `orientation` Input for setting the sobel operator gradient orientation (x, y) `kernel_size` Input for kernel size of sobel operator `threshold` Input tuple for lower and upper threshold This function calculates a binary image mask according to the absolute sobel operation on a given gradient, based on a lower and upper threshold. returns a binary image """ gray = cv2.GaussianBlur(img, (5, 5), 0) # calculate the sobel depending on the orientation if orientation == 'x': abs_sobel = np.absolute(cv2.Sobel(gray, cv2.CV_64F, 1, 0, \ ksize=kernel_size)) elif orientation == 'y':
else: abs_sobel = np.zeros_like(gray) print("None") # rescale the sobel to uint8 type scaled_sobel = np.uint8(255 * abs_sobel / np.max(abs_sobel)) # calculate the binary output with respect to thresholds binary_output = np.zeros_like(scaled_sobel) binary_output[(scaled_sobel >= threshold[0]) & (scaled_sobel <= threshold[1])] = 1 return binary_output def direction_sobel_threshold(img, kernel_size=3, threshold=(0, np.pi / 2)): """ `kernel_size` Input for kernel size of sobel operator `threshold` Input tuple for lower and upper threshold in rad This function calculates the gradients and thresholds the direction based on given angles returns a binary image based on the given thresholds """ gray = cv2.GaussianBlur(img, (5, 5), 0) # calculate the sobel sobelx = cv2.Sobel(gray, cv2.CV_64F, 1, 0, ksize=kernel_size) sobely = cv2.Sobel(gray, cv2.CV_64F, 0, 1, ksize=kernel_size) # calculate the gradient direction absgraddir = np.arctan2(np.absolute(sobely), np.absolute(sobelx)) # calculate the binary output with respect to thresholds binary_output = np.zeros_like(absgraddir) binary_output[(absgraddir >= threshold[0]) & (absgraddir <= threshold[1])] = 1 # Return the binary image return binary_output def mag_sobel_threshold(img, kernel_size=3, threshold=(0, 255)): """ `kernel_size` Input for kernel size of sobel operator `threshold` Input tuple for lower and upper threshold This function calculates the magnitude of the gradients detected by the sobel operator in X and Y direction. returns a binary image based on the given thresholds """ gray = cv2.GaussianBlur(img, (5, 5), 0) # calculate the sobel sobelx = cv2.Sobel(gray, cv2.CV_64F, 1, 0, ksize=kernel_size) sobely = cv2.Sobel(gray, cv2.CV_64F, 0, 1, ksize=kernel_size) # calculate the gradient magnitude magnitude = np.sqrt(sobelx**2 + sobely**2) # rescale to 8 bit scale = np.max(magnitude)/255 magnitude = (magnitude / scale).astype(np.uint8) # calculate the binary output with respect to thresholds binary_output = np.zeros_like(magnitude) binary_output[(magnitude >= threshold[0]) & (magnitude <= threshold[1])] = 1 return binary_output def nothing(x): pass cv2.namedWindow('image') """ cv2.createTrackbar('Low', 'image', 0, 255, nothing) cv2.createTrackbar('High', 'image', 0, 255, nothing) """ cv2.createTrackbar('Low', 'image', 0, 255, nothing) cv2.createTrackbar('High', 'image', 0, 255, nothing) #testimages = glob.glob('test_images/*.jpg') testimages = glob.glob('output_images/debug/*.png') for curImage in testimages: print(curImage) img = cv2.imread(curImage) img = img[:,:,:3] img = cv2.pyrDown(img) gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) hls = cv2.cvtColor(img, cv2.COLOR_BGR2HLS)[:,:,1] lab = cv2.cvtColor(img, cv2.COLOR_BGR2LAB)[:,:,2] """ f, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2, figsize=(24, 9)) ax1.imshow(img) ax1.set_title("RGB") ax2.imshow(lab[:,:,0], cmap='gray') ax2.set_title("L") ax3.imshow(lab[:,:,1], cmap='gray') ax3.set_title("A") ax4.imshow(lab[:,:,2], cmap='gray') ax4.set_title("B") plt.show() """ debug_image = np.zeros((360, 640 * 2, 3), dtype=np.uint8) debug_image[0:img.shape[0], 0:img.shape[1]] = img gray = cv2.equalizeHist(gray) while(1): k = cv2.waitKey(1) & 0xFF if k == 27: break low = cv2.getTrackbarPos('Low', 'image') high = cv2.getTrackbarPos('High', 'image') #binary = abs_sobel_threshold(gray, 'y', kernel_size=3, threshold=(low, high)) #binary = mag_sobel_threshold(gray, kernel_size=3, threshold=(low, high)) """ binary = np.zeros_like(hls) binary[(lab > low) & (lab < high)] = 1 """ ret, binary = cv2.threshold(gray, 
thresh=low, maxval=high,
                                    type=cv2.THRESH_BINARY)
        # renamed from `bin` to avoid shadowing the built-in
        binary_img = np.dstack((binary, binary, binary))
        debug_image[0:binary_img.shape[0], img.shape[1]:] = binary_img
        cv2.imshow('window', debug_image)

cv2.destroyAllWindows()
abs_sobel = np.absolute(cv2.Sobel(gray, cv2.CV_64F, 0, 1, \ ksize=kernel_size))
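A quick sanity check for abs_sobel_threshold above, using a synthetic step-edge image made up for this example; it assumes the function has been copied into a standalone script, since importing threshold_par.py as-is would also run the trackbar GUI code at module level:

import numpy as np
# abs_sobel_threshold as defined above (requires cv2)

img = np.zeros((64, 64), dtype=np.uint8)
img[:, 32:] = 255          # vertical step edge -> strong x-gradient

mask = abs_sobel_threshold(img, orientation='x', kernel_size=3,
                           threshold=(50, 255))
assert set(np.unique(mask)) <= {0, 1}   # output is a binary mask
assert mask[:, 30:34].any()             # the edge columns are marked
assert not mask[:, :16].any()           # flat regions are not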
icons.js
/*eslint-env browser */ /*eslint-disable no-console */ /*jslint devel: true*/ /*global chrome, Utils */ (function () { 'use strict'; function
() { Utils.JSON.parseFromURL(chrome.extension.getURL("/icons.json"), function (icons) { var container = document.getElementById("icons"), imgTag; icons.map(function (icon) { imgTag = document.createElement("img"); imgTag.src = icon.big; imgTag.setAttribute("id", icon.id); imgTag.addEventListener("click", function () { chrome.tabs.query({ active: true, currentWindow: true }, function (tabs) { localStorage.setItem(tabs[0].url, icon.id); chrome.pageAction.setIcon({ path: icon.small, tabId: tabs[0].id }, function () { window.close(); }); }); }); container.appendChild(imgTag); }); }); } document.addEventListener('DOMContentLoaded', function () { loadIcons(); }); }());
loadIcons
test_valve.py
# -*- coding: utf-8 -*- """ Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available. Copyright (C) 2017-2019 THL A29 Limited, a Tencent company. All rights reserved. Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://opensource.org/licenses/MIT Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ # noqa from django.test import TestCase from django_signal_valve import valve from django_signal_valve.tests import mock_signal from django_signal_valve.models import Signal class TestValve(TestCase): def setUp(self): valve.unload_valve_function() def test_set_valve_function(self): self.assertRaises(Exception, valve.set_valve_function, args=[1]) def func(): return True valve.unload_valve_function() valve.set_valve_function(func) self.assertEqual(valve.valve_function(), func) self.assertRaises(Exception, valve.set_valve_function, args=[func]) valve.__valve_function = None def test_send_on_valve
args_1 = {'1': 1} kwargs_2 = {'2': 2} valve.unload_valve_function() valve.send(mock_signal, 'signal_1', **kwargs_1) valve.send(mock_signal, 'signal_1', **kwargs_2) self.assertEqual(mock_signal.signal_1.history[0], kwargs_1) self.assertEqual(mock_signal.signal_1.history[1], kwargs_2) mock_signal.clear() def test_send_on_valve_opened(self): kwargs_1 = {'1': 1} kwargs_2 = {'2': 2} def is_valve_closed(): return False valve.unload_valve_function() valve.set_valve_function(is_valve_closed) valve.send(mock_signal, 'signal_1', **kwargs_1) valve.send(mock_signal, 'signal_1', **kwargs_2) self.assertEqual(mock_signal.signal_1.history[0], kwargs_1) self.assertEqual(mock_signal.signal_1.history[1], kwargs_2) mock_signal.clear() def test_send_on_closed(self): kwargs_1 = {'1': 1} kwargs_2 = {'2': 2} def is_valve_closed(): return True valve.unload_valve_function() valve.set_valve_function(is_valve_closed) valve.send(mock_signal, 'signal_1', **kwargs_1) valve.send(mock_signal, 'signal_1', **kwargs_2) self.assertEqual(len(mock_signal.signal_1.history), 0) mock_signal.clear() Signal.objects.all().delete() def test_open_valve(self): kwargs_1 = {'1': 1} kwargs_2 = {'2': 2} def valve_closed(): return True valve.unload_valve_function() valve.set_valve_function(valve_closed) valve.send(mock_signal, 'signal_1', **kwargs_1) valve.send(mock_signal, 'signal_1', **kwargs_2) self.assertEqual(len(mock_signal.signal_1.history), 0) valve.open_valve(mock_signal) self.assertEqual(mock_signal.signal_1.history[0], kwargs_1) self.assertEqual(mock_signal.signal_1.history[1], kwargs_2) mock_signal.clear()
_is_none(self): kw
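The tests above pin down the valve contract: sends pass straight through when no valve function is set or the valve is open, are buffered while it is closed, and are replayed in order by open_valve. A minimal in-memory sketch of that contract follows; the real django_signal_valve persists pending sends in the Signal model rather than a Python list:

_valve_function = None
_pending = []

def set_valve_function(func):
    global _valve_function
    if _valve_function is not None:
        raise Exception('valve function can only be set once')
    _valve_function = func

def send(signal_module, signal_name, **kwargs):
    if _valve_function is not None and _valve_function():
        _pending.append((signal_module, signal_name, kwargs))  # valve closed: buffer
    else:
        getattr(signal_module, signal_name).send(**kwargs)

def open_valve(signal_module):
    while _pending:                      # replay buffered sends in order
        module, name, kwargs = _pending.pop(0)
        getattr(module, name).send(**kwargs)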
scene_loader.rs
use crate::gl; use wavefront_obj::obj; pub struct Frame { pub vertices: Vec<[f32; 7]>, } // (x_min, x_max, y_min, y_max, z_min, z_max) fn aabb_overlap( a: (f32, f32, f32, f32, f32, f32), b: (f32, f32, f32, f32, f32, f32), ) -> bool { let eps = 1e-6; a.0.max(b.0) < a.1.min(b.1) + eps && a.2.max(b.2) < a.3.min(b.3) + eps && a.4.max(b.4) < a.5.min(b.5) + eps } pub fn
<P: AsRef<std::path::Path>>(p: P) -> Result<Frame, Box<dyn std::error::Error>> { let contents = std::fs::read_to_string(p)?; let objects = obj::parse(contents)?.objects; let mut vertices = vec![]; let mut aabb: Vec<((f32, f32, f32, f32, f32, f32), (usize, usize))> = vec![]; for object in &objects { let vertices_start = vertices.len(); let (mut x_min, mut x_max) = (f32::INFINITY, -f32::INFINITY); let (mut y_min, mut y_max) = (f32::INFINITY, -f32::INFINITY); let (mut z_min, mut z_max) = (f32::INFINITY, -f32::INFINITY); for geom in &object.geometry { for shape in &geom.shapes { if let obj::Primitive::Triangle( (vi0, _, Some(ni0)), (vi1, _, Some(ni1)), (vi2, _, Some(ni2)), ) = shape.primitive { for (vi, ni) in [ (vi0, ni0), (vi1, ni1), (vi2, ni2), ] { let v = object.vertices[vi]; let n = object.normals[ni]; vertices.push([ v.x as f32, v.y as f32, v.z as f32, n.x as f32, n.y as f32, n.z as f32, 0.0, ]); x_min = x_min.min(v.x as f32); x_max = x_max.max(v.x as f32); y_min = y_min.min(v.y as f32); y_max = y_max.max(v.y as f32); z_min = z_min.min(v.z as f32); z_max = z_max.max(v.z as f32); } } } } let bb = (x_min, x_max, y_min, y_max, z_min, z_max); for &(bb_other, (start, end)) in &aabb { if aabb_overlap(bb, bb_other) { // Mark vertices in the other object as overlapping for i in start..end { vertices[i][6] = 1.0; } for i in vertices_start..vertices.len() { vertices[i][6] = 1.0; } } } aabb.push((bb, (vertices_start, vertices.len()))); } Ok(Frame { vertices, }) }
load
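The aabb_overlap helper above is the classic per-axis interval test: two boxes intersect iff their projections overlap on all three axes, with an epsilon so that exactly touching faces still count. The same predicate in Python, with a few checks:

EPS = 1e-6

def aabb_overlap(a, b):
    # boxes are (x_min, x_max, y_min, y_max, z_min, z_max)
    return (max(a[0], b[0]) < min(a[1], b[1]) + EPS and
            max(a[2], b[2]) < min(a[3], b[3]) + EPS and
            max(a[4], b[4]) < min(a[5], b[5]) + EPS)

assert aabb_overlap((0, 2, 0, 2, 0, 2), (1, 3, 1, 3, 1, 3))      # corner overlap
assert not aabb_overlap((0, 1, 0, 1, 0, 1), (2, 3, 0, 1, 0, 1))  # separated on x
assert aabb_overlap((0, 1, 0, 1, 0, 1), (1, 2, 0, 1, 0, 1))      # touching counts, via eps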
chained_bft_smr_test.rs
// Copyright (c) The Libra Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::{ chained_bft::{ block_storage::BlockReader, chained_bft_consensus_provider::InitialSetup, chained_bft_smr::{ChainedBftSMR, ChainedBftSMRConfig}, network_tests::NetworkPlayground, persistent_storage::RecoveryData, test_utils::{ consensus_runtime, with_smr_id, MockStateComputer, MockStorage, MockTransactionManager, TestPayload, }, }, state_replication::StateMachineReplication, }; use channel; use consensus_types::{ proposal_msg::{ProposalMsg, ProposalUncheckedSignatures}, vote_msg::VoteMsg, }; use futures::{channel::mpsc, executor::block_on, prelude::*}; use libra_config::config::{ ConsensusProposerType::{self, FixedProposer, MultipleOrderedProposers, RotatingProposer}, {BaseConfig, OnDiskStorageConfig, SafetyRulesBackend, SafetyRulesConfig}, }; use libra_crypto::hash::CryptoHash; use libra_types::{ crypto_proxies::ValidatorSet, crypto_proxies::{ random_validator_verifier, LedgerInfoWithSignatures, ValidatorChangeEventWithProof, ValidatorSigner, ValidatorVerifier, }, }; use network::{ proto::ConsensusMsg_oneof, validator_network::{ConsensusNetworkEvents, ConsensusNetworkSender}, }; use safety_rules::OnDiskStorage; use std::{convert::TryFrom, path::PathBuf, sync::Arc, time::Duration}; use tempfile::NamedTempFile; use tokio::runtime; /// Auxiliary struct that is preparing SMR for the test struct SMRNode { signer: ValidatorSigner, validators: Arc<ValidatorVerifier>, proposer_type: ConsensusProposerType, smr_id: usize, smr: ChainedBftSMR<TestPayload>, commit_cb_receiver: mpsc::UnboundedReceiver<LedgerInfoWithSignatures>, mempool: Arc<MockTransactionManager>, mempool_notif_receiver: mpsc::Receiver<usize>, storage: Arc<MockStorage<TestPayload>>, safety_rules_path: PathBuf, } impl SMRNode { fn start( playground: &mut NetworkPlayground, signer: ValidatorSigner, smr_id: usize, storage: Arc<MockStorage<TestPayload>>, initial_data: RecoveryData<TestPayload>, proposer_type: ConsensusProposerType, executor_with_reconfig: Option<ValidatorSet>, safety_rules_path: PathBuf, ) -> Self { let validators = initial_data.validators(); let author = signer.author(); let (network_reqs_tx, network_reqs_rx) = channel::new_test(8); let (consensus_tx, consensus_rx) = channel::new_test(8); let network_sender = ConsensusNetworkSender::new(network_reqs_tx); let network_events = ConsensusNetworkEvents::new(consensus_rx); playground.add_node(author, consensus_tx, network_reqs_rx); let runtime = runtime::Builder::new() .threaded_scheduler() .enable_all() .on_thread_start(with_smr_id(signer.author().short_str())) .build() .expect("Failed to create Tokio runtime!"); let mut safety_rules_config = SafetyRulesConfig::default(); safety_rules_config.backend = SafetyRulesBackend::OnDiskStorage(OnDiskStorageConfig { default: false, path: safety_rules_path.clone(), base: Arc::new(BaseConfig::default()), }); let config = ChainedBftSMRConfig { max_pruned_blocks_in_mem: 10000, pacemaker_initial_timeout: Duration::from_secs(3), proposer_type, contiguous_rounds: 2, max_block_size: 50, safety_rules: safety_rules_config, }; let initial_setup = InitialSetup { author, signer: signer.clone(), network_sender, network_events, }; let mut smr = ChainedBftSMR::new( initial_setup, runtime, config, storage.clone(), initial_data, ); let (commit_cb_sender, commit_cb_receiver) = mpsc::unbounded::<LedgerInfoWithSignatures>(); let mut mp = MockTransactionManager::new(); let commit_receiver = mp.take_commit_receiver(); let mempool = Arc::new(mp); smr.start( 
mempool.clone(), Arc::new(MockStateComputer::new( commit_cb_sender.clone(), Arc::clone(&storage), executor_with_reconfig, )), ) .expect("Failed to start SMR!"); Self { signer, validators, proposer_type, smr_id, smr, commit_cb_receiver, mempool, mempool_notif_receiver: commit_receiver, storage, safety_rules_path, } } fn restart(mut self, playground: &mut NetworkPlayground) -> Self { self.smr.stop(); let recover_data = self .storage .try_start() .unwrap_or_else(|e| panic!("fail to restart due to: {}", e)); Self::start( playground, self.signer, self.smr_id + 10, self.storage, recover_data, self.proposer_type, None, self.safety_rules_path, ) } fn start_num_nodes( num_nodes: usize, playground: &mut NetworkPlayground, proposer_type: ConsensusProposerType, executor_with_reconfig: bool, ) -> Vec<Self> { let (mut signers, validators) = random_validator_verifier(num_nodes, None, true); let validator_set: ValidatorSet = (&validators).into(); let executor_validator_set = if executor_with_reconfig { Some(validator_set.clone()) } else { None }; let mut nodes = vec![]; for smr_id in 0..num_nodes { let (initial_data, storage) = MockStorage::start_for_testing(validator_set.clone()); let safety_rules_path = NamedTempFile::new().unwrap().into_temp_path().to_path_buf(); OnDiskStorage::default_storage(safety_rules_path.clone()) .expect("Unable to allocate SafetyRules storage"); nodes.push(Self::start( playground, signers.remove(0), smr_id, storage, initial_data, proposer_type, executor_validator_set.clone(), safety_rules_path, )); } nodes } } fn verify_finality_proof(node: &SMRNode, ledger_info_with_sig: &LedgerInfoWithSignatures) { let ledger_info_hash = ledger_info_with_sig.ledger_info().hash(); for (author, signature) in ledger_info_with_sig.signatures() { assert_eq!( Ok(()), node.validators .verify_signature(*author, ledger_info_hash, &signature) ); } } #[test] /// Should receive a new proposal upon start fn basic_start_test() { let runtime = consensus_runtime(); let mut playground = NetworkPlayground::new(runtime.handle().clone()); let nodes = SMRNode::start_num_nodes(2, &mut playground, RotatingProposer, false); let genesis = nodes[0] .smr .block_store() .expect("No valid block store!") .root(); block_on(async move { let msg = playground .wait_for_messages(1, NetworkPlayground::proposals_only) .await; let first_proposal: ProposalMsg<Vec<u64>> = ProposalUncheckedSignatures::<Vec<u64>>::try_from(msg[0].1.clone()) .unwrap() .into(); assert_eq!(first_proposal.proposal().parent_id(), genesis.id()); assert_eq!( first_proposal .proposal() .quorum_cert() .certified_block() .id(), genesis.id() ); }); } #[test] /// Upon startup, the first proposal is sent, delivered and voted by all the participants. fn start_with_proposal_test() { let runtime = consensus_runtime(); let mut playground = NetworkPlayground::new(runtime.handle().clone()); let nodes = SMRNode::start_num_nodes(2, &mut playground, RotatingProposer, false); block_on(async move { let _proposals = playground .wait_for_messages(1, NetworkPlayground::proposals_only) .await; // Need to wait for 2 votes for the 2 replicas let votes: Vec<VoteMsg> = playground .wait_for_messages(2, NetworkPlayground::votes_only) .await .into_iter() .map(|(_, msg)| VoteMsg::try_from(msg).unwrap()) .collect(); let proposed_block_id = votes[0].vote().vote_data().proposed().id(); // Verify that the proposed block id is indeed present in the block store. 
assert!(nodes[0] .smr .block_store() .unwrap() .get_block(proposed_block_id) .is_some()); assert!(nodes[1] .smr .block_store() .unwrap() .get_block(proposed_block_id) .is_some()); }); } fn basic_full_round(num_nodes: usize, proposer_type: ConsensusProposerType) { let runtime = consensus_runtime(); let mut playground = NetworkPlayground::new(runtime.handle().clone()); let _nodes = SMRNode::start_num_nodes(num_nodes, &mut playground, proposer_type, false); // In case we're using multi-proposer, every proposal and vote is sent to two participants. let num_messages_to_send = if proposer_type == MultipleOrderedProposers { 2 * (num_nodes - 1) } else { num_nodes - 1 }; block_on(async move { let _broadcast_proposals_1 = playground .wait_for_messages(num_messages_to_send, NetworkPlayground::proposals_only) .await; let _votes_1 = playground .wait_for_messages(num_messages_to_send, NetworkPlayground::votes_only) .await; let broadcast_proposals_2 = playground .wait_for_messages(num_messages_to_send, NetworkPlayground::proposals_only) .await; let next_proposal: ProposalMsg<Vec<u64>> = ProposalUncheckedSignatures::<Vec<u64>>::try_from(broadcast_proposals_2[0].1.clone()) .unwrap() .into(); assert!(next_proposal.proposal().round() >= 2); }); } #[test] /// Upon startup, the first proposal is sent, voted by all the participants, QC is formed and /// then the next proposal is sent. fn basic_full_round_test() { basic_full_round(2, FixedProposer); } #[test] /// Basic happy path with multiple proposers fn happy_path_with_multi_proposer() { basic_full_round(2, MultipleOrderedProposers); } /// Verify the basic e2e flow: blocks are committed, txn manager is notified, block tree is /// pruned, restart the node and we can still continue. #[test] fn basic_commit_and_restart() { let runtime = consensus_runtime(); let mut playground = NetworkPlayground::new(runtime.handle().clone()); let mut nodes = SMRNode::start_num_nodes(2, &mut playground, RotatingProposer, false); let mut block_ids = vec![]; block_on(async { let num_rounds = 10; for round in 0..num_rounds { let _proposals = playground .wait_for_messages(1, NetworkPlayground::exclude_timeout_msg) .await; // A proposal is carrying a QC that commits a block of round - 3. if round >= 3 { let block_id_to_commit = block_ids[round - 3]; let commit_v1 = nodes[0].commit_cb_receiver.next().await.unwrap(); let commit_v2 = nodes[1].commit_cb_receiver.next().await.unwrap(); assert_eq!( commit_v1.ledger_info().consensus_block_id(), block_id_to_commit ); verify_finality_proof(&nodes[0], &commit_v1); assert_eq!( commit_v2.ledger_info().consensus_block_id(), block_id_to_commit ); verify_finality_proof(&nodes[1], &commit_v2); } // v1 and v2 send votes let votes = playground .wait_for_messages(1, NetworkPlayground::votes_only) .await; let vote_msg = VoteMsg::try_from(votes[0].1.clone()).unwrap(); block_ids.push(vote_msg.vote().vote_data().proposed().id()); } assert!( nodes[0].smr.block_store().unwrap().root().round() >= 7, "round of node 0 is {}", nodes[0].smr.block_store().unwrap().root().round() ); assert!( nodes[1].smr.block_store().unwrap().root().round() >= 7, "round of node 1 is {}", nodes[1].smr.block_store().unwrap().root().round() ); // This message is for proposal with round 11 to delivery the QC, but not gather the QC // so after restart, proposer will propose round 11 again. playground .wait_for_messages(1, NetworkPlayground::exclude_timeout_msg) .await; }); // create a new playground to avoid polling potential vote messages in previous one. 
playground = NetworkPlayground::new(runtime.handle().clone()); nodes = nodes .into_iter() .map(|node| node.restart(&mut playground)) .collect(); block_on(async { let mut round = 0; while round < 10 {
// The loop is to ensure that we collect a network vote(enough for QC with 2 nodes) then // move the round forward because there's a race that node1 may or may not // reject round 11 depends on whether it voted for before restart. loop { let msg = playground .wait_for_messages(1, NetworkPlayground::exclude_timeout_msg) .await; if let Some(ConsensusMsg_oneof::VoteMsg(_)) = msg[0].1.message { round += 1; break; } } } // Because of the race, we can't assert the commit reliably, instead we assert // both nodes commit to at least round 17. // We cannot reliable wait for the event of "commit & prune": the only thing that we know is // that after receiving the vote for round 20, the root should be at least height 16. assert!( nodes[0].smr.block_store().unwrap().root().round() >= 17, "round of node 0 is {}", nodes[0].smr.block_store().unwrap().root().round() ); assert!( nodes[1].smr.block_store().unwrap().root().round() >= 17, "round of node 1 is {}", nodes[1].smr.block_store().unwrap().root().round() ); }); } #[test] fn basic_block_retrieval() { let runtime = consensus_runtime(); let mut playground = NetworkPlayground::new(runtime.handle().clone()); // This test depends on the fixed proposer on nodes[0] let mut nodes = SMRNode::start_num_nodes(4, &mut playground, FixedProposer, false); block_on(async move { let mut first_proposals = vec![]; // First three proposals are delivered just to nodes[0..2]. playground.drop_message_for(&nodes[0].signer.author(), nodes[3].signer.author()); for _ in 0..2 { playground .wait_for_messages(2, NetworkPlayground::proposals_only) .await; let votes = playground .wait_for_messages(2, NetworkPlayground::votes_only) .await; let vote_msg = VoteMsg::try_from(votes[0].1.clone()).unwrap(); let proposal_id = vote_msg.vote().vote_data().proposed().id(); first_proposals.push(proposal_id); } // The next proposal is delivered to all: as a result nodes[2] should retrieve the missing // blocks from nodes[0] and vote for the 3th proposal. playground.stop_drop_message_for(&nodes[0].signer.author(), &nodes[3].signer.author()); // Drop nodes[1]'s vote to ensure nodes[3] contribute to the quorum playground.drop_message_for(&nodes[0].signer.author(), nodes[1].signer.author()); playground .wait_for_messages(2, NetworkPlayground::proposals_only) .await; playground .wait_for_messages(2, NetworkPlayground::votes_only) .await; // The first two proposals should be present at nodes[3] via block retrieval for block_id in &first_proposals { assert!(nodes[3] .smr .block_store() .unwrap() .get_block(*block_id) .is_some()); } // 4th proposal will get quorum and verify that nodes[3] commits the first proposal. playground .wait_for_messages(2, NetworkPlayground::proposals_only) .await; playground .wait_for_messages(2, NetworkPlayground::votes_only) .await; if let Some(commit_v3) = nodes[3].commit_cb_receiver.next().await { assert_eq!( commit_v3.ledger_info().consensus_block_id(), first_proposals[0], ); } }); } #[test] fn block_retrieval_with_timeout() { let runtime = consensus_runtime(); let mut playground = NetworkPlayground::new(runtime.handle().clone()); let nodes = SMRNode::start_num_nodes(4, &mut playground, FixedProposer, false); block_on(async move { let mut first_proposals = vec![]; // First three proposals are delivered just to nodes[0..2]. 
playground.drop_message_for(&nodes[0].signer.author(), nodes[3].signer.author()); for _ in 0..2 { playground .wait_for_messages(2, NetworkPlayground::proposals_only) .await; let votes = playground .wait_for_messages(2, NetworkPlayground::votes_only) .await; let vote_msg = VoteMsg::try_from(votes[0].1.clone()).unwrap(); let proposal_id = vote_msg.vote().vote_data().proposed().id(); first_proposals.push(proposal_id); } // stop proposals from nodes[0] playground.drop_message_for(&nodes[0].signer.author(), nodes[1].signer.author()); playground.drop_message_for(&nodes[0].signer.author(), nodes[2].signer.author()); // Wait until {1, 2, 3} timeout to {0 , 1, 2, 3} excluding self messages playground .wait_for_messages(3 * 3, NetworkPlayground::timeout_votes_only) .await; // the first two proposals should be present at nodes[3] for block_id in &first_proposals { assert!(nodes[2] .smr .block_store() .unwrap() .get_block(*block_id) .is_some()); } }); } #[test] /// Verify that a node that is lagging behind can catch up by state sync some blocks /// have been pruned by the others. fn basic_state_sync() { let runtime = consensus_runtime(); let mut playground = NetworkPlayground::new(runtime.handle().clone()); // This test depends on the fixed proposer on nodes[0] let mut nodes = SMRNode::start_num_nodes(4, &mut playground, FixedProposer, false); block_on(async move { let mut proposals = vec![]; // The first ten proposals are delivered just to nodes[0..2], which should commit // the first seven blocks. playground.drop_message_for(&nodes[0].signer.author(), nodes[3].signer.author()); for _ in 0..10 { playground .wait_for_messages(2, NetworkPlayground::proposals_only) .await; let votes = playground .wait_for_messages(2, NetworkPlayground::votes_only) .await; let vote_msg = VoteMsg::try_from(votes[0].1.clone()).unwrap(); let proposal_id = vote_msg.vote().vote_data().proposed().id(); proposals.push(proposal_id); } let mut node0_commits = vec![]; for i in 0..7 { node0_commits.push( nodes[0] .commit_cb_receiver .next() .await .unwrap() .ledger_info() .consensus_block_id(), ); assert_eq!(node0_commits[i], proposals[i]); } // Next proposal is delivered to all: as a result nodes[3] should be able to retrieve the // missing blocks from nodes[0] and commit the first eight proposals as well. playground.stop_drop_message_for(&nodes[0].signer.author(), &nodes[3].signer.author()); playground .wait_for_messages(3, NetworkPlayground::proposals_only) .await; let mut node3_commits = vec![]; // The only notification we will receive is for the last (8th) proposal. node3_commits.push( nodes[3] .commit_cb_receiver .next() .await .unwrap() .ledger_info() .consensus_block_id(), ); assert_eq!(node3_commits[0], proposals[7]); // wait for the vote from all including node3 playground .wait_for_messages(3, NetworkPlayground::votes_only) .await; playground .wait_for_messages(3, NetworkPlayground::proposals_only) .await; // Verify that node 3 has notified its mempool about the committed txn of next block. 
nodes[3] .mempool_notif_receiver .next() .await .expect("Fail to be notified by a mempool committed txns"); assert_eq!(nodes[3].mempool.get_committed_txns().len(), 50); }); } #[test] /// Verify that a node syncs up when receiving a timeout message with a relevant ledger info fn state_sync_on_timeout() { let runtime = consensus_runtime(); let mut playground = NetworkPlayground::new(runtime.handle().clone()); // This test depends on the fixed proposer on nodes[0] let mut nodes = SMRNode::start_num_nodes(4, &mut playground, FixedProposer, false); block_on(async move { // The first ten proposals are delivered just to nodes[0..2], which should commit // the first seven blocks. // nodes[2] should be fully disconnected from the others s.t. its timeouts would not trigger // SyncInfo delivery ahead of time. playground.drop_message_for(&nodes[0].signer.author(), nodes[3].signer.author()); playground.drop_message_for(&nodes[1].signer.author(), nodes[3].signer.author()); playground.drop_message_for(&nodes[2].signer.author(), nodes[3].signer.author()); for _ in 0..10 { playground .wait_for_messages(2, NetworkPlayground::proposals_only) .await; playground .wait_for_messages(2, NetworkPlayground::votes_only) .await; } // Stop dropping messages from node 1 to node 0: next time node 0 sends a timeout to node 1, // node 1 responds with a SyncInfo that carries a LedgerInfo for commit at round >= 7. playground.stop_drop_message_for(&nodes[1].signer.author(), &nodes[3].signer.author()); // Wait for the sync info message from 1 to 3 playground .wait_for_messages(1, NetworkPlayground::sync_info_only) .await; // In the end of the state synchronization node 3 should have commit at round >= 7. assert!( nodes[3] .commit_cb_receiver .next() .await .unwrap() .ledger_info() .round() >= 7 ); }); } #[test] /// Verify that in case a node receives timeout message from a remote peer that is lagging behind, /// then this node sends a sync info, which helps the remote to properly catch up. fn sync_info_sent_if_remote_stale() { let runtime = consensus_runtime(); let mut playground = NetworkPlayground::new(runtime.handle().clone()); // This test depends on the fixed proposer on nodes[0] // We're going to drop messages from 0 to 2: as a result we expect node 2 to broadcast timeout // messages, for which node 1 should respond with sync_info, which should eventually // help node 2 to catch up. let mut nodes = SMRNode::start_num_nodes(4, &mut playground, FixedProposer, false); block_on(async move { playground.drop_message_for(&nodes[0].signer.author(), nodes[2].signer.author()); // Don't want to receive timeout messages from 2 until 1 has some real stuff to contribute. playground.drop_message_for(&nodes[2].signer.author(), nodes[1].signer.author()); for _ in 0..10 { playground .wait_for_messages(2, NetworkPlayground::proposals_only) .await; playground .wait_for_messages(2, NetworkPlayground::votes_only) .await; } // Wait for some timeout message from 2 to {0, 1}. playground.stop_drop_message_for(&nodes[2].signer.author(), &nodes[1].signer.author()); playground .wait_for_messages(3, NetworkPlayground::timeout_votes_only) .await; // Now wait for a sync info message from 1 to 2. playground .wait_for_messages(1, NetworkPlayground::sync_info_only) .await; let node2_commit = nodes[2] .commit_cb_receiver .next() .await .unwrap() .ledger_info() .consensus_block_id(); // Close node 1 channel for new commit callbacks and iterate over all its commits: we should // find the node 2 commit there. 
let mut found = false; nodes[1].commit_cb_receiver.close(); while let Ok(Some(node1_commit)) = nodes[1].commit_cb_receiver.try_next() { let node1_commit_id = node1_commit.ledger_info().consensus_block_id(); if node1_commit_id == node2_commit { found = true; break; } } assert_eq!(found, true); }); } #[test] /// Verify that a QC can be formed by aggregating the votes piggybacked by TimeoutMsgs fn aggregate_timeout_votes() { let runtime = consensus_runtime(); let mut playground = NetworkPlayground::new(runtime.handle().clone()); // The proposer node[0] sends its proposal to nodes 1 and 2, which cannot respond back, // because their messages are dropped. // Upon timeout nodes 1 and 2 are sending timeout messages with attached votes for the original // proposal: both can then aggregate the QC for the first proposal. let nodes = SMRNode::start_num_nodes(3, &mut playground, FixedProposer, false); block_on(async move { playground.drop_message_for(&nodes[1].signer.author(), nodes[0].signer.author()); playground.drop_message_for(&nodes[2].signer.author(), nodes[0].signer.author()); // Node 0 sends proposals to nodes 1 and 2 let msg = playground .wait_for_messages(2, NetworkPlayground::proposals_only) .await; let first_proposal: ProposalMsg<Vec<u64>> = ProposalUncheckedSignatures::<Vec<u64>>::try_from(msg[0].1.clone()) .unwrap() .into(); let proposal_id = first_proposal.proposal().id(); // wait for node 0 send vote to 1 and 2 playground .wait_for_messages(2, NetworkPlayground::votes_only) .await; playground.drop_message_for(&nodes[0].signer.author(), nodes[1].signer.author()); playground.drop_message_for(&nodes[0].signer.author(), nodes[2].signer.author()); // Wait for the timeout messages sent by 1 and 2 to each other playground .wait_for_messages(2, NetworkPlayground::timeout_votes_only) .await; // Node 0 cannot form a QC assert_eq!( nodes[0] .smr .block_store() .unwrap() .highest_quorum_cert() .certified_block() .round(), 0 ); // Nodes 1 and 2 form a QC and move to the next round. // Wait for the timeout messages from 1 and 2 playground .wait_for_messages(2, NetworkPlayground::timeout_votes_only) .await; assert_eq!( nodes[1] .smr .block_store() .unwrap() .highest_quorum_cert() .certified_block() .id(), proposal_id ); assert_eq!( nodes[2] .smr .block_store() .unwrap() .highest_quorum_cert() .certified_block() .id(), proposal_id ); }); } #[test] /// Verify that the NIL blocks formed during timeouts can be used to form commit chains. fn chain_with_nil_blocks() { let runtime = consensus_runtime(); let mut playground = NetworkPlayground::new(runtime.handle().clone()); // The proposer node[0] sends 3 proposals, after that its proposals are dropped and it cannot // communicate with nodes 1, 2, 3. Nodes 1, 2, 3 should be able to commit the 3 proposal // via NIL blocks commit chain. let num_nodes = 4; let nodes = SMRNode::start_num_nodes(num_nodes, &mut playground, FixedProposer, false); let num_proposal = 3; block_on(async move { // Wait for the first 3 proposals (each one sent to two nodes). playground .wait_for_messages( (num_nodes - 1) * num_proposal, NetworkPlayground::proposals_only, ) .await; playground.drop_message_for(&nodes[0].signer.author(), nodes[1].signer.author()); playground.drop_message_for(&nodes[0].signer.author(), nodes[2].signer.author()); playground.drop_message_for(&nodes[0].signer.author(), nodes[3].signer.author()); // After the first timeout nodes 1, 2, 3 should have last_proposal votes and // they can generate its QC independently. 
// Upon the second timeout nodes 1, 2, 3 send NIL block_1 with a QC to last_proposal. // Upon the third timeout nodes 1, 2, 3 send NIL block_2 with a QC to NIL block_1. // G <- p1 <- p2 <- p3 <- NIL1 <- NIL2 let num_timeout = 3; playground .wait_for_messages( // all-to-all broadcast except nodes 0's messages are dropped and self messages don't count (num_nodes - 1) * (num_nodes - 1) * num_timeout, NetworkPlayground::timeout_votes_only, ) .await; // We can't guarantee the timing of the last timeout processing, the only thing we can // look at is that HQC round is at least 4. assert!( nodes[2] .smr .block_store() .unwrap() .highest_quorum_cert() .certified_block() .round() >= 4 ); assert!(nodes[2].smr.block_store().unwrap().root().round() >= 1) }); } #[test] /// Test secondary proposal processing fn secondary_proposers() { let runtime = consensus_runtime(); let mut playground = NetworkPlayground::new(runtime.handle().clone()); let num_nodes = 4; let mut nodes = SMRNode::start_num_nodes(num_nodes, &mut playground, MultipleOrderedProposers, false); block_on(async move { // Node 0 is disconnected. playground.drop_message_for(&nodes[0].signer.author(), nodes[1].signer.author()); playground.drop_message_for(&nodes[0].signer.author(), nodes[2].signer.author()); playground.drop_message_for(&nodes[0].signer.author(), nodes[3].signer.author()); // Run a system until node 0 is a designated primary proposer. In this round the // secondary proposal should be voted for and attached to the timeout message. let timeout_votes = playground .wait_for_messages( (num_nodes - 1) * (num_nodes - 1), NetworkPlayground::timeout_votes_only, ) .await; let mut secondary_proposal_ids = vec![]; for msg in timeout_votes { let vote_msg = VoteMsg::try_from(msg.1).unwrap(); assert!(vote_msg.vote().is_timeout()); secondary_proposal_ids.push(vote_msg.vote().vote_data().proposed().id()); } assert_eq!( secondary_proposal_ids.len(), (num_nodes - 1) * (num_nodes - 1) ); let secondary_proposal_id = secondary_proposal_ids[0]; for id in secondary_proposal_ids { assert_eq!(secondary_proposal_id, id); } // The secondary proposal id should get committed at some point in the future: // 10 rounds should be more than enough. Note that it's hard to say what round is going to // have 2 proposals and what round is going to have just one proposal because we don't want // to predict the rounds with proposer 0 being a leader. for _ in 0..10 { playground .wait_for_messages(num_nodes - 1, NetworkPlayground::votes_only) .await; // Retrieve all the ids committed by the node to check whether secondary_proposal_id // has been committed. while let Ok(Some(li)) = nodes[1].commit_cb_receiver.try_next() { if li.ledger_info().consensus_block_id() == secondary_proposal_id { return; } } } panic!("Did not commit the secondary proposal"); }); } #[test] /// Test we can do reconfiguration if execution returns new validator set. 
fn reconfiguration_test() { let runtime = consensus_runtime(); let mut playground = NetworkPlayground::new(runtime.handle().clone()); // This quorum size needs to be 2f+1 because we derive the ValidatorVerifier from ValidatorSet at network.rs // which doesn't support specializing quorum power let _nodes = SMRNode::start_num_nodes(4, &mut playground, MultipleOrderedProposers, true); let target_epoch = 10; block_on(async move { // Test we can survive a few epochs loop { let mut msg = playground .wait_for_messages(1, NetworkPlayground::take_all) .await; if let Some(ConsensusMsg_oneof::EpochChange(proof)) = msg.pop().unwrap().1.message { let proof = ValidatorChangeEventWithProof::try_from(proof).unwrap(); if proof.epoch().unwrap() == target_epoch { break; } } } }); }
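Several comments in the tests above lean on standard BFT arithmetic: with n = 3f + 1 validators the system tolerates f faults, and a quorum certificate needs 2f + 1 votes (hence "quorum size needs to be 2f+1" for the 4-node runs). A small Python sketch of that bookkeeping; the helper names are made up for this note, not part of the consensus crate:

def max_faulty(n):
    # BFT bound: n = 3f + 1  ->  f = (n - 1) // 3
    return (n - 1) // 3

def quorum_size(n):
    return 2 * max_faulty(n) + 1

def forms_qc(voters, n):
    return len(set(voters)) >= quorum_size(n)

assert quorum_size(4) == 3          # the 4-node tests need 3 matching votes for a QC
assert forms_qc({'a', 'b', 'c'}, 4)
assert not forms_qc({'a', 'b'}, 4)  # one partitioned node still allows progress, two do not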
MultiKeyData.py
class MultiKeyData(object): def __init__(self): self._keys = {} self._values = {} self._links = {} self._index = 0 def __add_item(self, key, value): if key not in self._keys: self._keys[key] = self._index self._values[self._index] = value self._links[self._index] = 1 return 1 else: self._values[self._keys[key]] = value return 0 def multi_set(self, keys, value): count = 0 for key in keys: count += self.__add_item(key, value) if count>0: self._links[self._index] += count-1 self._index += 1 def get_values(self): return list(self._values.values()) def get_keys(self): return list(self._keys.keys()) def __getitem__(self, key): return self._values[self._keys[key]] if key in self._keys else None def __setitem__(self, key, value): self._index += self.__add_item(key, value) def __delitem__(self, key):
            del self._links[index]
            del self._values[index]

    def __str__(self):
        return f'keys: {self._keys}\n' \
               f'values: {self._values}\n' \
               f'links: {self._links}'


if __name__ == '__main__':
    print('MultiKeyData Test')
    data = MultiKeyData()
    data['a'] = 101
    data['b'] = 201
    print("data['b']: ", data['b'])
    print('-------------')
    print('data: ')
    print(data)
    print('-------------')
    data.multi_set(('a', 'b', 'c', 'd'), 'hello, world!')
    print(data)
    print('-------------')
    data.multi_set(('a', 'b', 'c', 'd'), 'hello, world!')
    print(data)
    print('-------------')
    data.multi_set(('a', 'b', 'c', 'd', 'e'), 'hello, world!')
    print(data)
    print('-------------')
    del data['e']
    print(data)
    print('-------------')
    print('keys: ', data.get_keys())
    print('values: ', data.get_values())
        index = self._keys[key]
        self._links[index] -= 1
        del self._keys[key]
        if self._links[index] == 0:
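A compact demonstration of the link counting MultiKeyData implements above: the stored value survives until the last key aliasing it is deleted.

data = MultiKeyData()
data.multi_set(('x', 'y'), 'shared')
assert data['x'] == data['y'] == 'shared'

del data['x']                    # one link dropped; the value survives
assert data['y'] == 'shared'
assert data['x'] is None         # __getitem__ returns None for unknown keys

del data['y']                    # last link dropped; value storage is freed too
assert data.get_values() == []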
rendering.rs
use crate::{ components::{Mesh, TransformMatrix, Visible}, resources::VulkanContext, resources::{render_context::create_push_constant, RenderContext}, }; use ash::vk; use hecs::{PreparedQuery, With, World}; pub fn rendering_system( query: &mut PreparedQuery<With<Visible, (&mut Mesh, &TransformMatrix)>>, world: &mut World, vulkan_context: &VulkanContext, swapchain_image_index: usize, render_context: &RenderContext, ) -> () { for (_, (mesh, transform_matrix)) in query.query_mut(world) { let device = &vulkan_context.device; let command_buffer = render_context.frames[swapchain_image_index].command_buffer; unsafe { mesh.ubo_data.transform = transform_matrix.0.clone(); mesh.ubo_buffer .update(&vulkan_context, &[mesh.ubo_data]) .unwrap(); // Bind mesh descriptor sets device.cmd_bind_descriptor_sets( command_buffer, vk::PipelineBindPoint::GRAPHICS, render_context.pipeline_layout, 2, &mesh.descriptor_sets, &[], ); for primitive in &mesh.primitives { // Bind vertex and index buffers device.cmd_bind_vertex_buffers( command_buffer, 0, &[primitive.vertex_buffer.handle], &[0], ); device.cmd_bind_index_buffer( command_buffer, primitive.index_buffer.handle, 0, vk::IndexType::UINT32, ); // Bind texture descriptor sets device.cmd_bind_descriptor_sets( command_buffer, vk::PipelineBindPoint::GRAPHICS, render_context.pipeline_layout, 1, &[primitive.texture_descriptor_set], &[], ); // Push constants let material_push_constant = create_push_constant(&primitive.material); device.cmd_push_constants( command_buffer, render_context.pipeline_layout, vk::ShaderStageFlags::FRAGMENT, 0, material_push_constant, ); device.cmd_draw_indexed(command_buffer, primitive.indicies_count, 1, 0, 0, 1); } } } } #[cfg(test)] mod tests { use std::{collections::hash_map::DefaultHasher, hash::Hasher}; use super::*; use ash::vk::Handle; use image::{jpeg::JpegEncoder, DynamicImage, RgbaImage}; use nalgebra::UnitQuaternion; use openxr::{Fovf, Quaternionf, Vector3f}; use crate::{ buffer::Buffer, gltf_loader, resources::RenderContext, scene_data::SceneParams, swapchain::Swapchain, systems::{update_parent_transform_matrix_system, update_transform_matrix_system}, util::get_from_device_memory, COLOR_FORMAT, }; #[test] pub fn test_rendering_pbr() { let vulkan_context = VulkanContext::testing().unwrap(); let resolution = vk::Extent2D { height: 800, width: 800, }; // Create an image with vulkan_context let image = vulkan_context .create_image( COLOR_FORMAT, &resolution, vk::ImageUsageFlags::COLOR_ATTACHMENT | vk::ImageUsageFlags::TRANSFER_SRC, 2, 1, ) .unwrap(); vulkan_context .set_debug_name(vk::ObjectType::IMAGE, image.handle.as_raw(), "Screenshot") .unwrap(); let swapchain = Swapchain { images: vec![image.handle], resolution, }; let mut render_context = RenderContext::new_from_swapchain(&vulkan_context, &swapchain).unwrap(); // Get a model from GLTF // let gltf_data: Vec<(&[u8], &[u8])> = vec![( // include_bytes!("../../../test_assets/Sponza.gltf"), // include_bytes!("../../../test_assets/Sponza.bin"), // )]; let gltf_data: Vec<&[u8]> = vec![include_bytes!("../../../test_assets/damaged_helmet.glb")]; let mut models = gltf_loader::load_models_from_glb( &gltf_data, &vulkan_context, &render_context.descriptor_set_layouts, ) .unwrap(); let (_, mut world) = models.drain().next().unwrap(); let params = vec![ ("Normal", 0.0), ("Diffuse", 1.0), ("F", 2.0), ("G", 3.0), ("D", 4.0), ("Specular", 5.0), ]; for (name, debug_view_equation) in &params { render_object_with_debug_equation( &vulkan_context, &mut render_context, &mut world, resolution, 
image.clone(), name, *debug_view_equation, ); } } fn render_object_with_debug_equation( vulkan_context: &VulkanContext, render_context: &mut RenderContext, world: &mut World, resolution: vk::Extent2D, image: crate::image::Image, name: &str, debug_view_equation: f32, ) { // Render the scene schedule(render_context, vulkan_context, debug_view_equation, world); // Save the resulting image to the disk and get its hash, along with a "known good" hash // of what the image *should* be. let (output_hash, known_good_hash) = save_image_to_disk(resolution, vulkan_context, image, name); assert_eq!(output_hash, known_good_hash); } fn save_image_to_disk( resolution: vk::Extent2D, vulkan_context: &VulkanContext, image: crate::image::Image, name: &str, ) -> (u64, u64) { let size = (resolution.height * resolution.width * 4) as usize; let image_data = vec![0; size]; let buffer = Buffer::new( &vulkan_context, &image_data, vk::BufferUsageFlags::TRANSFER_DST,
vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, vk::ImageLayout::TRANSFER_SRC_OPTIMAL, 1, 1, ); vulkan_context.copy_image_to_buffer( &image, vk::ImageLayout::TRANSFER_SRC_OPTIMAL, buffer.handle, ); let image_bytes = unsafe { get_from_device_memory(&vulkan_context, &buffer) }.to_vec(); let image_from_vulkan = DynamicImage::ImageRgba8( RgbaImage::from_raw(resolution.width, resolution.height, image_bytes).unwrap(), ); let output_path = format!("../test_assets/render_{}.jpg", name); { let output_path = std::path::Path::new(&output_path); let mut file = std::fs::File::create(output_path).unwrap(); let mut jpeg_encoder = JpegEncoder::new(&mut file); jpeg_encoder.encode_image(&image_from_vulkan).unwrap(); } let output_hash = hash_file(&output_path); let known_good_path = format!("../test_assets/render_{}_known_good.jpg", name); let known_good_hash = hash_file(&known_good_path); (output_hash, known_good_hash) } fn schedule( render_context: &mut RenderContext, vulkan_context: &VulkanContext, debug_view_equation: f32, world: &mut World, ) { // SPONZA // let rotation: mint::Quaternion<f32> = // UnitQuaternion::from_euler_angles(0., 90_f32.to_radians(), 0.).into(); // let position = Vector3f { // x: 0.0, // y: 1.4, // z: 0.0, // }; // HELMET let rotation: mint::Quaternion<f32> = UnitQuaternion::from_euler_angles(0., 45_f32.to_radians(), 0.).into(); let position = Vector3f { x: 0.8, y: 1.4, z: 0.8, }; let view = openxr::View { pose: openxr::Posef { orientation: Quaternionf::from(rotation), position, }, fov: Fovf { angle_up: 45.0_f32.to_radians(), angle_down: -45.0_f32.to_radians(), angle_left: -45.0_f32.to_radians(), angle_right: 45.0_f32.to_radians(), }, }; let views = vec![view.clone(), view]; render_context .update_scene_data(&views, &vulkan_context) .unwrap(); render_context .scene_params_buffer .update( &vulkan_context, &[SceneParams { debug_view_equation, ..Default::default() }], ) .unwrap(); render_context.begin_frame(&vulkan_context, 0); render_context.begin_pbr_render_pass(&vulkan_context, 0); update_transform_matrix_system(&mut Default::default(), world); update_parent_transform_matrix_system( &mut Default::default(), &mut Default::default(), world, ); rendering_system( &mut Default::default(), world, vulkan_context, 0, render_context, ); render_context.end_pbr_render_pass(&vulkan_context, 0); render_context.end_frame(&vulkan_context, 0); } fn hash_file(file_path: &str) -> u64 { let mut hasher = DefaultHasher::new(); let bytes = std::fs::read(&file_path).unwrap(); bytes.iter().for_each(|b| hasher.write_u8(*b)); return hasher.finish(); } }
) .unwrap(); vulkan_context.transition_image_layout( image.handle,
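The test above compares a rendered frame with a known-good image by hashing both files byte-for-byte. The same pattern in Python, using hashlib instead of Rust's DefaultHasher; the paths are placeholders:

import hashlib

def hash_file(path):
    h = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            h.update(chunk)
    return h.hexdigest()

# render_output.jpg / render_known_good.jpg are placeholder file names
# output = hash_file('render_output.jpg')
# expected = hash_file('render_known_good.jpg')
# assert output == expected, 'rendered frame drifted from the known-good image'

Byte-exact hashes are simple but brittle: a different JPEG encoder version will fail the comparison even when the render is visually identical, which is why perceptual diffs are a common alternative for this kind of screenshot test.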
Element.js
import './Element.scss';

import PropTypes from 'prop-types';
import React from 'react';

const Element = ({
  Tag,
  children,
  className,
  flex,
  dirColumn,
  spaceAround,
  spaceBetween,
  centered,
  autoMargin,
  autoBottomMargin,
  horizontalHalf,
  flexStart,
  noWrap,
  centeredInDesktop,
  relative,
  ...props
}) => {
  const classes = [];

  flex && classes.push('element--flex');
  autoMargin && classes.push('element--auto-margin');
  autoBottomMargin && classes.push('element--auto-bottom-margin');
  dirColumn && classes.push('element--column');
  spaceAround && classes.push('element--space-around');
  spaceBetween && classes.push('element--space-between');
  centered && classes.push('element--centered');
  horizontalHalf && classes.push('element--horizontal-half');
  flexStart && classes.push('element--flex-start');
  noWrap && classes.push('element--no-wrap');
  centeredInDesktop && classes.push('element--centered-in-desktop');
  relative && classes.push('element--relative');

  return (
    <Tag className={`${className} ${classes.join(' ')}`} {...props}>
      {children}
    </Tag>
  );
};

Element.defaultProps = {
  className: '',
  Tag: 'div',
};

Element.propTypes = {
  Tag: PropTypes.string,
  children: PropTypes.node,
  className: PropTypes.string,
  flex: PropTypes.bool,
  spaceAround: PropTypes.bool,
  spaceBetween: PropTypes.bool,
  dirColumn: PropTypes.bool,
  centered: PropTypes.bool,
  horizontalHalf: PropTypes.bool,
  autoMargin: PropTypes.bool,
  autoBottomMargin: PropTypes.bool,
  flexStart: PropTypes.bool,
  noWrap: PropTypes.bool,
  centeredInDesktop: PropTypes.bool,
  relative: PropTypes.bool,
};

export default Element;
artifacts.py
from xml.etree import ElementTree as ET

from gomatic.mixins import CommonEqualityMixin


def fetch_artifact_src_from(element):
    if 'srcfile' in element.attrib:
        return FetchArtifactFile(element.attrib['srcfile'])
    if 'srcdir' in element.attrib:
        return FetchArtifactDir(element.attrib['srcdir'])
    raise RuntimeError("Expected srcfile or srcdir. Do not know what src type to use for " + ET.tostring(element, 'utf-8'))


def fetch_properties_from(element):
    props = {}
    for prop in element.iter('property'):
        props[prop.find('key').text] = prop.find('value').text
    return props if props else None


class FetchArtifactFile(CommonEqualityMixin):
    def __init__(self, src_value):
        self.__src_value = src_value

    def __repr__(self):
        return 'FetchArtifactFile("%s")' % self.__src_value

    @property
    def as_xml_type_and_value(self):
        return "srcfile", self.__src_value
class FetchArtifactDir(CommonEqualityMixin):
    def __init__(self, src_value):
        self.__src_value = src_value

    def __repr__(self):
        return 'FetchArtifactDir("%s")' % self.__src_value

    @property
    def as_xml_type_and_value(self):
        return "srcdir", self.__src_value


class Artifact(CommonEqualityMixin):
    def __init__(self, src=None, dest=None, id=None, store_id=None, config=None, artifact_type='build'):
        self._src = src
        self._dest = dest
        self._artifact_id = id
        self._store_id = store_id
        self._config = config
        self._type = artifact_type

    def __repr__(self):
        if self._artifact_id is not None:
            if self._config is None:
                return '%s("%s", "%s")' % (self.constructor, self._artifact_id, self._store_id)
            else:
                return '%s("%s", "%s", %s)' % (self.constructor, self._artifact_id, self._store_id, self._config)
        if self._dest is None:
            return '%s("%s")' % (self.constructor, self._src)
        else:
            return '%s("%s", "%s")' % (self.constructor, self._src, self._dest)

    @property
    def constructor(self):
        if self._type == "build":
            return "BuildArtifact"
        if self._type == "test":
            return "TestArtifact"
        if self._type == "external":
            return "ExternalArtifact"
        raise RuntimeError("Unknown artifact type %s" % self._type)

    def append_to(self, element, gocd_18_3_and_above=False):
        if gocd_18_3_and_above:
            self._append_to_gocd_18_3_and_above(element)
        else:
            self._append_to_gocd_18_2_and_below(element)

    def _append_to_gocd_18_3_and_above(self, element):
        if self._artifact_id is not None:
            if self._config is None:
                element.append(ET.fromstring('<artifact id="%s" storeId="%s" type="%s" />' % (self._artifact_id, self._store_id, self._type)))
            else:
                properties_xml = "".join(["<property><key>{}</key><value>{}</value></property>".format(k, str(v or '')) for k, v in self._config.items()])
                new_element = ET.fromstring('<artifact id="{}" storeId="{}" type="{}"><configuration>{}</configuration></artifact>'.format(self._artifact_id, self._store_id, self._type, properties_xml))
                element.append(new_element)
        elif self._dest is None:
            element.append(ET.fromstring('<artifact src="%s" type="%s" />' % (self._src, self._type)))
        else:
            element.append(ET.fromstring('<artifact src="%s" dest="%s" type="%s" />' % (self._src, self._dest, self._type)))

    def _append_to_gocd_18_2_and_below(self, element):
        if not self._type == 'build' and not self._type == 'test':
            raise RuntimeError("Artifact type '%s' not supported in GoCD 18.2 and below" % self._type)
        tag = 'artifact' if self._type == 'build' else 'test'
        if self._dest is None:
            element.append(ET.fromstring('<%s src="%s" />' % (tag, self._src)))
        else:
            element.append(ET.fromstring('<%s src="%s" dest="%s" />' % (tag, self._src, self._dest)))

    @classmethod
    def get_artifact_for(cls, element):
        src = element.attrib.get('src', None)
        dest = element.attrib.get('dest', None)
        id = element.attrib.get('id', None)
        store_id = element.attrib.get('storeId', None)
        artifact_type_attribute = element.attrib.get('type', None)
        if id is not None:
            return cls(id=id, store_id=store_id, config=fetch_properties_from(element), artifact_type=artifact_type_attribute)
        if artifact_type_attribute is None:
            _type = 'build' if element.tag == 'artifact' else 'test'
            return cls(src=src, dest=dest, artifact_type=_type)
        else:
            return cls(src=src, dest=dest, artifact_type=artifact_type_attribute)

    @classmethod
    def get_build_artifact(cls, src, dest=None):
        return cls(src=src, dest=dest, artifact_type='build')

    @classmethod
    def get_test_artifact(cls, src, dest=None):
        return cls(src=src, dest=dest, artifact_type='test')

    @classmethod
    def get_external_artifact(cls, id, store_id, config=None):
        return cls(id=id, store_id=store_id, config=config, artifact_type='external')


ArtifactFor = Artifact.get_artifact_for
BuildArtifact = Artifact.get_build_artifact
TestArtifact = Artifact.get_test_artifact
ExternalArtifact = Artifact.get_external_artifact
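For context, a short usage sketch of the classes above (not part of artifacts.py): build two artifacts and serialize them into a GoCD XML element. The surrounding <artifacts> element and all values are fabricated for the example.

# Illustrative sketch, assuming the module above is importable.
from xml.etree import ElementTree as ET

parent = ET.fromstring('<artifacts />')
BuildArtifact('target/app.jar', 'dist').append_to(parent, gocd_18_3_and_above=True)
ExternalArtifact('docker-image', 'dockerhub', {'Image': 'app', 'Tag': 'latest'}).append_to(parent, gocd_18_3_and_above=True)
print(ET.tostring(parent).decode())
# first child: <artifact src="target/app.jar" dest="dist" type="build" />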
metrics.go
// Copyright 2020, OpenTelemetry Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package elastic contains an opentelemetry-collector exporter // for Elastic APM. package elastic // import "github.com/open-telemetry/opentelemetry-collector-contrib/exporter/elasticexporter/internal/translator/elastic" import ( "sort" "strings" "time" "go.elastic.co/apm/model" "go.elastic.co/fastjson" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/pmetric" ) // EncodeMetrics encodes an OpenTelemetry metrics slice, and instrumentation // library information, as one or more metricset lines, writing to w. // // TODO(axw) otlpLibrary is currently not used. We should consider recording // it as metadata. func EncodeMetrics(otlpMetrics pmetric.MetricSlice, otlpLibrary pcommon.InstrumentationScope, w *fastjson.Writer) (dropped int, _ error) { var metricsets metricsets for i := 0; i < otlpMetrics.Len(); i++ { metric := otlpMetrics.At(i) name := metric.Name() switch metric.DataType() { case pmetric.MetricDataTypeGauge: doubleGauge := metric.Gauge() dps := doubleGauge.DataPoints() for i := 0; i < dps.Len(); i++ { dp := dps.At(i) var val float64 switch dp.ValueType() { case pmetric.MetricValueTypeDouble: val = dp.DoubleVal() case pmetric.MetricValueTypeInt: val = float64(dp.IntVal()) } metricsets.upsert(model.Metrics{ Timestamp: asTime(dp.Timestamp()), Labels: asStringMap(dp.Attributes()), Samples: map[string]model.Metric{name: { Value: val, }}, }) } case pmetric.MetricDataTypeSum: doubleSum := metric.Sum() dps := doubleSum.DataPoints() for i := 0; i < dps.Len(); i++ { dp := dps.At(i) var val float64 switch dp.ValueType() { case pmetric.MetricValueTypeDouble: val = dp.DoubleVal() case pmetric.MetricValueTypeInt: val = float64(dp.IntVal()) } metricsets.upsert(model.Metrics{ Timestamp: asTime(dp.Timestamp()), Labels: asStringMap(dp.Attributes()), Samples: map[string]model.Metric{name: { Value: val, }}, }) } case pmetric.MetricDataTypeHistogram: // TODO(axw) requires https://github.com/elastic/apm-server/issues/3195 doubleHistogram := metric.Histogram() dropped += doubleHistogram.DataPoints().Len() default: // Unknown type, so just increment dropped by 1 as a best effort. dropped++ } } for _, metricset := range metricsets { w.RawString(`{"metricset":`) if err := metricset.MarshalFastJSON(w); err != nil { return dropped, err } w.RawString("}\n") } return dropped, nil } func
asTime(in pcommon.Timestamp) model.Time {
	return model.Time(time.Unix(0, int64(in)))
}

func asStringMap(in pcommon.Map) model.StringMap {
	var out model.StringMap
	in.Sort()
	in.Range(func(k string, v pcommon.Value) bool {
		out = append(out, model.StringMapItem{
			Key:   k,
			Value: v.AsString(),
		})
		return true
	})
	return out
}

type metricsets []model.Metrics

func (ms *metricsets) upsert(m model.Metrics) {
	i := ms.search(m)
	if i < len(*ms) && compareMetricsets((*ms)[i], m) == 0 {
		existing := (*ms)[i]
		for k, v := range m.Samples {
			existing.Samples[k] = v
		}
	} else {
		head := (*ms)[:i]
		tail := append([]model.Metrics{m}, (*ms)[i:]...)
		*ms = append(head, tail...)
	}
}

func (ms *metricsets) search(m model.Metrics) int {
	return sort.Search(len(*ms), func(i int) bool {
		return compareMetricsets((*ms)[i], m) >= 0
	})
}

func compareMetricsets(a, b model.Metrics) int {
	atime, btime := time.Time(a.Timestamp), time.Time(b.Timestamp)
	if atime.Before(btime) {
		return -1
	} else if atime.After(btime) {
		return 1
	}
	n := len(a.Labels) - len(b.Labels)
	switch {
	case n < 0:
		return -1
	case n > 0:
		return 1
	}
	for i, la := range a.Labels {
		lb := b.Labels[i]
		if n := strings.Compare(la.Key, lb.Key); n != 0 {
			return n
		}
		if n := strings.Compare(la.Value, lb.Value); n != 0 {
			return n
		}
	}
	return 0
}
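The metricsets helpers above keep the slice sorted by (timestamp, labels) so that data points sharing a key merge their samples instead of duplicating entries. A rough Python rendering of that upsert idea, offered only as an illustration (all names hypothetical, not part of metrics.go):

# Illustrative sketch: binary-search the sorted list, merge on an exact key
# match, otherwise insert at the sorted position.
import bisect

def upsert(metricsets, key, samples):
    keys = [k for k, _ in metricsets]
    i = bisect.bisect_left(keys, key)
    if i < len(metricsets) and metricsets[i][0] == key:
        metricsets[i][1].update(samples)            # same timestamp+labels: merge samples
    else:
        metricsets.insert(i, (key, dict(samples)))  # new key: keep list sorted

ms = []
upsert(ms, (1700000000, (('host', 'a'),)), {'cpu': 0.5})
upsert(ms, (1700000000, (('host', 'a'),)), {'mem': 0.7})
assert ms[0][1] == {'cpu': 0.5, 'mem': 0.7}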
LoginScreen.js
import React, {useState} from 'react' import { Platform, SafeAreaView, KeyboardAvoidingView, TouchableOpacity, ActivityIndicator, ScrollView, TextInput, StatusBar, View, Text } from 'react-native' import Icon from 'react-native-vector-icons/Feather' import { connect } from 'react-redux' import { Formik } from 'formik' import * as Yup from 'yup' import ArrowBack from '../Components/ArrowBack' import AuthActions from '../Redux/AuthRedux' import styles from './Styles/LoginScreenStyle' import HeaderStyle from '../Navigation/Styles/NavigationStyles' import { apply } from '../Lib/OsmiProvider' const OS = Platform.OS const LoginScreen = (props) => { const [isSecure, setIsSecure] = useState(true) const event = props.navigation.getParam('event', null) const Scheme = Yup.object().shape({ email: Yup.string() .email("Your email isn't valid") .required("*required"), password: Yup.string() .min(6, "Password must contain at least 6 characters") .required("*required") }) const handleSubmit = (values, actions) => { actions.setSubmitting(false) props.doLogin({ email: values.email, password: values.password, event }) return false } const renderForm = formProps => { return ( <KeyboardAvoidingView> <View style={styles.inputView}> <View style={styles.inputForm}> <Icon name="mail" size={20} /> <TextInput placeholder="Email" onChangeText={(value) => formProps.setFieldValue('email', value)} value={formProps.values.email} style={styles.inputText} autoCapitalize="none" keyboardType="email-address" autoCompleteType="off" /> </View> <Text style={styles.error}>{formProps?.errors?.email}</Text> <View style={styles.inputForm}> <Icon name="lock" size={20} /> <TextInput placeholder="Password" onChangeText={(value) => formProps.setFieldValue('password', value)} secureTextEntry={isSecure} value={formProps.values.password} style={styles.inputText} /> {isSecure ? ( <TouchableOpacity onPress={() => setIsSecure(false)}> <Icon name="eye" size={20} /> </TouchableOpacity> ) : ( <TouchableOpacity onPress={() => setIsSecure(true)}> <Icon name="eye-off" size={20} /> </TouchableOpacity> )} </View> <Text style={styles.error}>{formProps?.errors?.password}</Text> <TouchableOpacity style={[styles.btnLogin, props?.statusLogin?.fetching && apply('bg-blue-400')]} activeOpacity={0.9}
            onPress={(e) => {formProps.handleSubmit(e)}}
            disabled={props?.statusLogin?.fetching ?? false}>
            {props?.statusLogin?.fetching ? (
              <View style={apply('flex items-center justify-center')}>
                <ActivityIndicator color="#fff" />
              </View>
            ) : (
              <Text style={styles.btnLoginText}>Login</Text>
            )}
          </TouchableOpacity>
        </View>
      </KeyboardAvoidingView>
    )
  }

  return (
    <SafeAreaView style={styles.container}>
      <StatusBar backgroundColor={apply("blue-500")} barStyle='light-content' />
      <ScrollView contentContainerStyle={apply('p-5')}>
        <Text style={styles.hello}>Welcome back</Text>
        <Text style={styles.caption}>Login to your account</Text>
        <Formik
          onSubmit={handleSubmit}
          validationSchema={Scheme}
          validateOnChange={false}
          initialValues={{ email: '', password: '' }}
        >
          {formProps => renderForm(formProps)}
        </Formik>
        <Text style={apply("self-center")}>Don't have an account?</Text>
        <TouchableOpacity
          style={styles.btnRegister}
          activeOpacity={0.9}
          onPress={() => {props.navigation.goBack(), props.navigation.navigate('RegisterScreen', { event })}}>
          <Text style={styles.btnRegisterText}>Register</Text>
        </TouchableOpacity>
      </ScrollView>
    </SafeAreaView>
  )
}

const mapStateToProps = (state) => {
  return {
    statusLogin: state.auth.doLogin
  }
}

const mapDispatchToProps = (dispatch) => {
  return {
    doLogin: (value) => dispatch(AuthActions.doLoginRequest(value))
  }
}

LoginScreen.navigationOptions = ({ navigation }) => {
  const { params = {} } = navigation.state

  return {
    headerStyle: HeaderStyle.default,
    headerTitle: 'Login',
    headerLeft: () => <ArrowBack />,
    headerRight: () => <View />,
    headerTitleContainerStyle: {left: OS === 'ios' ? 0 : 55}
  }
}

export default connect(mapStateToProps, mapDispatchToProps)(LoginScreen)
mvnsortmod1.py
'''
======================================================================
Created on Jan 14, 2018

PURPOSE: this module provides classes to read Maven projects from git
    or other repos, specifically intended to create the graph of multiple
    project dependencies

ROADMAP: TODO -
    1. review how properties are distributed and could break things
    2. review subproject dependencies on top level, are props declared?
    2. review parent POM, are props declared?
    3. are external property files used?

@author: Larry
======================================================================
'''
import os
import subprocess
#import json
#import xml.etree.ElementTree as ET
#import urllib2
#import csv
import xml.etree.cElementTree as ET
import re
import urllib.request

#=======================================================================
# static functions and constants
class Util(object):
    mvn_pom_ns = {"mvn": "http://maven.apache.org/POM/4.0.0"}

    def __init__(self):
        pass

    @staticmethod
    def get_tag_value(name, section):
        s = ('mvn:%s' % name)
        elem = section.find(s, Util.mvn_pom_ns)
        if elem is None:
            return ''
        return elem.text

    @staticmethod
    def get_path(dirs):
        path = ''
        for d in dirs:
            path += d + '/'
        return path[:len(path) - 1]

    # if hasattr(a, 'property'):

    @staticmethod
    def run_process_2(cmd_args):
        #result = subprocess.run(['dir', '../*.*'], stdout=subprocess.PIPE)
        #result = subprocess.run(['C:/apps/maven352/bin/mvn', 'help:effective-pom'], stdout=subprocess.PIPE)
        result = subprocess.run(['cd', '..'], stdout=subprocess.PIPE, shell=True)
        result = subprocess.run(cmd_args, stdout=subprocess.PIPE, shell=True)
        print(result.stdout.decode('utf-8'))

    @staticmethod
    def run_process(cmd_args, args_in):
        cmd = subprocess.Popen(cmd_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, shell=True)
        if (args_in):
            cmd.stdin.write(args_in.encode('utf-8'))
            cmd.stdin.flush()  # Must include this to ensure data is passed to child process
        result = cmd.stdout.read()
        print(args_in.encode('utf-8'))
        print(result)  #.stdout.decode('utf-8'))
        '''
        cmdline = ["cmd", "/q", "/k", "echo off"]
        batch = b"""\
        rem vsinstr -coverage helloclass.exe /exclude:std::*
        vsperfcmd /start:coverage /output:run.coverage
        helloclass
        vsperfcmd /shutdown
        exit
        """
        '''

    def test_map_update(self):
        A = {'a': 1, 'b': 2, 'c': 3}
        B = {'c': 99, 'd': 4, 'e': 5}
        A.update(B)
        print(A)

#=======================================================================
# identifies Maven coordinates for a project or dependency
class MavenCoords(object):
    def __init__(self, element, props):
        if (not element):
            self.groupid = ''
            self.artifactid = ''
            self.version = ''
            self.scope = ''
            self.relative_path = ''
            self.key = ''
            return
        self.groupid = Util.get_tag_value('groupId', element)
        self.artifactid = Util.get_tag_value('artifactId', element)
        self.version = Util.get_tag_value('version', element)
        self.relative_path = Util.get_tag_value('relativePath', element)
        self.scope = Util.get_tag_value('scope', element)
        self.refresh_key(props)

    def refresh_key(self, props):
        if (props and self.version in props):
            self.version = props[self.version]
        self.key = '%s|%s|%s' % (self.groupid, self.artifactid, self.version)

#=======================================================================
# a maven project POM complete with properties and dependencies
class MavenProject(object):
    def __init__(self, pom_url, project_map):
        #dirs = pom_url.split('/')
        self.pom_url = pom_url
        self.project_map = project_map
        self.pom_file = self.get_pom_file(self.pom_url)
        self.name = Util.get_tag_value('name', self.pom_file)
        self.packaging = Util.get_tag_value('packaging', self.pom_file)
        self.init_from_parent()
        self.properties.update(self.get_properties(self.pom_file))
        self.coord = MavenCoords(self.pom_file, self.properties)
        self.dependencies.update(self.get_dependencies(self.pom_file))
        self.project_map[self.coord.key] = self
        self.get_sub_modules(self.pom_file)
        self.history = []
        self.consumers = []
        #if self.packaging =='pom':

    # parent pom's will always be pre-existent to child pom's. they will be looked up by coord key from
    # the global graph / project list
    def init_from_parent(self):
        parent_section = self.pom_file.findall('mvn:parent', Util.mvn_pom_ns)
        if (parent_section):
            self.parent_coord = MavenCoords(parent_section[0], None)
            # use .get() so a missing parent is reported instead of raising KeyError
            parent = self.project_map.get(self.parent_coord.key)
            if (parent):
                self.properties = parent.properties.copy()
                self.dependencies = parent.dependencies.copy()
            else:
                # fall back to empty maps; report the coord we failed to resolve
                # (the old code dereferenced the unresolved parent here)
                self.properties = {}
                self.dependencies = {}
                print('Error: POM {} has unresolved parent POM reference {}'.format(self.name, self.parent_coord.key))
        else:
            self.dependencies = {}
            self.properties = {}
            self.coord = MavenCoords(None, None)
            dirs = self.pom_url.split('/')
            print(dirs)
            print(Util.get_path(dirs))

    def get_sub_modules(self, pom_file):
        section = pom_file.findall('mvn:modules', Util.mvn_pom_ns)
        self.modules = {}
        if (not section):
            return
        for elem in section[0].findall('*'):
            sub_proj = self.get_sub_module(elem.text)
            self.modules[sub_proj.coord.key] = sub_proj
            self.project_map[sub_proj.coord.key] = sub_proj

    def get_sub_module(self, sub_dir):
        dirs = self.pom_url.split('/')
        x = len(dirs)
        dirs[x - 1] = 'pom.xml'
        dirs.insert(x - 1, sub_dir)
        path = Util.get_path(dirs)
        module = MavenProject(path, self.project_map)
        return module

    def get_properties(self, pom):
        section = pom.findall('mvn:properties', Util.mvn_pom_ns)
        props = {}
        if (len(section) == 0):
            return props
        for elem in section[0].findall('*'):
            k = re.sub('{.*?}', '', elem.tag)
            k = '${%s}' % k
            props[k] = elem.text
        return props

    def get_dependencies(self, pom):
        section = pom.findall('mvn:dependencies', Util.mvn_pom_ns)
        deps_map = {}
        if (len(section) == 0):
            return deps_map
        for dep_section in section[0].findall('mvn:dependency', Util.mvn_pom_ns):
            obj = MavenCoords(dep_section, self.properties)
            deps_map[obj.key] = obj
        return deps_map

    @staticmethod
    def get_pom_file(pomfile):
        if pomfile.find("http://") >= 0 or pomfile.find("https://") >= 0:
            opener = urllib.request.build_opener()
            pom = ET.parse(opener.open(pomfile)).getroot()
        else:
            pom = ET.parse(pomfile).getroot()
        return pom

    def logx(self, level):
        print()
        print('---------Maven Project---------')
        #print('key: %s * Group: %s * Id: %s * Ver: %s' % (self.coord.key, self.coord.groupid, self.coord.artifactid, self.coord.version))
        print('key: {0} * Name: {1} * Group: {2} * Id: {3} * Ver: {4}'.format(self.coord.key, self.name, self.coord.groupid, self.coord.artifactid, self.coord.version))
        print()
        if level == 0:
            return
        print(' dependencies')
        for k, v in self.dependencies.items():
            print('   key: %s * Group: %s * Id: %s * Ver: %s' % (k, v.groupid, v.artifactid, v.version))
        print()
        print(' properties: ', self.properties)
        print(' consumers')
        for proj in self.consumers:
            print('   ', proj.coord.key)


class DAGerror(Exception):
    def __init__(self, arg):
        self.arg = arg

#=======================================================================
#
class MavenProjectGraph(object):
    def __init__(self, pom_url_list):
        self.pom_url_list = pom_url_list
        self.proj_list = []
        self.proj_map = {}
        #self.validation = {}

    def generate_pom_list(self):
        for pom_url in self.pom_url_list:
            MavenProject(pom_url, self.proj_map)
            #self.proj_list.append(proj)
            #self.proj_map[proj.coord.key] = proj

        self.proj_list = list(self.proj_map.values())
        for proj in self.proj_list:
            proj.logx(1)  #$$
        print()

    def set_options(self):
        pass

    # PURPOSE: sort the list in DAG dependency order and capture each project's consumers
    #
    def resolve_graph(self):
        self.resolve_dependencies()
        self.resolve_consumers()

    # PURPOSE: reorder the project list such that each project's dependencies appear before that project
    #
    # NOTE #1: iterate thru the list looking fwd in the list for each project's dependencies;
    #          for each dependency found, move it behind that project
    #
    # NOTE #2: the DAG is complete when the list is scanned and no dependencies exist fwd of each project
    #
    # NOTE #3: a history of each dependency relocation is maintained for each project;
    #          a circular reference is detected when the same relocation repeats
    #
    def resolve_dependencies(self):
        try:
            while True:
                for p in self.proj_list:
                    print(p.name)
                i = 0
                #dependency_found = False
                while i < len(self.proj_list):
                    dependency_found = False
                    proj_base = self.proj_list[i]
                    print('loop i={}, base={}'.format(i, proj_base.name))
                    j = i + 1
                    while j < len(self.proj_list):
                        print('  loop j {}'.format(j))
                        proj_scan = self.proj_list[j]
                        # a forward project dependency is found for the base project, move it behind the base project
                        if proj_scan.coord.key in proj_base.dependencies:
                            # dejavu - a repeated reorder indicates circular dependency
                            if proj_scan.coord.key in proj_base.history:
                                raise DAGerror("Error: base project - {} - encountered duplicate reorder for dependency - {} -".format(
                                    proj_base.name, proj_scan.name))
                            # remove the fwd item first to avoid order issues
                            del self.proj_list[j]
                            #self.proj_list.remove(j)
                            # insert behind the base project
                            self.proj_list.insert(i, proj_scan)
                            print('  reordered scan {} from j={} to i={}'.format(proj_scan.name, j, i))
                            for p in self.proj_list:
                                print(p.name)
                            proj_base.history.append(proj_scan.coord.key)
                            dependency_found = True
                            i = i - 1
                            break
                        j = j + 1
                    # while j
                    i = i + 1
                # while i
                # repeat outer loop until nothing is reordered
                if not dependency_found:
                    break
                else:
                    i = 0
        except DAGerror as e:
            print(e)

    # PURPOSE: for each project in the list, discover the set of consuming projects
    #
    # NOTE #1: call this method AFTER the dependency graph has been properly resolved;
    #          consuming projects will be forward in the list
    #
    def resolve_consumers(self):
        for i in range(len(self.proj_list)):
            proj_base = self.proj_list[i]
            j = i
            while j < len(self.proj_list) - 1:
                j = j + 1
                proj_scan = self.proj_list[j]
                if (proj_base.coord.key in proj_scan.dependencies):
                    proj_base.consumers.append(proj_scan)

    def list_projects(self):
        for proj in self.proj_list:
            proj.logx(1)

#==========================================================================
def main():
    pom_files = ['D:\\devspaces\\wks4\\py1\\snipits2.xml',
                 'https://raw.githubusercontent.com/LeonardoZ/java-concurrency-patterns/master/pom.xml']
    pom_files = ['D:\\devspaces\\wks4\\py1\\pom-A.xml',
                 'D:\\devspaces\\wks4\\py1\\pom-B.xml',
                 'D:\\devspaces\\wks4\\py1\\pom-C.xml',
                 'D:\\devspaces\\wks4\\py1\\pom-D.xml',
                 ]
    pom_files = ['C:/Users/Larry/Dropbox/gitcode/gh/maven_proj_graph/pom-A.xml',
                 'C:/Users/Larry/Dropbox/gitcode/gh/maven_proj_graph/pom-B.xml',
                 'C:/Users/Larry/Dropbox/gitcode/gh/maven_proj_graph/pom-C.xml',
                 'C:/Users/Larry/Dropbox/gitcode/gh/maven_proj_graph/pom-D.xml',
                 ]
    # C:\Users\Larry\Dropbox\gitcode\gh\maven_proj_graph

    s = ['dir', '*']
    s = ['C:/apps/maven352/bin/mvn', 'help:effective-pom']
    s2 = ['C:\\apps\\maven352\\bin\\mvn', 'help:effective-pom']
    #Util.run_process(['cd', '..'], 'C:\\apps\\maven352\\bin\\mvn help:effective-pom')
    #Util.run_process('C:\\apps\\maven352\\bin\\mvn help:effective-pom', '')
    #Util.test_map_update(None)
    #return()

    graph = MavenProjectGraph(pom_files)
    graph.generate_pom_list()
    graph.resolve_graph()
    graph.list_projects()

#==========================================================================
# see this article for opening remote xml files
# https://stackoverflow.com/questions/28238713/python-xml-parsing-lxml-urllib-request
def main2():
    cwd = os.getcwd()
    cwd = 'D:\\devspaces\\wks4\\py1\\'
    pom_file = cwd + 'snipits2.xml'
    pom_file = 'D:\\devspaces\\wks4\\py1\\snipits2.xml'
    pom = ET.parse(pom_file).getroot()

    # https://github.com/LeonardoZ/java-concurrency-patterns.git
    # this is the correct pattern for reading single files from github
    #   https://raw.githubusercontent.com/user/repository/branch/filename
    # this is the web page containing the file
    #   'https://github.com/LeonardoZ/java-concurrency-patterns/blob/master/pom.xml'
    pom_file_url = 'https://raw.githubusercontent.com/LeonardoZ/java-concurrency-patterns/master/pom.xml'
    opener = urllib.request.build_opener()
    f = opener.open(pom_file_url)
    # ng, file=urllib.urlopen(file=urllib.urlopen())
    #parser = ET.HTMLParser()
    #with urlopen('https://pypi.python.org/simple') as f:
    #tree = ET.parse(f, parser)
    #pom_file = urllib.request.urlopen(pom_file)
    pom = ET.parse(opener.open(pom_file_url)).getroot()
    # MavenProject takes a pom url and a project map (not a parsed tree),
    # and logx requires a level argument
    project = MavenProject(pom_file_url, {})
    project.logx(1)

if __name__ == '__main__':
    main()
    #main()

'''
=====================================================================
notes:

alternatives - use maven to get equiv pom
> mvn help:effective-pom
https://stackoverflow.com/questions/4760215/running-shell-command-from-python-and-capturing-the-output
'''
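The resolve_dependencies loop above is easier to see in isolation. Below is a minimal, self-contained sketch of the same reorder-until-stable idea with cycle detection, using plain lists and dicts; all names are hypothetical and it is not part of mvnsortmod1.py.

# Illustrative sketch: 'deps' maps a project name to the set of names it
# depends on; dependencies are moved in front of their consumers until the
# order is stable, and a repeated relocation signals a cycle.
def dag_order(names, deps):
    order = list(names)
    moved = {}  # history of relocations, to detect cycles
    changed = True
    while changed:
        changed = False
        for i, base in enumerate(order):
            for j in range(i + 1, len(order)):
                scan = order[j]
                if scan in deps.get(base, set()):
                    if (base, scan) in moved:
                        raise RuntimeError('circular dependency: %s <-> %s' % (base, scan))
                    moved[(base, scan)] = True
                    order.insert(i, order.pop(j))  # move the dependency in front of its consumer
                    changed = True
                    break
            if changed:
                break
    return order

assert dag_order(['app', 'core', 'utils'],
                 {'app': {'core'}, 'core': {'utils'}}) == ['utils', 'core', 'app']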
test_db_matches_ui.py
from model.group import Group


def test_group_list(app, db):
    ui_list = app.group.get_group_list()

    def clean(group):
        return Group(id=group.id, name=group.name.strip())

    db_list = map(clean, db.get_group_list())
    assert sorted(ui_list, key=Group.id_or_max) == sorted(db_list, key=Group.id_or_max)
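The test sorts both lists with Group.id_or_max. That helper lives in model/group.py, which is not shown here; a plausible minimal definition, offered only as an assumption about that module, is:

# Assumed shape of model/group.py (hypothetical, not from the source above):
# groups without a database id sort last.
import sys

class Group:
    def __init__(self, id=None, name=None):
        self.id = id
        self.name = name

    def id_or_max(self):
        # treat a missing/empty id as "largest" so new groups sort last
        return int(self.id) if self.id else sys.maxsize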
mod.rs
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Some code that abstracts away much of the boilerplate of writing //! `derive` instances for traits. Among other things it manages getting //! access to the fields of the 4 different sorts of structs and enum //! variants, as well as creating the method and impl ast instances. //! //! Supported features (fairly exhaustive): //! //! - Methods taking any number of parameters of any type, and returning //! any type, other than vectors, bottom and closures. //! - Generating `impl`s for types with type parameters and lifetimes //! (e.g. `Option<T>`), the parameters are automatically given the //! current trait as a bound. (This includes separate type parameters //! and lifetimes for methods.) //! - Additional bounds on the type parameters (`TraitDef.additional_bounds`) //! //! The most important thing for implementers is the `Substructure` and //! `SubstructureFields` objects. The latter groups 5 possibilities of the //! arguments: //! //! - `Struct`, when `Self` is a struct (including tuple structs, e.g //! `struct T(i32, char)`). //! - `EnumMatching`, when `Self` is an enum and all the arguments are the //! same variant of the enum (e.g. `Some(1)`, `Some(3)` and `Some(4)`) //! - `EnumNonMatchingCollapsed` when `Self` is an enum and the arguments //! are not the same variant (e.g. `None`, `Some(1)` and `None`). //! - `StaticEnum` and `StaticStruct` for static methods, where the type //! being derived upon is either an enum or struct respectively. (Any //! argument with type Self is just grouped among the non-self //! arguments.) //! //! In the first two cases, the values from the corresponding fields in //! all the arguments are grouped together. For `EnumNonMatchingCollapsed` //! this isn't possible (different variants have different fields), so the //! fields are inaccessible. (Previous versions of the deriving infrastructure //! had a way to expand into code that could access them, at the cost of //! generating exponential amounts of code; see issue #15375). There are no //! fields with values in the static cases, so these are treated entirely //! differently. //! //! The non-static cases have `Option<ident>` in several places associated //! with field `expr`s. This represents the name of the field it is //! associated with. It is only not `None` when the associated field has //! an identifier in the source code. For example, the `x`s in the //! following snippet //! //! ```rust //! struct A { x : i32 } //! //! struct B(i32); //! //! enum C { //! C0(i32), //! C1 { x: i32 } //! } //! ``` //! //! The `i32`s in `B` and `C0` don't have an identifier, so the //! `Option<ident>`s would be `None` for them. //! //! In the static cases, the structure is summarised, either into the just //! spans of the fields or a list of spans and the field idents (for tuple //! structs and record structs, respectively), or a list of these, for //! enums (one for each variant). For empty struct and empty enum //! variants, it is represented as a count of 0. //! //! # "`cs`" functions //! //! 
//! The `cs_...` functions ("combine substructure") are designed to
//! make life easier by providing some pre-made recipes for common
//! threads; mostly calling the function being derived on all the
//! arguments and then combining them back together in some way (or
//! letting the user choose that). They are not meant to be the only
//! way to handle the structures that this code creates.
//!
//! # Examples
//!
//! The following simplified `PartialEq` is used for in-code examples:
//!
//! ```rust
//! trait PartialEq {
//!     fn eq(&self, other: &Self);
//! }
//! impl PartialEq for i32 {
//!     fn eq(&self, other: &i32) -> bool {
//!         *self == *other
//!     }
//! }
//! ```
//!
//! Some examples of the values of `SubstructureFields` follow, using the
//! above `PartialEq`, `A`, `B` and `C`.
//!
//! ## Structs
//!
//! When generating the `expr` for the `A` impl, the `SubstructureFields` is
//!
//! ```{.text}
//! Struct(vec![FieldInfo {
//!     span: <span of x>
//!     name: Some(<ident of x>),
//!     self_: <expr for &self.x>,
//!     other: vec![<expr for &other.x>]
//! }])
//! ```
//!
//! For the `B` impl, called with `B(a)` and `B(b)`,
//!
//! ```{.text}
//! Struct(vec![FieldInfo {
//!     span: <span of `i32`>,
//!     name: None,
//!     self_: <expr for &a>,
//!     other: vec![<expr for &b>]
//! }])
//! ```
//!
//! ## Enums
//!
//! When generating the `expr` for a call with `self == C0(a)` and `other
//! == C0(b)`, the SubstructureFields is
//!
//! ```{.text}
//! EnumMatching(0, <ast::Variant for C0>,
//!              vec![FieldInfo {
//!                 span: <span of i32>
//!                 name: None,
//!                 self_: <expr for &a>,
//!                 other: vec![<expr for &b>]
//!              }])
//! ```
//!
//! For `C1 {x}` and `C1 {x}`,
//!
//! ```{.text}
//! EnumMatching(1, <ast::Variant for C1>,
//!              vec![FieldInfo {
//!                 span: <span of x>
//!                 name: Some(<ident of x>),
//!                 self_: <expr for &self.x>,
//!                 other: vec![<expr for &other.x>]
//!              }])
//! ```
//!
//! For `C0(a)` and `C1 {x}`,
//!
//! ```{.text}
//! EnumNonMatchingCollapsed(
//!     vec![<ident of self>, <ident of __arg_1>],
//!     &[<ast::Variant for C0>, <ast::Variant for C1>],
//!     &[<ident for self index value>, <ident of __arg_1 index value>])
//! ```
//!
//! It is the same for when the arguments are flipped to `C1 {x}` and
//! `C0(a)`; the only difference is what the values of the identifiers
//! <ident for self index value> and <ident of __arg_1 index value> will
//! be in the generated code.
//!
//! `EnumNonMatchingCollapsed` deliberately provides far less information
//! than is generally available for a given pair of variants; see #15375
//! for discussion.
//!
//! ## Static
//!
//! A static method on the types above would result in,
//!
//! ```{.text}
//! StaticStruct(<ast::StructDef of A>, Named(vec![(<ident of x>, <span of x>)]))
//!
//! StaticStruct(<ast::StructDef of B>, Unnamed(vec![<span of x>]))
//!
//! StaticEnum(<ast::EnumDef of C>,
//!            vec![(<ident of C0>, <span of C0>, Unnamed(vec![<span of i32>])),
//!                 (<ident of C1>, <span of C1>, Named(vec![(<ident of x>, <span of x>)]))])
//!
``` pub use self::StaticFields::*; pub use self::SubstructureFields::*; use self::StructType::*; use std::cell::RefCell; use std::vec; use abi::Abi; use abi; use ast; use ast::{EnumDef, Expr, Ident, Generics, StructDef}; use ast_util; use attr; use attr::AttrMetaMethods; use ext::base::ExtCtxt; use ext::build::AstBuilder; use codemap::{self, DUMMY_SP}; use codemap::Span; use diagnostic::SpanHandler; use fold::MoveMap; use owned_slice::OwnedSlice; use parse::token::InternedString; use parse::token::special_idents; use ptr::P; use self::ty::{LifetimeBounds, Path, Ptr, PtrTy, Self_, Ty}; pub mod ty; pub struct TraitDef<'a> { /// The span for the current #[derive(Foo)] header. pub span: Span, pub attributes: Vec<ast::Attribute>, /// Path of the trait, including any type parameters pub path: Path<'a>, /// Additional bounds required of any type parameters of the type, /// other than the current trait pub additional_bounds: Vec<Ty<'a>>, /// Any extra lifetimes and/or bounds, e.g. `D: serialize::Decoder` pub generics: LifetimeBounds<'a>, pub methods: Vec<MethodDef<'a>>, pub associated_types: Vec<(ast::Ident, Ty<'a>)>, } pub struct MethodDef<'a> { /// name of the method pub name: &'a str, /// List of generics, e.g. `R: rand::Rng` pub generics: LifetimeBounds<'a>, /// Whether there is a self argument (outer Option) i.e. whether /// this is a static function, and whether it is a pointer (inner /// Option) pub explicit_self: Option<Option<PtrTy<'a>>>, /// Arguments other than the self argument pub args: Vec<Ty<'a>>, /// Return type pub ret_ty: Ty<'a>, pub attributes: Vec<ast::Attribute>, pub combine_substructure: RefCell<CombineSubstructureFunc<'a>>, } /// All the data about the data structure/method being derived upon. pub struct Substructure<'a> { /// ident of self pub type_ident: Ident, /// ident of the method pub method_ident: Ident, /// dereferenced access to any `Self_` or `Ptr(Self_, _)` arguments pub self_args: &'a [P<Expr>], /// verbatim access to any other arguments pub nonself_args: &'a [P<Expr>], pub fields: &'a SubstructureFields<'a> } /// Summary of the relevant parts of a struct/enum field. pub struct FieldInfo<'a> { pub span: Span, /// None for tuple structs/normal enum variants, Some for normal /// structs/struct enum variants. pub name: Option<Ident>, /// The expression corresponding to this field of `self` /// (specifically, a reference to it). pub self_: P<Expr>, /// The expressions corresponding to references to this field in /// the other `Self` arguments. pub other: Vec<P<Expr>>, /// The attributes on the field pub attrs: &'a [ast::Attribute], } /// Fields for a static method pub enum StaticFields { /// Tuple structs/enum variants like this. Unnamed(Vec<Span>), /// Normal structs/struct variants. Named(Vec<(Ident, Span)>), } /// A summary of the possible sets of fields. pub enum SubstructureFields<'a> { Struct(Vec<FieldInfo<'a>>), /// Matching variants of the enum: variant index, ast::Variant, /// fields: the field name is only non-`None` in the case of a struct /// variant. EnumMatching(usize, &'a ast::Variant, Vec<FieldInfo<'a>>), /// Non-matching variants of the enum, but with all state hidden from /// the consequent code. The first component holds `Ident`s for all of /// the `Self` arguments; the second component is a slice of all of the /// variants for the enum itself, and the third component is a list of /// `Ident`s bound to the variant index values for each of the actual /// input `Self` arguments. 
EnumNonMatchingCollapsed(Vec<Ident>, &'a [P<ast::Variant>], &'a [Ident]), /// A static method where `Self` is a struct. StaticStruct(&'a ast::StructDef, StaticFields), /// A static method where `Self` is an enum. StaticEnum(&'a ast::EnumDef, Vec<(Ident, Span, StaticFields)>), } /// Combine the values of all the fields together. The last argument is /// all the fields of all the structures. pub type CombineSubstructureFunc<'a> = Box<FnMut(&mut ExtCtxt, Span, &Substructure) -> P<Expr> + 'a>; /// Deal with non-matching enum variants. The tuple is a list of /// identifiers (one for each `Self` argument, which could be any of the /// variants since they have been collapsed together) and the identifiers /// holding the variant index value for each of the `Self` arguments. The /// last argument is all the non-`Self` args of the method being derived. pub type EnumNonMatchCollapsedFunc<'a> = Box<FnMut(&mut ExtCtxt, Span, (&[Ident], &[Ident]), &[P<Expr>]) -> P<Expr> + 'a>; pub fn combine_substructure<'a>(f: CombineSubstructureFunc<'a>) -> RefCell<CombineSubstructureFunc<'a>> { RefCell::new(f) } /// This method helps to extract all the type parameters referenced from a /// type. For a type parameter `<T>`, it looks for either a `TyPath` that /// is not global and starts with `T`, or a `TyQPath`. fn find_type_parameters(ty: &ast::Ty, ty_param_names: &[ast::Name]) -> Vec<P<ast::Ty>> { use visit; struct Visitor<'a> { ty_param_names: &'a [ast::Name], types: Vec<P<ast::Ty>>, } impl<'a> visit::Visitor<'a> for Visitor<'a> { fn visit_ty(&mut self, ty: &'a ast::Ty) { match ty.node { ast::TyPath(_, ref path) if !path.global => { match path.segments.first() { Some(segment) => { if self.ty_param_names.contains(&segment.identifier.name) { self.types.push(P(ty.clone())); } } None => {} } } _ => {} } visit::walk_ty(self, ty) } } let mut visitor = Visitor { ty_param_names: ty_param_names, types: Vec::new(), }; visit::Visitor::visit_ty(&mut visitor, ty); visitor.types } impl<'a> TraitDef<'a> { pub fn expand(&self, cx: &mut ExtCtxt, mitem: &ast::MetaItem, item: &'a ast::Item, push: &mut FnMut(P<ast::Item>)) { let newitem = match item.node { ast::ItemStruct(ref struct_def, ref generics) => { self.expand_struct_def(cx, &**struct_def, item.ident, generics) } ast::ItemEnum(ref enum_def, ref generics) => { self.expand_enum_def(cx, enum_def, &item.attrs[..], item.ident, generics) } _ => { cx.span_err(mitem.span, "`derive` may only be applied to structs and enums"); return; } }; // Keep the lint attributes of the previous item to control how the // generated implementations are linted let mut attrs = newitem.attrs.clone(); attrs.extend(item.attrs.iter().filter(|a| { match &a.name()[..] { "allow" | "warn" | "deny" | "forbid" => true, _ => false, } }).cloned()); push(P(ast::Item { attrs: attrs, ..(*newitem).clone() })) } /// Given that we are deriving a trait `DerivedTrait` for a type like: /// /// ```ignore /// struct Struct<'a, ..., 'z, A, B: DeclaredTrait, C, ..., Z> where C: WhereTrait { /// a: A, /// b: B::Item, /// b1: <B as DeclaredTrait>::Item, /// c1: <C as WhereTrait>::Item, /// c2: Option<<C as WhereTrait>::Item>, /// ... /// } /// ``` /// /// create an impl like: /// /// ```ignore /// impl<'a, ..., 'z, A, B: DeclaredTrait, C, ... Z> where /// C: WhereTrait, /// A: DerivedTrait + B1 + ... + BN, /// B: DerivedTrait + B1 + ... + BN, /// C: DerivedTrait + B1 + ... + BN, /// B::Item: DerivedTrait + B1 + ... + BN, /// <C as WhereTrait>::Item: DerivedTrait + B1 + ... + BN, /// ... /// { /// ... 
    /// }
    /// ```
    ///
    /// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
    /// therefore does not get bound by the derived trait.
    fn create_derived_impl
(&self, cx: &mut ExtCtxt, type_ident: Ident, generics: &Generics, field_tys: Vec<P<ast::Ty>>, methods: Vec<P<ast::ImplItem>>) -> P<ast::Item> { let trait_path = self.path.to_path(cx, self.span, type_ident, generics); // Transform associated types from `deriving::ty::Ty` into `ast::ImplItem` let associated_types = self.associated_types.iter().map(|&(ident, ref type_def)| { P(ast::ImplItem { id: ast::DUMMY_NODE_ID, span: self.span, ident: ident, vis: ast::Inherited, attrs: Vec::new(), node: ast::TypeImplItem(type_def.to_ty(cx, self.span, type_ident, generics )), }) }); let Generics { mut lifetimes, ty_params, mut where_clause } = self.generics.to_generics(cx, self.span, type_ident, generics); let mut ty_params = ty_params.into_vec(); // Copy the lifetimes lifetimes.extend(generics.lifetimes.iter().cloned()); // Create the type parameters. ty_params.extend(generics.ty_params.iter().map(|ty_param| { // I don't think this can be moved out of the loop, since // a TyParamBound requires an ast id let mut bounds: Vec<_> = // extra restrictions on the generics parameters to the type being derived upon self.additional_bounds.iter().map(|p| { cx.typarambound(p.to_path(cx, self.span, type_ident, generics)) }).collect(); // require the current trait bounds.push(cx.typarambound(trait_path.clone())); // also add in any bounds from the declaration for declared_bound in &*ty_param.bounds { bounds.push((*declared_bound).clone()); } cx.typaram(self.span, ty_param.ident, OwnedSlice::from_vec(bounds), None) })); // and similarly for where clauses where_clause.predicates.extend(generics.where_clause.predicates.iter().map(|clause| { match *clause { ast::WherePredicate::BoundPredicate(ref wb) => { ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate { span: self.span, bound_lifetimes: wb.bound_lifetimes.clone(), bounded_ty: wb.bounded_ty.clone(), bounds: OwnedSlice::from_vec(wb.bounds.iter().cloned().collect()) }) } ast::WherePredicate::RegionPredicate(ref rb) => { ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate { span: self.span, lifetime: rb.lifetime, bounds: rb.bounds.iter().cloned().collect() }) } ast::WherePredicate::EqPredicate(ref we) => { ast::WherePredicate::EqPredicate(ast::WhereEqPredicate { id: ast::DUMMY_NODE_ID, span: self.span, path: we.path.clone(), ty: we.ty.clone() }) } } })); if !ty_params.is_empty() { let ty_param_names: Vec<ast::Name> = ty_params.iter() .map(|ty_param| ty_param.ident.name) .collect(); for field_ty in field_tys.into_iter() { let tys = find_type_parameters(&*field_ty, &ty_param_names); for ty in tys.into_iter() { let mut bounds: Vec<_> = self.additional_bounds.iter().map(|p| { cx.typarambound(p.to_path(cx, self.span, type_ident, generics)) }).collect(); // require the current trait bounds.push(cx.typarambound(trait_path.clone())); let predicate = ast::WhereBoundPredicate { span: self.span, bound_lifetimes: vec![], bounded_ty: ty, bounds: OwnedSlice::from_vec(bounds), }; let predicate = ast::WherePredicate::BoundPredicate(predicate); where_clause.predicates.push(predicate); } } } let trait_generics = Generics { lifetimes: lifetimes, ty_params: OwnedSlice::from_vec(ty_params), where_clause: where_clause }; // Create the reference to the trait. let trait_ref = cx.trait_ref(trait_path); // Create the type parameters on the `self` path. 
let self_ty_params = generics.ty_params.map(|ty_param| { cx.ty_ident(self.span, ty_param.ident) }); let self_lifetimes: Vec<ast::Lifetime> = generics.lifetimes .iter() .map(|ld| ld.lifetime) .collect(); // Create the type of `self`. let self_type = cx.ty_path( cx.path_all(self.span, false, vec!( type_ident ), self_lifetimes, self_ty_params.into_vec(), Vec::new())); let attr = cx.attribute( self.span, cx.meta_word(self.span, InternedString::new("automatically_derived"))); // Just mark it now since we know that it'll end up used downstream attr::mark_used(&attr); let opt_trait_ref = Some(trait_ref); let ident = ast_util::impl_pretty_name(&opt_trait_ref, Some(&*self_type)); let mut a = vec![attr]; a.extend(self.attributes.iter().cloned()); cx.item( self.span, ident, a, ast::ItemImpl(ast::Unsafety::Normal, ast::ImplPolarity::Positive, trait_generics, opt_trait_ref, self_type, methods.into_iter().chain(associated_types).collect())) } fn expand_struct_def(&self, cx: &mut ExtCtxt, struct_def: &'a StructDef, type_ident: Ident, generics: &Generics) -> P<ast::Item> { let field_tys: Vec<P<ast::Ty>> = struct_def.fields.iter() .map(|field| field.node.ty.clone()) .collect(); let methods = self.methods.iter().map(|method_def| { let (explicit_self, self_args, nonself_args, tys) = method_def.split_self_nonself_args( cx, self, type_ident, generics); let body = if method_def.is_static() { method_def.expand_static_struct_method_body( cx, self, struct_def, type_ident, &self_args[..], &nonself_args[..]) } else { method_def.expand_struct_method_body(cx, self, struct_def, type_ident, &self_args[..], &nonself_args[..]) }; method_def.create_method(cx, self, type_ident, generics, abi::Rust, explicit_self, tys, body) }).collect(); self.create_derived_impl(cx, type_ident, generics, field_tys, methods) } fn expand_enum_def(&self, cx: &mut ExtCtxt, enum_def: &'a EnumDef, type_attrs: &[ast::Attribute], type_ident: Ident, generics: &Generics) -> P<ast::Item> { let mut field_tys = Vec::new(); for variant in enum_def.variants.iter() { match variant.node.kind { ast::VariantKind::TupleVariantKind(ref args) => { field_tys.extend(args.iter() .map(|arg| arg.ty.clone())); } ast::VariantKind::StructVariantKind(ref args) => { field_tys.extend(args.fields.iter() .map(|field| field.node.ty.clone())); } } } let methods = self.methods.iter().map(|method_def| { let (explicit_self, self_args, nonself_args, tys) = method_def.split_self_nonself_args(cx, self, type_ident, generics); let body = if method_def.is_static() { method_def.expand_static_enum_method_body( cx, self, enum_def, type_ident, &self_args[..], &nonself_args[..]) } else { method_def.expand_enum_method_body(cx, self, enum_def, type_attrs, type_ident, self_args, &nonself_args[..]) }; method_def.create_method(cx, self, type_ident, generics, abi::Rust, explicit_self, tys, body) }).collect(); self.create_derived_impl(cx, type_ident, generics, field_tys, methods) } } fn find_repr_type_name(diagnostic: &SpanHandler, type_attrs: &[ast::Attribute]) -> &'static str { let mut repr_type_name = "i32"; for a in type_attrs { for r in &attr::find_repr_attrs(diagnostic, a) { repr_type_name = match *r { attr::ReprAny | attr::ReprPacked => continue, attr::ReprExtern => "i32", attr::ReprInt(_, attr::SignedInt(ast::TyIs)) => "isize", attr::ReprInt(_, attr::SignedInt(ast::TyI8)) => "i8", attr::ReprInt(_, attr::SignedInt(ast::TyI16)) => "i16", attr::ReprInt(_, attr::SignedInt(ast::TyI32)) => "i32", attr::ReprInt(_, attr::SignedInt(ast::TyI64)) => "i64", attr::ReprInt(_, 
attr::UnsignedInt(ast::TyUs)) => "usize", attr::ReprInt(_, attr::UnsignedInt(ast::TyU8)) => "u8", attr::ReprInt(_, attr::UnsignedInt(ast::TyU16)) => "u16", attr::ReprInt(_, attr::UnsignedInt(ast::TyU32)) => "u32", attr::ReprInt(_, attr::UnsignedInt(ast::TyU64)) => "u64", } } } repr_type_name } impl<'a> MethodDef<'a> { fn call_substructure_method(&self, cx: &mut ExtCtxt, trait_: &TraitDef, type_ident: Ident, self_args: &[P<Expr>], nonself_args: &[P<Expr>], fields: &SubstructureFields) -> P<Expr> { let substructure = Substructure { type_ident: type_ident, method_ident: cx.ident_of(self.name), self_args: self_args, nonself_args: nonself_args, fields: fields }; let mut f = self.combine_substructure.borrow_mut(); let f: &mut CombineSubstructureFunc = &mut *f; f(cx, trait_.span, &substructure) } fn get_ret_ty(&self, cx: &mut ExtCtxt, trait_: &TraitDef, generics: &Generics, type_ident: Ident) -> P<ast::Ty> { self.ret_ty.to_ty(cx, trait_.span, type_ident, generics) } fn is_static(&self) -> bool { self.explicit_self.is_none() } fn split_self_nonself_args(&self, cx: &mut ExtCtxt, trait_: &TraitDef, type_ident: Ident, generics: &Generics) -> (ast::ExplicitSelf, Vec<P<Expr>>, Vec<P<Expr>>, Vec<(Ident, P<ast::Ty>)>) { let mut self_args = Vec::new(); let mut nonself_args = Vec::new(); let mut arg_tys = Vec::new(); let mut nonstatic = false; let ast_explicit_self = match self.explicit_self { Some(ref self_ptr) => { let (self_expr, explicit_self) = ty::get_explicit_self(cx, trait_.span, self_ptr); self_args.push(self_expr); nonstatic = true; explicit_self } None => codemap::respan(trait_.span, ast::SelfStatic), }; for (i, ty) in self.args.iter().enumerate() { let ast_ty = ty.to_ty(cx, trait_.span, type_ident, generics); let ident = cx.ident_of(&format!("__arg_{}", i)); arg_tys.push((ident, ast_ty)); let arg_expr = cx.expr_ident(trait_.span, ident); match *ty { // for static methods, just treat any Self // arguments as a normal arg Self_ if nonstatic => { self_args.push(arg_expr); } Ptr(ref ty, _) if **ty == Self_ && nonstatic => { self_args.push(cx.expr_deref(trait_.span, arg_expr)) } _ => { nonself_args.push(arg_expr); } } } (ast_explicit_self, self_args, nonself_args, arg_tys) } fn create_method(&self, cx: &mut ExtCtxt, trait_: &TraitDef, type_ident: Ident, generics: &Generics, abi: Abi, explicit_self: ast::ExplicitSelf, arg_types: Vec<(Ident, P<ast::Ty>)> , body: P<Expr>) -> P<ast::ImplItem> { // create the generics that aren't for Self let fn_generics = self.generics.to_generics(cx, trait_.span, type_ident, generics); let self_arg = match explicit_self.node { ast::SelfStatic => None, // creating fresh self id _ => Some(ast::Arg::new_self(trait_.span, ast::MutImmutable, special_idents::self_)) }; let args = { let args = arg_types.into_iter().map(|(name, ty)| { cx.arg(trait_.span, name, ty) }); self_arg.into_iter().chain(args).collect() }; let ret_type = self.get_ret_ty(cx, trait_, generics, type_ident); let method_ident = cx.ident_of(self.name); let fn_decl = cx.fn_decl(args, ret_type); let body_block = cx.block_expr(body); // Create the method. 
P(ast::ImplItem { id: ast::DUMMY_NODE_ID, attrs: self.attributes.clone(), span: trait_.span, vis: ast::Inherited, ident: method_ident, node: ast::MethodImplItem(ast::MethodSig { generics: fn_generics, abi: abi, explicit_self: explicit_self, unsafety: ast::Unsafety::Normal, decl: fn_decl }, body_block) }) } /// ``` /// #[derive(PartialEq)] /// struct A { x: i32, y: i32 } /// /// // equivalent to: /// impl PartialEq for A { /// fn eq(&self, __arg_1: &A) -> bool { /// match *self { /// A {x: ref __self_0_0, y: ref __self_0_1} => { /// match *__arg_1 { /// A {x: ref __self_1_0, y: ref __self_1_1} => { /// __self_0_0.eq(__self_1_0) && __self_0_1.eq(__self_1_1) /// } /// } /// } /// } /// } /// } /// ``` fn expand_struct_method_body<'b>(&self, cx: &mut ExtCtxt, trait_: &TraitDef<'b>, struct_def: &'b StructDef, type_ident: Ident, self_args: &[P<Expr>], nonself_args: &[P<Expr>]) -> P<Expr> { let mut raw_fields = Vec::new(); // Vec<[fields of self], // [fields of next Self arg], [etc]> let mut patterns = Vec::new(); for i in 0..self_args.len() { let struct_path= cx.path(DUMMY_SP, vec!( type_ident )); let (pat, ident_expr) = trait_.create_struct_pattern(cx, struct_path, struct_def, &format!("__self_{}", i), ast::MutImmutable); patterns.push(pat); raw_fields.push(ident_expr); } // transpose raw_fields let fields = if !raw_fields.is_empty() { let mut raw_fields = raw_fields.into_iter().map(|v| v.into_iter()); let first_field = raw_fields.next().unwrap(); let mut other_fields: Vec<vec::IntoIter<_>> = raw_fields.collect(); first_field.map(|(span, opt_id, field, attrs)| { FieldInfo { span: span, name: opt_id, self_: field, other: other_fields.iter_mut().map(|l| { match l.next().unwrap() { (_, _, ex, _) => ex } }).collect(), attrs: attrs, } }).collect() } else { cx.span_bug(trait_.span, "no self arguments to non-static method in generic \ `derive`") }; // body of the inner most destructuring match let mut body = self.call_substructure_method( cx, trait_, type_ident, self_args, nonself_args, &Struct(fields)); // make a series of nested matches, to destructure the // structs. This is actually right-to-left, but it shouldn't // matter. for (arg_expr, pat) in self_args.iter().zip(patterns.iter()) { body = cx.expr_match(trait_.span, arg_expr.clone(), vec!( cx.arm(trait_.span, vec!(pat.clone()), body) )) } body } fn expand_static_struct_method_body(&self, cx: &mut ExtCtxt, trait_: &TraitDef, struct_def: &StructDef, type_ident: Ident, self_args: &[P<Expr>], nonself_args: &[P<Expr>]) -> P<Expr> { let summary = trait_.summarise_struct(cx, struct_def); self.call_substructure_method(cx, trait_, type_ident, self_args, nonself_args, &StaticStruct(struct_def, summary)) } /// ``` /// #[derive(PartialEq)] /// enum A { /// A1, /// A2(i32) /// } /// /// // is equivalent to /// /// impl PartialEq for A { /// fn eq(&self, __arg_1: &A) -> ::bool { /// match (&*self, &*__arg_1) { /// (&A1, &A1) => true, /// (&A2(ref __self_0), /// &A2(ref __arg_1_0)) => (*__self_0).eq(&(*__arg_1_0)), /// _ => { /// let __self_vi = match *self { A1(..) => 0, A2(..) => 1 }; /// let __arg_1_vi = match *__arg_1 { A1(..) => 0, A2(..) => 1 }; /// false /// } /// } /// } /// } /// ``` /// /// (Of course `__self_vi` and `__arg_1_vi` are unused for /// `PartialEq`, and those subcomputations will hopefully be removed /// as their results are unused. The point of `__self_vi` and /// `__arg_1_vi` is for `PartialOrd`; see #15503.) 
fn expand_enum_method_body<'b>(&self, cx: &mut ExtCtxt, trait_: &TraitDef<'b>, enum_def: &'b EnumDef, type_attrs: &[ast::Attribute], type_ident: Ident, self_args: Vec<P<Expr>>, nonself_args: &[P<Expr>]) -> P<Expr> { self.build_enum_match_tuple( cx, trait_, enum_def, type_attrs, type_ident, self_args, nonself_args) } /// Creates a match for a tuple of all `self_args`, where either all /// variants match, or it falls into a catch-all for when one variant /// does not match. /// There are N + 1 cases because is a case for each of the N /// variants where all of the variants match, and one catch-all for /// when one does not match. /// The catch-all handler is provided access the variant index values /// for each of the self-args, carried in precomputed variables. (Nota /// bene: the variant index values are not necessarily the /// discriminant values. See issue #15523.) /// ```{.text} /// match (this, that, ...) { /// (Variant1, Variant1, Variant1) => ... // delegate Matching on Variant1 /// (Variant2, Variant2, Variant2) => ... // delegate Matching on Variant2 /// ... /// _ => { /// let __this_vi = match this { Variant1 => 0, Variant2 => 1, ... }; /// let __that_vi = match that { Variant1 => 0, Variant2 => 1, ... }; /// ... // catch-all remainder can inspect above variant index values. /// } /// } /// ``` fn build_enum_match_tuple<'b>( &self, cx: &mut ExtCtxt, trait_: &TraitDef<'b>, enum_def: &'b EnumDef, type_attrs: &[ast::Attribute], type_ident: Ident, self_args: Vec<P<Expr>>, nonself_args: &[P<Expr>]) -> P<Expr> { let sp = trait_.span; let variants = &enum_def.variants; let self_arg_names = self_args.iter().enumerate() .map(|(arg_count, _self_arg)| { if arg_count == 0 { "__self".to_string() } else { format!("__arg_{}", arg_count) } }) .collect::<Vec<String>>(); let self_arg_idents = self_arg_names.iter() .map(|name|cx.ident_of(&name[..])) .collect::<Vec<ast::Ident>>(); // The `vi_idents` will be bound, solely in the catch-all, to // a series of let statements mapping each self_arg to an int // value corresponding to its discriminant. let vi_idents: Vec<ast::Ident> = self_arg_names.iter() .map(|name| { let vi_suffix = format!("{}_vi", &name[..]); cx.ident_of(&vi_suffix[..]) }) .collect::<Vec<ast::Ident>>(); // Builds, via callback to call_substructure_method, the // delegated expression that handles the catch-all case, // using `__variants_tuple` to drive logic if necessary. let catch_all_substructure = EnumNonMatchingCollapsed( self_arg_idents, &variants[..], &vi_idents[..]); // These arms are of the form: // (Variant1, Variant1, ...) => Body1 // (Variant2, Variant2, ...) => Body2 // ... // where each tuple has length = self_args.len() let mut match_arms: Vec<ast::Arm> = variants.iter().enumerate() .map(|(index, variant)| { let mk_self_pat = |cx: &mut ExtCtxt, self_arg_name: &str| { let (p, idents) = trait_.create_enum_variant_pattern(cx, type_ident, &**variant, self_arg_name, ast::MutImmutable); (cx.pat(sp, ast::PatRegion(p, ast::MutImmutable)), idents) }; // A single arm has form (&VariantK, &VariantK, ...) => BodyK // (see "Final wrinkle" note below for why.) let mut subpats = Vec::with_capacity(self_arg_names.len()); let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1); let first_self_pat_idents = { let (p, idents) = mk_self_pat(cx, &self_arg_names[0]); subpats.push(p); idents }; for self_arg_name in &self_arg_names[1..] 
{ let (p, idents) = mk_self_pat(cx, &self_arg_name[..]); subpats.push(p); self_pats_idents.push(idents); } // Here is the pat = `(&VariantK, &VariantK, ...)` let single_pat = cx.pat_tuple(sp, subpats); // For the BodyK, we need to delegate to our caller, // passing it an EnumMatching to indicate which case // we are in. // All of the Self args have the same variant in these // cases. So we transpose the info in self_pats_idents // to gather the getter expressions together, in the // form that EnumMatching expects. // The transposition is driven by walking across the // arg fields of the variant for the first self pat. let field_tuples = first_self_pat_idents.into_iter().enumerate() // For each arg field of self, pull out its getter expr ... .map(|(field_index, (sp, opt_ident, self_getter_expr, attrs))| { // ... but FieldInfo also wants getter expr // for matching other arguments of Self type; // so walk across the *other* self_pats_idents // and pull out getter for same field in each // of them (using `field_index` tracked above). // That is the heart of the transposition. let others = self_pats_idents.iter().map(|fields| { let (_, _opt_ident, ref other_getter_expr, _) = fields[field_index]; // All Self args have same variant, so // opt_idents are the same. (Assert // here to make it self-evident that // it is okay to ignore `_opt_ident`.) assert!(opt_ident == _opt_ident); other_getter_expr.clone() }).collect::<Vec<P<Expr>>>(); FieldInfo { span: sp, name: opt_ident, self_: self_getter_expr, other: others, attrs: attrs, } }).collect::<Vec<FieldInfo>>(); // Now, for some given VariantK, we have built up // expressions for referencing every field of every // Self arg, assuming all are instances of VariantK. // Build up code associated with such a case. let substructure = EnumMatching(index, &**variant, field_tuples); let arm_expr = self.call_substructure_method( cx, trait_, type_ident, &self_args[..], nonself_args, &substructure); cx.arm(sp, vec![single_pat], arm_expr) }).collect(); // We will usually need the catch-all after matching the // tuples `(VariantK, VariantK, ...)` for each VariantK of the // enum. But: // // * when there is only one Self arg, the arms above suffice // (and the deriving we call back into may not be prepared to // handle EnumNonMatchCollapsed), and, // // * when the enum has only one variant, the single arm that // is already present always suffices. // // * In either of the two cases above, if we *did* add a // catch-all `_` match, it would trigger the // unreachable-pattern error. // if variants.len() > 1 && self_args.len() > 1 { // Build a series of let statements mapping each self_arg // to its discriminant value. If this is a C-style enum // with a specific repr type, then casts the values to // that type. Otherwise casts to `i32` (the default repr // type). // // i.e. 
for `enum E<T> { A, B(i32), C(T, T) }`, and a deriving
        // with three Self args, builds three statements:
        //
        // ```
        // let __self0_vi = unsafe {
        //     std::intrinsics::discriminant_value(&self) } as i32;
        // let __self1_vi = unsafe {
        //     std::intrinsics::discriminant_value(&__arg1) } as i32;
        // let __self2_vi = unsafe {
        //     std::intrinsics::discriminant_value(&__arg2) } as i32;
        // ```
        let mut index_let_stmts: Vec<P<ast::Stmt>> = Vec::new();

        let target_type_name =
            find_repr_type_name(&cx.parse_sess.span_diagnostic, type_attrs);

        for (&ident, self_arg) in vi_idents.iter().zip(self_args.iter()) {
            let path = vec![cx.ident_of_std("core"),
                            cx.ident_of("intrinsics"),
                            cx.ident_of("discriminant_value")];
            let call = cx.expr_call_global(
                sp, path, vec![cx.expr_addr_of(sp, self_arg.clone())]);
            let variant_value = cx.expr_block(P(ast::Block {
                stmts: vec![],
                expr: Some(call),
                id: ast::DUMMY_NODE_ID,
                rules: ast::UnsafeBlock(ast::CompilerGenerated),
                span: sp }));

            let target_ty = cx.ty_ident(sp, cx.ident_of(target_type_name));
            let variant_disr = cx.expr_cast(sp, variant_value, target_ty);
            let let_stmt = cx.stmt_let(sp, false, ident, variant_disr);
            index_let_stmts.push(let_stmt);
        }

        let arm_expr = self.call_substructure_method(
            cx, trait_, type_ident, &self_args[..], nonself_args,
            &catch_all_substructure);

        // Builds the expression:
        // {
        //   let __self0_vi = ...;
        //   let __self1_vi = ...;
        //   ...
        //   <delegated expression referring to __self0_vi, et al.>
        // }
        let arm_expr = cx.expr_block(
            cx.block_all(sp, index_let_stmts, Some(arm_expr)));

        // Builds arm:
        // _ => { let __self0_vi = ...;
        //        let __self1_vi = ...;
        //        ...
        //        <delegated expression as above> }
        let catch_all_match_arm =
            cx.arm(sp, vec![cx.pat_wild(sp)], arm_expr);

        match_arms.push(catch_all_match_arm);

    } else if variants.is_empty() {
        // As an additional wrinkle, for a zero-variant enum A,
        // currently the compiler
        // will accept `fn (a: &Self) { match *a { } }`
        // but rejects `fn (a: &Self) { match (&*a,) { } }`
        // as well as `fn (a: &Self) { match ( *a,) { } }`
        //
        // This means that the strategy of building up a tuple of
        // all Self arguments fails when Self is a zero variant
        // enum: rustc rejects the expanded program, even though
        // the actual code tends to be impossible to execute (at
        // least safely), according to the type system.
        //
        // The most expedient fix for this is to just let the
        // code fall through to the catch-all. But even this is
        // error-prone, since the catch-all as defined above would
        // generate code like this:
        //
        //     _ => { let __self0 = match *self { };
        //            let __self1 = match *__arg_0 { };
        //            <catch-all-expr> }
        //
        // Which yields bindings for variables which type
        // inference cannot resolve to unique types.
        //
        // One option to the above might be to add explicit type
        // annotations. But the *only* reason to go down that path
        // would be to try to make the expanded output consistent
        // with the case when the number of enum variants >= 1.
        //
        // That just isn't worth it. In fact, trying to generate
        // sensible code for *any* deriving on a zero-variant enum
        // does not make sense. But at the same time, for now, we
        // do not want to cause a compile failure just because the
        // user happened to attach a deriving to their
        // zero-variant enum.
        //
        // Instead, just generate a failing expression for the
        // zero variant case, skipping matches and also skipping
        // delegating back to the end user code entirely.
        //
        // (See also #4499 and #12609; note that some of the
        // discussions there influence what choice we make here;
        // e.g.
if we feature-gate `match x { ... }` when x refers // to an uninhabited type (e.g. a zero-variant enum or a // type holding such an enum), but do not feature-gate // zero-variant enums themselves, then attempting to // derive Debug on such a type could here generate code // that needs the feature gate enabled.) return cx.expr_unreachable(sp); } // Final wrinkle: the self_args are expressions that deref // down to desired l-values, but we cannot actually deref // them when they are fed as r-values into a tuple // expression; here add a layer of borrowing, turning // `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`. let borrowed_self_args = self_args.move_map(|self_arg| cx.expr_addr_of(sp, self_arg)); let match_arg = cx.expr(sp, ast::ExprTup(borrowed_self_args)); cx.expr_match(sp, match_arg, match_arms) } fn expand_static_enum_method_body(&self, cx: &mut ExtCtxt, trait_: &TraitDef, enum_def: &EnumDef, type_ident: Ident, self_args: &[P<Expr>], nonself_args: &[P<Expr>]) -> P<Expr> { let summary = enum_def.variants.iter().map(|v| { let ident = v.node.name; let summary = match v.node.kind { ast::TupleVariantKind(ref args) => { Unnamed(args.iter().map(|va| trait_.set_expn_info(cx, va.ty.span)).collect()) } ast::StructVariantKind(ref struct_def) => { trait_.summarise_struct(cx, &**struct_def) } }; (ident, v.span, summary) }).collect(); self.call_substructure_method(cx, trait_, type_ident, self_args, nonself_args, &StaticEnum(enum_def, summary)) } } #[derive(PartialEq)] // dogfooding! enum StructType { Unknown, Record, Tuple } // general helper methods. impl<'a> TraitDef<'a> { fn set_expn_info(&self, cx: &mut ExtCtxt, mut to_set: Span) -> Span { let trait_name = match self.path.path.last() { None => cx.span_bug(self.span, "trait with empty path in generic `derive`"), Some(name) => *name }; to_set.expn_id = cx.codemap().record_expansion(codemap::ExpnInfo { call_site: to_set, callee: codemap::NameAndSpan { name: format!("derive({})", trait_name), format: codemap::MacroAttribute, span: Some(self.span), allow_internal_unstable: false, } }); to_set } fn summarise_struct(&self, cx: &mut ExtCtxt, struct_def: &StructDef) -> StaticFields { let mut named_idents = Vec::new(); let mut just_spans = Vec::new(); for field in struct_def.fields.iter(){ let sp = self.set_expn_info(cx, field.span); match field.node.kind { ast::NamedField(ident, _) => named_idents.push((ident, sp)), ast::UnnamedField(..) 
=> just_spans.push(sp), } } match (just_spans.is_empty(), named_idents.is_empty()) { (false, false) => cx.span_bug(self.span, "a struct with named and unnamed \ fields in generic `derive`"), // named fields (_, false) => Named(named_idents), // tuple structs (includes empty structs) (_, _) => Unnamed(just_spans) } } fn create_subpatterns(&self, cx: &mut ExtCtxt, field_paths: Vec<ast::SpannedIdent> , mutbl: ast::Mutability) -> Vec<P<ast::Pat>> { field_paths.iter().map(|path| { cx.pat(path.span, ast::PatIdent(ast::BindByRef(mutbl), (*path).clone(), None)) }).collect() } fn create_struct_pattern(&self, cx: &mut ExtCtxt, struct_path: ast::Path, struct_def: &'a StructDef, prefix: &str, mutbl: ast::Mutability) -> (P<ast::Pat>, Vec<(Span, Option<Ident>, P<Expr>, &'a [ast::Attribute])>) { if struct_def.fields.is_empty() { return (cx.pat_enum(self.span, struct_path, vec![]), vec![]); } let mut paths = Vec::new(); let mut ident_expr = Vec::new(); let mut struct_type = Unknown; for (i, struct_field) in struct_def.fields.iter().enumerate() { let sp = self.set_expn_info(cx, struct_field.span); let opt_id = match struct_field.node.kind { ast::NamedField(ident, _) if (struct_type == Unknown || struct_type == Record) => { struct_type = Record; Some(ident) } ast::UnnamedField(..) if (struct_type == Unknown || struct_type == Tuple) => { struct_type = Tuple; None } _ => { cx.span_bug(sp, "a struct with named and unnamed fields in `derive`"); } }; let ident = cx.ident_of(&format!("{}_{}", prefix, i)); paths.push(codemap::Spanned{span: sp, node: ident}); let val = cx.expr( sp, ast::ExprParen(cx.expr_deref(sp, cx.expr_path(cx.path_ident(sp,ident))))); ident_expr.push((sp, opt_id, val, &struct_field.node.attrs[..])); } let subpats = self.create_subpatterns(cx, paths, mutbl); // struct_type is definitely not Unknown, since struct_def.fields // must be nonempty to reach here let pattern = if struct_type == Record { let field_pats = subpats.into_iter().zip(ident_expr.iter()) .map(|(pat, &(_, id, _, _))| { // id is guaranteed to be Some codemap::Spanned { span: pat.span, node: ast::FieldPat { ident: id.unwrap(), pat: pat, is_shorthand: false }, } }).collect(); cx.pat_struct(self.span, struct_path, field_pats) } else { cx.pat_enum(self.span, struct_path, subpats) }; (pattern, ident_expr) } fn create_enum_variant_pattern(&self, cx: &mut ExtCtxt, enum_ident: ast::Ident, variant: &'a ast::Variant, prefix: &str, mutbl: ast::Mutability) -> (P<ast::Pat>, Vec<(Span, Option<Ident>, P<Expr>, &'a [ast::Attribute])>) { let variant_ident = variant.node.name; let variant_path = cx.path(variant.span, vec![enum_ident, variant_ident]); match variant.node.kind { ast::TupleVariantKind(ref variant_args) => { if variant_args.is_empty() { return (cx.pat_enum(variant.span, variant_path, vec![]), vec![]); } let mut paths = Vec::new(); let mut ident_expr: Vec<(_, _, _, &'a [ast::Attribute])> = Vec::new(); for (i, va) in variant_args.iter().enumerate() { let sp = self.set_expn_info(cx, va.ty.span); let ident = cx.ident_of(&format!("{}_{}", prefix, i)); let path1 = codemap::Spanned{span: sp, node: ident}; paths.push(path1); let expr_path = cx.expr_path(cx.path_ident(sp, ident)); let val = cx.expr(sp, ast::ExprParen(cx.expr_deref(sp, expr_path))); ident_expr.push((sp, None, val, &[])); } let subpats = self.create_subpatterns(cx, paths, mutbl); (cx.pat_enum(variant.span, variant_path, subpats), ident_expr) } ast::StructVariantKind(ref struct_def) => { self.create_struct_pattern(cx, variant_path, &**struct_def, prefix, mutbl) } } } } /* helpful 
premade recipes */ /// Fold the fields. `use_foldl` controls whether this is done /// left-to-right (`true`) or right-to-left (`false`). pub fn cs_fold<F>(use_foldl: bool, mut f: F, base: P<Expr>, mut enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, trait_span: Span, substructure: &Substructure) -> P<Expr> where F: FnMut(&mut ExtCtxt, Span, P<Expr>, P<Expr>, &[P<Expr>]) -> P<Expr>, { match *substructure.fields { EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => { if use_foldl { all_fields.iter().fold(base, |old, field| { f(cx, field.span, old, field.self_.clone(), &field.other) }) } else { all_fields.iter().rev().fold(base, |old, field| { f(cx, field.span, old, field.self_.clone(), &field.other) }) } }, EnumNonMatchingCollapsed(ref all_args, _, tuple) => enum_nonmatch_f(cx, trait_span, (&all_args[..], tuple), substructure.nonself_args), StaticEnum(..) | StaticStruct(..) => { cx.span_bug(trait_span, "static function in `derive`") } } } /// Call the method that is being derived on all the fields, and then /// process the collected results. i.e. /// /// ``` /// f(cx, span, vec![self_1.method(__arg_1_1, __arg_2_1), /// self_2.method(__arg_1_2, __arg_2_2)]) /// ``` #[inline] pub fn cs_same_method<F>(f: F, mut enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, trait_span: Span, substructure: &Substructure) -> P<Expr> where F: FnOnce(&mut ExtCtxt, Span, Vec<P<Expr>>) -> P<Expr>, { match *substructure.fields { EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => { // call self_n.method(other_1_n, other_2_n, ...) let called = all_fields.iter().map(|field| { cx.expr_method_call(field.span, field.self_.clone(), substructure.method_ident, field.other.iter() .map(|e| cx.expr_addr_of(field.span, e.clone())) .collect()) }).collect(); f(cx, trait_span, called) }, EnumNonMatchingCollapsed(ref all_self_args, _, tuple) => enum_nonmatch_f(cx, trait_span, (&all_self_args[..], tuple), substructure.nonself_args), StaticEnum(..) | StaticStruct(..) => { cx.span_bug(trait_span, "static function in `derive`") } } } /// Fold together the results of calling the derived method on all the /// fields. `use_foldl` controls whether this is done left-to-right /// (`true`) or right-to-left (`false`). #[inline] pub fn cs_same_method_fold<F>(use_foldl: bool, mut f: F, base: P<Expr>, enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, trait_span: Span, substructure: &Substructure) -> P<Expr> where F: FnMut(&mut ExtCtxt, Span, P<Expr>, P<Expr>) -> P<Expr>, { cs_same_method( |cx, span, vals| { if use_foldl { vals.into_iter().fold(base.clone(), |old, new| { f(cx, span, old, new) }) } else { vals.into_iter().rev().fold(base.clone(), |old, new| { f(cx, span, old, new) }) } }, enum_nonmatch_f, cx, trait_span, substructure) } /// Use a given binop to combine the result of calling the derived method /// on all the fields. 
#[inline] pub fn cs_binop(binop: ast::BinOp_, base: P<Expr>, enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, trait_span: Span, substructure: &Substructure) -> P<Expr> { cs_same_method_fold( true, // foldl is good enough |cx, span, old, new| { cx.expr_binary(span, binop, old, new) }, base, enum_nonmatch_f, cx, trait_span, substructure) } /// cs_binop with binop == or #[inline] pub fn cs_or(enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, span: Span, substructure: &Substructure) -> P<Expr> { cs_binop(ast::BiOr, cx.expr_bool(span, false), enum_nonmatch_f, cx, span, substructure) } /// cs_binop with binop == and #[inline] pub fn cs_and(enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, span: Span, substructure: &Substructure) -> P<Expr> { cs_binop(ast::BiAnd, cx.expr_bool(span, true), enum_nonmatch_f, cx, span, substructure) }
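// Illustrative sketch (not part of this file): a deriving such as the
// historical `eq` implementation would combine the per-field method calls
// with `cs_and`, using `false` as the result for the non-matching enum
// case, roughly:
//
//     fn cs_eq(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<Expr> {
//         cs_and(|cx, span, _, _| cx.expr_bool(span, false),
//                cx, span, substr)
//     }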
create_derived_impl
make_facebank.py
# make facebank import warnings warnings.filterwarnings("ignore") import os import torch from model import Backbone import argparse from pathlib import Path from torchvision import transforms as trans from PIL import Image import numpy as np def
(path_images, facebank_path, model, mtcnn, device, tta=True):
    # NOTE: `mtcnn` is accepted for API compatibility but unused here;
    # the input images are assumed to be pre-aligned face crops.
    test_transform_ = trans.Compose([
        trans.ToTensor(),
        trans.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5])
    ])
    model.eval()
    embeddings = []
    names = ['Unknown']  # index 0 is reserved for the "Unknown" label
    idx = 0
    for path in path_images.iterdir():
        if path.is_file():
            continue
        else:
            idx += 1
            print("idx {} : {}".format(idx, path))
            embs = []
            for file in path.iterdir():
                if not file.is_file():
                    continue
                else:
                    try:
                        img = Image.open(file)
                        print(" {}) {}".format(idx, file))
                    except Exception:
                        continue
                    with torch.no_grad():
                        if tta:
                            # Test-time augmentation: average the embedding of the
                            # image and its horizontal mirror, then re-normalize.
                            mirror = trans.functional.hflip(img)
                            emb = model(test_transform_(img).to(device).unsqueeze(0))
                            emb_mirror = model(test_transform_(mirror).to(device).unsqueeze(0))
                            embs.append(l2_norm(emb + emb_mirror))
                        else:
                            embs.append(model(test_transform_(img).to(device).unsqueeze(0)))
            if len(embs) == 0:
                continue
            embedding = torch.cat(embs).mean(0, keepdim=True)
            embeddings.append(embedding)
            names.append(path.name)
    embeddings = torch.cat(embeddings)
    names = np.array(names)
    torch.save(embeddings, facebank_path + '/facebank.pth')
    np.save(facebank_path + '/names', names)
    return embeddings, names

def l2_norm(input, axis=1):
    # `l2_norm` was referenced above but missing from the original file; this is
    # the standard InsightFace-style L2 normalization (assumed implementation).
    norm = torch.norm(input, 2, axis, True)
    return torch.div(input, norm)

if __name__ == '__main__':
    # Directory of images used to build the face database
    path_images = "./images/"
    # Define the model
    device_ = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    model_ = Backbone(50, 1., "ir_se").to(device_)
    # Load the weights (map_location keeps CPU-only machines working)
    if os.access("./model_ir_se50.pth", os.F_OK):
        model_.load_state_dict(torch.load("./model_ir_se50.pth", map_location=device_))
    model_.eval()
    facebank_path = "./facebank/"  # where the face database is written
    # Build the face database
    targets, names = prepare_facebank(Path(path_images), facebank_path, model_, "", device_, tta=False)
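# Hedged usage sketch (not part of the original script): downstream recognition
# code would load the saved facebank and match a query embedding by distance,
# e.g. roughly (threshold value is an assumption):
#
#   targets = torch.load('./facebank/facebank.pth')   # (N, 512) embeddings
#   names = np.load('./facebank/names.npy')           # N + 1 labels, 'Unknown' first
#   dist = (query_emb - targets).norm(dim=1)          # query_emb: (1, 512)
#   label = names[dist.argmin() + 1] if dist.min() < 1.5 else names[0]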
prepare_facebank
error.rs
use bitcoincore_rpc::{ bitcoin::{ consensus::encode::Error as BitcoinEncodeError, hashes::Error as HashesError, secp256k1::Error as Secp256k1Error, util::{address::Error as AddressError, key::Error as KeyError}, }, jsonrpc::{error::RpcError, Error as JsonRpcError}, Error as BitcoinError, }; use hex::FromHexError; use hyper::Error as HyperError; use serde_json::Error as SerdeJsonError; use std::io::ErrorKind as IoErrorKind; use thiserror::Error; use tokio::time::error::Elapsed; #[derive(Error, Debug)] pub enum ConversionError { #[error("FromHexError: {0}")] FromHexError(#[from] FromHexError), #[error("AddressError: {0}")] AddressError(#[from] AddressError), #[error("HashesError: {0}")] HashesError(#[from] HashesError), #[error("Invalid format")] InvalidFormat, #[error("Invalid payload")] InvalidPayload, #[error("Could not convert block hash")] BlockHashError, } #[derive(Error, Debug)] pub enum Error { #[error("BitcoinEncodeError: {0}")] BitcoinEncodeError(#[from] BitcoinEncodeError), #[error("BitcoinError: {0}")] BitcoinError(#[from] BitcoinError), #[error("ConversionError: {0}")] ConversionError(#[from] ConversionError), #[error("Error occurred in callback: {0}")] CallbackError(Box<dyn std::error::Error + Send + Sync>), #[error("Json error: {0}")] SerdeJsonError(#[from] SerdeJsonError), #[error("Secp256k1Error: {0}")] Secp256k1Error(#[from] Secp256k1Error), #[error("KeyError: {0}")] KeyError(#[from] KeyError), #[error("Timeout: {0}")] TimeElapsed(#[from] Elapsed), #[error("Could not confirm transaction")] ConfirmationError, #[error("Could not find block at height")] InvalidBitcoinHeight, #[error("Failed to sign transaction")] TransactionSigningError, #[error("Failed to parse transaction")] ParsingError, #[error("Failed to obtain public key")] MissingPublicKey, #[error("Failed to connect")] ConnectionRefused, #[error("Wallet not found")] WalletNotFound, #[error("Invalid Bitcoin network")] InvalidBitcoinNetwork, } impl Error { pub fn is_connection_refused(&self) -> bool { matches!(self, Self::BitcoinError(BitcoinError::JsonRpc(JsonRpcError::Hyper(HyperError::Io(err)))) if err.kind() == IoErrorKind::ConnectionRefused ) } pub fn is_connection_aborted(&self) -> bool { matches!(self, Self::BitcoinError(BitcoinError::JsonRpc(JsonRpcError::Hyper(HyperError::Io(err)))) if err.kind() == IoErrorKind::ConnectionAborted ) } pub fn is_json_decode_error(&self) -> bool { matches!( self, Self::BitcoinError(BitcoinError::JsonRpc(JsonRpcError::Json(_))) ) } pub fn is_wallet_error(&self) -> bool { matches!(self, Self::BitcoinError(BitcoinError::JsonRpc(JsonRpcError::Rpc(err))) if BitcoinRpcError::from(err.clone()) == BitcoinRpcError::RpcWalletError ) } pub fn is_wallet_not_found(&self) -> bool
pub fn is_invalid_parameter(&self) -> bool { matches!(self, Self::BitcoinError(BitcoinError::JsonRpc(JsonRpcError::Rpc(err))) if BitcoinRpcError::from(err.clone()) == BitcoinRpcError::RpcInvalidParameter ) } } #[derive(Debug, FromPrimitive, PartialEq, Eq)] pub enum BitcoinRpcError { /// Standard JSON-RPC 2.0 errors RpcInvalidRequest = -32600, RpcMethodNotFound = -32601, RpcInvalidParams = -32602, RpcInternalError = -32603, RpcParseError = -32700, /// General application defined errors RpcMiscError = -1, RpcTypeError = -3, RpcInvalidAddressOrKey = -5, RpcOutOfMemory = -7, RpcInvalidParameter = -8, RpcDatabaseError = -20, RpcDeserializationErrr = -22, RpcVerifyError = -25, RpcVerifyRejected = -26, RpcVerifyAlreadyInChain = -27, RpcInWarmup = -28, RpcMethodDeprecated = -32, /// Aliases for backward compatibility // RpcTransactionError = RpcVerifyError, // RpcTransactionRejected = RpcVerifyRejected, // RpcTransactionAlreadyInChain = RpcVerifyAlreadyInChain, /// P2P client errors RpcClientNotConnected = -9, RpcClientInInitialDownload = -10, RpcClientNodeAlreadyAdded = -23, RpcClientNodeNotAdded = -24, RpcClientNodeNotConnected = -29, RpcClientInvalidIpOrSubnet = -30, RpcClientP2PDisabled = -31, /// Chain errors RpcClientMempoolDisabled = -33, /// Wallet errors RpcWalletError = -4, RpcWalletInsufficientFunds = -6, RpcWalletInvalidLabelName = -11, RpcWalletKeypoolRanOut = -12, RpcWalletUnlockNeeded = -13, RpcWalletPassphraseIncorrect = -14, RpcWalletWrongEncState = -15, RpcWalletEncryptionFailed = -16, RpcWalletAlreadyUnlocked = -17, RpcWalletNotFound = -18, RpcWalletNotSpecified = -19, /// Backwards compatible aliases // RpcWalletInvalidAccountName = RpcWalletInvalidLabelName, /// Unused reserved codes. RpcForbiddenBySafeMode = -2, /// Unknown error code (not in spec). RpcUnknownError = 0, } impl From<RpcError> for BitcoinRpcError { fn from(err: RpcError) -> Self { match num::FromPrimitive::from_i32(err.code) { Some(err) => err, None => Self::RpcUnknownError, } } }
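// Hedged usage sketch (helper name assumed, not part of this module): callers
// typically branch on these predicates to decide whether an RPC failure is
// transient and worth retrying, e.g.:
//
//     fn is_transient(err: &Error) -> bool {
//         err.is_connection_refused() || err.is_connection_aborted()
//     }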
{ matches!(self, Error::BitcoinError(BitcoinError::JsonRpc(JsonRpcError::Rpc(err))) if BitcoinRpcError::from(err.clone()) == BitcoinRpcError::RpcWalletNotFound ) }
gulpfile.js
const gulp = require('gulp') const gulpRename = require('gulp-rename') const gulpClean = require('gulp-clean') const gulpReplace = require('gulp-replace') const relative = require('relative') const prettier = require('prettier') const resolve = require('resolve') const webpackStream = require('webpack-stream') const webpack = require('webpack') const gulpEslint = require('gulp-eslint') const deepEqual = require('deep-equal') const gulpStylelint = require('gulp-stylelint') const del = require('del') const path = require('path') const argv = require('yargs').argv const options = { 'target': argv.target || 'weapp' } const APP_TYPE = options.target const isWeapp = options.target === 'weapp' const isSwan = options.target === 'swan' const isAliapp = options.target === 'aliapp' const isQqapp = options.target === 'qqapp' const isTtapp = options.target === 'ttapp' let dependencies = null let prevDependencies = null let DEST = '' let API_ADAPTER = '' if (isWeapp) { DEST = 'distWeapp' } else if (isSwan) { DEST = 'distSwan' API_ADAPTER = 'swan.' } else if (isQqapp) { DEST = 'distQqapp' API_ADAPTER = 'qq.' } else if (isTtapp) { DEST = 'distTtapp' API_ADAPTER = 'tt.' } else { DEST = 'distAliapp' API_ADAPTER = 'my.' } // XML = 'src/**/*.{wxml,wxss}' const SRC = { STYLE: ['src/**/*.{css,scss}', '!src/adapters/**/*'], SCRIPT: ['src/**/*.js', '!src/adapters/**/*'], JSON: ['src/**/*.json', '!src/project*.json', '!src/adapters/**/*'], IMAGE: ['src/**/*.{png,jpg,jpeg,gif,svg}', '!src/adapters/**/*'], CONFIG: 'src/project*.json', XML: ['src/**/*.wxml', '!src/adapters/**/*'], ADAPTER: [`src/adapters/unique/${APP_TYPE}/**/*`, `src/adapters/common/${APP_TYPE}/**/*`] } function handleError (err) { console.log(err.toString()) process.exit(-1) } function imagemin() { return gulp .src(SRC.IMAGE, {since: gulp.lastRun(imagemin)}) .pipe(gulp.dest(DEST)) } function cleanDist() { return gulp.src(DEST, { allowEmpty: true }).pipe(gulpClean()) } let preProcessAdapterPath = [] // 兼容代码预处理 function preProcessAdapter() { return gulp .src(SRC.ADAPTER, {since: gulp.lastRun(preProcessAdapter)}) .pipe( gulpRename(path => { if (path.extname !== '') { preProcessAdapterPath.push(`src/${path.dirname}/${path.basename}${path.extname}`) } }) ) .pipe(gulp.dest('src')) } function compileXmlCommon () { return gulp .src(SRC.XML, {since: gulp.lastRun(xml)}) .pipe( gulpReplace(/APP\_TYPE/g, function (match) { return `'${APP_TYPE}'` }) ) } function xml() { return compileXmlCommon().pipe(gulp.dest(DEST)) } function eslint() { return gulp .src(SRC.SCRIPT, {since: gulp.lastRun(eslint)}) .pipe( gulpEslint({ configFile: '.eslintrc', fix: true, }) ) .pipe(gulpEslint.format()) .pipe(gulp.dest('./src/')) } function compileScriptCommon() { return gulp .src(SRC.SCRIPT, {since: gulp.lastRun(pathScript)}) .pipe( gulpReplace(/('src\/).*'/g, function (match) { let relativePath = `'${relative( `/src/${this.file.relative}`, `/${match.substring(1, match.length - 1)}` )}'` if (!/^'\.\.\/.*/.test(relativePath)) { relativePath = `'./${relativePath.substring(1, match.length)}` } return relativePath }) ) .pipe( gulpReplace(/\bfrom '[^\.]\S*'/g, function (match) { const dependency = match.substring(6, match.length - 1) const relativePath = relative( `/${this.file.relative}`, `/node/${dependency}` ) dependencies = { ...dependencies, [dependency]: resolve.sync(dependency, { basedir: __dirname }), } return `from '${relativePath}'` }) ).pipe( gulpReplace(/APP\_TYPE/g, function (match) { return `'${APP_TYPE}'` }) ) } function pathScript() { return 
compileScriptCommon().pipe(gulp.dest(DEST))
}

// Alipay mini-program adaptation: component lifecycle hooks / component properties
function compileScript2Aliapp() {
  return compileScriptCommon()
    .pipe(
      gulpReplace(/attached|ready/g, function (match) {
        return 'didMount'
      })
    )
    .pipe(
      gulpReplace(/detached/g, function (match) {
        return 'didUnmount'
      })
    )
    .pipe(
      gulpReplace(/\.properties/g, function (match) {
        return '.props'
      })
    )
    .pipe(
      gulpReplace(/wx\./g, function (match) {
        return API_ADAPTER
      })
    )
    .pipe(gulp.dest(DEST))
}

// Baidu (Swan) mini-program script adaptation
function compileScript2Swan() {
  return compileScriptCommon()
    .pipe(
) .pipe(gulp.dest(DEST)) } // QQ小程序适配 script function compileScript2Qqapp() { return compileScriptCommon() .pipe( gulpReplace(/wx\./g, function (match) { return API_ADAPTER }) ) .pipe(gulp.dest(DEST)) } // 头条小程序适配 script function compileScript2Ttapp() { return compileScriptCommon() .pipe( gulpReplace(/wx\./g, function (match) { return API_ADAPTER }) ) .pipe(gulp.dest(DEST)) } function packDep(done) { const flag = dependencies && deepEqual(dependencies, prevDependencies) console.log(dependencies) console.log(prevDependencies) if (flag) return done() prevDependencies = dependencies return gulp .src('src/app.js') .pipe( webpackStream( { mode: 'production', watch: false, entry: dependencies, output: { libraryTarget: 'umd', filename: '[name].js', }, }, webpack ) ) .pipe(gulp.dest(`${DEST}/node`)) } function pathJson() { return gulp .src(SRC.JSON, {since: gulp.lastRun(pathJson)}) .pipe( gulpReplace(/("src\/).*"/g, function (match) { const relativePath = `"${relative( `/src/${this.file.relative}`, `/${match.substring(1, match.length - 1)}` )}"` return relativePath }) ) .pipe(gulp.dest(DEST)) } function stylelint() { let cssName = '' if (isWeapp) { cssName = '.wxss' } else if (isQqapp) { cssName = '.qss' } else if (isSwan) { cssName = '.css' } else if (isTtapp) { cssName = '.ttss' } else { cssName = '.acss' } return gulp .src(SRC.STYLE, {since: gulp.lastRun(stylelint)}) .pipe(gulpStylelint({ failAfterError: false, fix: true, }).on('error', handleError)) .pipe(gulp.dest('src')) .pipe( gulpRename(path => { path.extname = cssName }) ) .pipe( gulpReplace(/\.css/g, function (match) { return cssName }) ) .pipe(gulp.dest(DEST)) } function compileXML2Swan() { return compileXmlCommon() .pipe( gulpReplace(/(if|elif|for)=\"\{\{(.*?)\}\}\"/g, function (match, p1, p2) { return `${p1}="${p2}"` }) ) .pipe( gulpReplace(/wx\:|\.wxml/g, function (match) { if (match === '.wxml') { return '.swan' } else { return 's-' } }) ) .pipe( gulpReplace(/(\<template.*data=\")\{\{(.*?)\}\}\"/g, function (match, p1, p2) { return `${p1}{{{${p2}}}}"` }) ) .pipe( gulpRename(path => { path.extname = '.swan' }) ) .pipe(gulp.dest(DEST)) } function compileXML2Aliapp() { return compileXmlCommon() .pipe( gulpReplace(/wx\:|\.wxml/g, function (match) { if (match === '.wxml') { return '.axml' } else { return 'a:' } }) ) .pipe( gulpReplace(/\s(capture\-bind|catch)\:?([a-z])ouch([a-z])/g, function (match, p1, p2, p3) { return ` catch${p2.toUpperCase()}ouch${p3.toUpperCase()}` }) ) .pipe( gulpReplace(/\s(bind|catch)\:?([a-z])ong([a-z])/g, function (match, p1, p2, p3) { const key = p1 === 'catch' ? p1 : 'on' return ` ${key}${p2.toUpperCase()}ong${p3.toUpperCase()}` }) ) .pipe( gulpReplace(/\s(bind|catch)\:?([a-z])/g, function (match, p1, p2) { const key = p1 === 'catch' ? 
p1 : 'on' return ` ${key}${p2.toUpperCase()}` }) ) .pipe( gulpRename(path => { path.extname = '.axml' }) ) .pipe(gulp.dest(DEST)) } function compileXML2Qqapp() { return compileXmlCommon() .pipe( gulpReplace(/wx\:|\.wxml/g, function (match) { if (match === '.wxml') { return '.qml' } else { return 'qq:' } }) ) .pipe( gulpRename(path => { path.extname = '.qml' }) ) .pipe(gulp.dest(DEST)) } function compileXML2Ttapp() { return gulp .src(SRC.XML) .pipe( gulpReplace(/wx\:|\.wxml/g, function (match) { if (match === '.wxml') { return '.ttml' } else { return 'tt:' } }) ) .pipe( gulpRename(path => { path.extname = '.ttml' }) ) .pipe(gulp.dest(DEST)) } // todo function copyConfig() { return gulp .src(`config/project.${APP_TYPE}.json`, {since: gulp.lastRun(copyConfig)}) .pipe( gulpRename(path => { if (isWeapp || isQqapp || isTtapp) { path.basename = 'project.config' } }) ) .pipe(gulp.dest(DEST)) } function sass() { return gulp .src('src/**/*.swan', {since: gulp.lastRun(sass), allowEmpty: true}) .pipe(gulpClean()) // .pipe( // gulpRename(path => { // path.extname = '.swan' // }) // ) // .pipe(gulp.dest('src')) // return gulp // .src(SRC.STYLE, {since: gulp.lastRun(sass)}) // .pipe(gulpSourcemaps.init()) // .pipe(gulpSass().on('error', gulpSass.logError)) // .pipe(gulpSourcemaps.write()) // .pipe( // gulpRename(path => { // path.extname = '.css' // }) // ) // .pipe(gulp.dest(DEST)) } function formatScript() { return gulp .src(SRC.SCRIPT, {since: gulp.lastRun(formatScript)}) .pipe( gulpReplace(/([\s\S]*)/, (match, string) => { const options = { semi: false, singleQuote: true, trailingComma: 'all', } return prettier.format(string, options) }) ) .pipe(gulp.dest('src')) } const swan = gulp.series(compileXML2Swan) const axml = gulp.series(compileXML2Aliapp) const qml = gulp.series(compileXML2Qqapp) const ttml = gulp.series(compileXML2Ttapp) let js = null let mpXml = null if (isAliapp) { js = gulp.series(eslint, compileScript2Aliapp, packDep) mpXml = axml } else if (isSwan) { js = gulp.series(eslint, compileScript2Swan, packDep) mpXml = swan } else if (isQqapp) { js = gulp.series(eslint, compileScript2Qqapp, packDep) mpXml = qml } else if (isTtapp) { js = gulp.series(eslint, compileScript2Ttapp, packDep) mpXml = ttml } else { js = gulp.series(eslint, pathScript, packDep) mpXml = xml } const styles = gulp.series(stylelint) const config = gulp.series(copyConfig) const adapter = gulp.series(preProcessAdapter) const build = gulp.series(cleanDist, adapter, gulp.parallel(pathJson, imagemin, js, styles, config, mpXml)) const start = gulp.series(build) // const watch = gulp.series(watch) // todo watch优化 function watch() { gulp.watch(SRC.ADAPTER, preProcessAdapter) gulp.watch(SRC.STYLE, styles) gulp.watch(SRC.SCRIPT, js) gulp.watch(SRC.JSON, pathJson) gulp.watch(SRC.IMAGE, imagemin) gulp.watch(SRC.XML, mpXml) gulp.watch(SRC.CONFIG, config) gulp.watch([SRC.STYLE, SRC.SCRIPT, SRC.JSON, SRC.IMAGE, SRC.XML, SRC.CONFIG]) .on('unlink', function(filepath) { const filePathFromSrc = path.relative(path.resolve('src'), filepath); const destFilePath = path.resolve('dist', filePathFromSrc); del.sync(destFilePath); }) } gulp.task('js', js) gulp.task('format', gulp.series(formatScript, eslint, stylelint)) gulp.task('stylelint', stylelint) gulp.task('watch', watch) gulp.task('start', start)
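// Illustrative example (assumed markup, not from this repo) of what the XML
// adapters above produce. For `--target swan`, compileXML2Swan rewrites:
//
//   <view wx:if="{{show}}">hi</view>   ->   <view s-if="show">hi</view>
//   <import src="./card.wxml" />       ->   <import src="./card.swan" />
//
// while compileXML2Aliapp maps `wx:` to `a:` and event bindings such as
// `bindtap` to `onTap` via the capitalization regexes.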
gulpReplace(/wx\./g, function (match) { return API_ADAPTER })
winsys.go
//go:generate go run golang.org/x/sys/windows/mkwinsyscall -output zsyscall_windows.go syscall_windows.go
// +build windows package winsys
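// Hedged illustration (not from this package): mkwinsyscall reads //sys
// annotations from syscall_windows.go and emits the matching wrappers into
// zsyscall_windows.go, e.g. a declaration along the lines of:
//
//	//sys	CloseHandle(h windows.Handle) (err error) = kernel32.CloseHandle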
serialize_tree.py
# Given the root to a binary tree, implement serialize(root), which serializes # the tree into a string, and deserialize(s), which deserializes the string back # into the tree. class
:
    def __init__(self, val, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right


def serialize(node):
    # Level-order (BFS) encoding, kept as a list here (as in the original)
    # rather than a string: emit the root value, then for every non-None node
    # already emitted, the values of its two children (None included).
    ans = []
    level = [node]
    while level:
        children = []
        for n in level:
            if n:
                children.append(n.left)
                children.append(n.right)
            ans.append(n.val if n else None)
        level = children
    return ans


def deserialize(values):
    # Rebuild the tree by consuming values in the same BFS order that
    # serialize() produced them. (The original heap-style indexing with
    # index*2+1 / index*2+2 mis-read trees in which a None appears before a
    # real node on the same level, and wrapped trailing None entries in
    # spurious Node(None) children.)
    it = iter(values)
    root_val = next(it, None)
    if root_val is None:
        return None
    root = Node(root_val)
    queue = [root]
    for node in queue:  # appending while iterating performs the BFS
        left_val = next(it, None)
        right_val = next(it, None)
        if left_val is not None:
            node.left = Node(left_val)
            queue.append(node.left)
        if right_val is not None:
            node.right = Node(right_val)
            queue.append(node.right)
    return root


if __name__ == '__main__':
    node = Node('root', Node('left', Node('left.left')), Node('right'))
    print(serialize(node))
    assert deserialize(serialize(node)).left.left.val == 'left.left'
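# Hedged round-trip example (illustrative, not part of the original tests):
# a lopsided tree exercises the case the BFS decoder must handle correctly,
# where a None slot precedes a real node on the same level:
#
#   tree = Node('a', None, Node('b', Node('c')))
#   assert deserialize(serialize(tree)).right.left.val == 'c'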
Node
builtInFunctionsMap.ts
/** * @module botbuilder-lg-LSP */ /** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. */ import { ReturnType } from 'adaptive-expressions'; export class
{ public constructor(params: string[], returntype: ReturnType, introduction: string) { this.Params = params; this.Returntype = returntype; this.Introduction = introduction; } public Params: string[]; public Returntype: ReturnType; public Introduction: string; } // https://github.com/microsoft/BotBuilder-Samples/blob/master/experimental/common-expression-language/prebuilt-functions.md export const buildInFunctionsMap: Map<string, FunctionEntity> = new Map<string, FunctionEntity>([ [ 'add', new FunctionEntity( ['num1: number', 'num2: number'], ReturnType.Number, 'Return the result from adding two numbers.' ), ], [ 'div', new FunctionEntity( ['dividend: number', 'divisor: number'], ReturnType.Number, 'Return the integer result from dividing two numbers. To get the remainder result, see mod().' ), ], [ 'mod', new FunctionEntity( ['dividend: number', 'divisor: number'], ReturnType.Number, 'Return the remainder from dividing two numbers. To get the integer result, see div().' ), ], [ 'mul', new FunctionEntity( ['multiplicand1: number', 'multiplicand2: number'], ReturnType.Number, 'Return the product from multiplying two numbers.' ), ], [ 'sub', new FunctionEntity( ['minuend: number', 'subtrahend: number'], ReturnType.Number, 'Return the result from subtracting the second number from the first number.' ), ], [ 'exp', new FunctionEntity( ['minuend: number', 'subtrahend: number'], ReturnType.Number, 'Return exponentiation of one number to another.' ), ], [ 'concat', new FunctionEntity( ['...strings: string[]'], ReturnType.String, 'Combine two or more strings and return the resulting string. E.g. concat(‘hello’, ‘world’, ‘…’)' ), ], [ 'not', new FunctionEntity( ['expression: bool'], ReturnType.Boolean, 'Check whether an expression is false. Return true when the expression is false, or return false when true.' ), ], [ 'and', new FunctionEntity( ['...input: any[]'], ReturnType.Boolean, 'Check whether all expressions are true. Return true when all expressions are true, or return false when at least one expression is false.' ), ], [ 'or', new FunctionEntity( ['...input: any[]'], ReturnType.Boolean, 'Check whether at least one expression is true. Return true when at least one expression is true, or return false when all are false.' ), ], [ 'equals', new FunctionEntity( ['...input: any[]'], ReturnType.Boolean, 'Comparison equal. Returns true if specified values are equal' ), ], [ 'greater', new FunctionEntity( ['value: any', 'compareTo: any'], ReturnType.Boolean, 'Check whether the first value is greater than the second value. Return true when the first value is more, or return false when less.' ), ], [ 'greaterOrEquals', new FunctionEntity( ['value: any', 'compareTo: any'], ReturnType.Boolean, 'Check whether the first value is greater than or equal to the second value. Return true when the first value is greater or equal, or return false when the first value is less.' ), ], [ 'less', new FunctionEntity( ['value: any', 'compareTo: any'], ReturnType.Boolean, 'Check whether the first value is less than the second value. Return true when the first value is less, or return false when the first value is more.' ), ], [ 'lessOrEquals', new FunctionEntity( ['value: any', 'compareTo: any'], ReturnType.Boolean, 'Check whether the first value is less than or equal to the second value. Return true when the first value is less than or equal, or return false when the first value is more.' 
),
    ],
    [
        'join',
        new FunctionEntity(
            ['collection: Array', 'delimiter: string'],
            ReturnType.String,
            'Return a string that has all the items from an array and has each character separated by a delimiter.'
        ),
    ],
    ['empty', new FunctionEntity(['collection: any'], ReturnType.Boolean, 'Check if the collection is empty')],
    ['newGuid', new FunctionEntity([], ReturnType.String, 'Return new guid string')],
    [
        'min',
        new FunctionEntity(['...numbers: number[]'], ReturnType.Number, 'Returns the smallest value from a collection'),
    ],
    [
        'max',
        new FunctionEntity(['...numbers: number[]'], ReturnType.Number, 'Returns the largest value from a collection'),
    ],
    [
        'average',
        new FunctionEntity(['...numbers: number[]'], ReturnType.Number, 'Returns the average value from a collection'),
    ],
    [
        'sum',
        new FunctionEntity(['...numbers: number[]'], ReturnType.Number, 'Return the result from adding numbers in a list.'),
    ],
    [
        'exists',
        new FunctionEntity(['expression: expression'], ReturnType.Boolean, 'Evaluates an expression for truthiness'),
    ],
    ['length', new FunctionEntity(['str: string'], ReturnType.Number, 'Returns the length of a string')],
    [
        'replace',
        new FunctionEntity(
            ['text: string', 'oldText: string', 'newText: string'],
            ReturnType.String,
            'Replace a substring with the specified string, and return the updated string. Case sensitive.'
        ),
    ],
    [
        'replaceIgnoreCase',
        new FunctionEntity(
            ['text: string', 'oldText: string', 'newText: string'],
            ReturnType.String,
            'Replace a substring with the specified string, and return the updated string. Case insensitive.'
        ),
    ],
    [
        'split',
        new FunctionEntity(
            ['text: string', 'delimiter: string'],
            ReturnType.Object,
            'Returns an array that contains substrings based on the delimiter specified.'
        ),
    ],
    [
        'substring',
        new FunctionEntity(
            ['text: string', 'startIndex: number', 'length?: number'],
            ReturnType.String,
            'Returns characters from a string. Substring(sourceString, startPos, endPos). startPos cannot be less than 0. endPos greater than the source string length will be taken as the max length of the string'
        ),
    ],
    ['toLower', new FunctionEntity(['text: string'], ReturnType.String, 'Convert a string to all lower case characters')],
    ['toUpper', new FunctionEntity(['text: string'], ReturnType.String, 'Convert a string to all upper case characters')],
    [
        'trim',
        new FunctionEntity(['text: string'], ReturnType.String, 'Remove leading and trailing white spaces from a string'),
    ],
    [
        'count',
        new FunctionEntity(
            ['collection: string|Array'],
            ReturnType.Number,
            'Returns the number of items in the collection'
        ),
    ],
    [
        'contains',
        new FunctionEntity(
            ['collection: string|Array|Map', 'value: string|Array|Map'],
            ReturnType.Boolean,
            'Works to find an item in a string or to find an item in an array or to find a parameter in a complex object. E.g.
contains(‘hello world, ‘hello); contains([‘1’, ‘2’], ‘1’); contains({“foo”:”bar”}, “foo”)' ), ], [ 'first', new FunctionEntity(['collection: string|Array'], ReturnType.Object, 'Returns the first item from the collection'), ], [ 'last', new FunctionEntity(['collection: string|Array'], ReturnType.Object, 'Returns the last item from the collection'), ], [ 'foreach', new FunctionEntity( ['collection: Array | Object', 'iteratorName: string', 'function: any'], ReturnType.Object, 'Operate on each element and return the new collection' ), ], [ 'addDays', new FunctionEntity( ['timestamp: string', 'days: number', 'format?: string'], ReturnType.String, 'Add number of specified days to a given timestamp' ), ], [ 'addHours', new FunctionEntity( ['timestamp: string', 'hours: number', 'format?: string'], ReturnType.String, 'Add specified number of hours to a given timestamp' ), ], [ 'addMinutes', new FunctionEntity( ['timestamp: string', 'minutes: number', 'format?: string'], ReturnType.String, 'Add specified number of minutes to a given timestamp' ), ], [ 'addSeconds', new FunctionEntity( ['timestamp: string', 'seconds: number', 'format?: string'], ReturnType.String, 'Add specified number of seconds to a given timestamp' ), ], [ 'dayOfMonth', new FunctionEntity( ['timestamp: string'], ReturnType.Number, 'Returns day of month for a given timestamp or timex expression.' ), ], [ 'dayOfWeek', new FunctionEntity(['timestamp: string'], ReturnType.Number, 'Return the day of the week from a timestamp.'), ], [ 'dayOfYear', new FunctionEntity(['timestamp: string'], ReturnType.Number, 'Return the day of the year from a timestamp.'), ], ['month', new FunctionEntity(['timestamp: string'], ReturnType.Number, 'Returns the month of given timestamp')], [ 'date', new FunctionEntity( ['timestamp: string'], ReturnType.Number, 'Return the date of a specified timestamp in "M/dd/yyyy" format.' ), ], ['year', new FunctionEntity(['timestamp: string'], ReturnType.Number, 'Returns year for the given timestamp')], ['utcNow', new FunctionEntity(['format?: string'], ReturnType.String, 'Returns current timestamp as string')], [ 'formatDateTime', new FunctionEntity( ['timestamp: string', 'format?: string'], ReturnType.String, 'Return a timestamp in the specified format.' ), ], [ 'subtractFromTime', new FunctionEntity( ['timestamp: string', 'interval: number', 'timeUnit: string', 'format?: string'], ReturnType.String, 'Subtract a number of time units from a timestamp.' ), ], [ 'dateReadBack', new FunctionEntity( ['currentDate: string', 'targetDate: string'], ReturnType.String, 'Uses the date-time library to provide a date readback. dateReadBack(currentDate, targetDate). E.g. 
dateReadBack(‘2016/05/30’,’2016/05/23’)=>"Yesterday"'
        ),
    ],
    [
        'getTimeOfDay',
        new FunctionEntity(
            ['timestamp: string'],
            ReturnType.String,
            'Returns time of day for a given timestamp (midnight = 12AM, morning = 12:01AM – 11:59AM, noon = 12PM, afternoon = 12:01PM – 05:59PM, evening = 06:00PM – 10:00PM, night = 10:01PM – 11:59PM)'
        ),
    ],
    [
        'float',
        new FunctionEntity(
            ['value: string'],
            ReturnType.Number,
            'Return floating point representation of the specified string or the string itself if conversion is not possible'
        ),
    ],
    [
        'int',
        new FunctionEntity(
            ['value: string'],
            ReturnType.Number,
            'Return integer representation of the specified string or the string itself if conversion is not possible'
        ),
    ],
    ['string', new FunctionEntity(['value: any'], ReturnType.String, 'Return string version of the specified value')],
    [
        'bool',
        new FunctionEntity(
            ['value: any'],
            ReturnType.Boolean,
            'Return Boolean representation of the specified string. Bool(‘true’), bool(1)'
        ),
    ],
    ['createArray', new FunctionEntity(['...objects: any[]'], ReturnType.Object, 'Create an array from multiple inputs')],
    [
        'if',
        new FunctionEntity(
            ['expression: boolean', 'valueIfTrue: any', 'valueIfFalse: any'],
            ReturnType.Object,
            'if(exp, valueIfTrue, valueIfFalse)'
        ),
    ],
    [
        'rand',
        new FunctionEntity(
            ['minValue: number', 'maxValue: number'],
            ReturnType.Number,
            'Returns a random number between specified min and max value – rand(<minValue>, <maxValue>)'
        ),
    ],
    [
        'json',
        new FunctionEntity(
            ['value: string|XML'],
            ReturnType.String,
            'Return the JavaScript Object Notation (JSON) type value or object for a string or XML.'
        ),
    ],
    [
        'getProperty',
        new FunctionEntity(
            ['jsonObject: any', 'property: string'],
            ReturnType.Object,
            'Return the value of the given property in a JSON object.'
        ),
    ],
    [
        'addProperty',
        new FunctionEntity(
            ['jsonObject: any', 'property: string', 'value: any'],
            ReturnType.Object,
            'Add a property and its value, or name-value pair, to a JSON object, and return the updated object. If the property already exists at runtime, the function throws an error.'
        ),
    ],
    [
        'removeProperty',
        new FunctionEntity(
            ['jsonObject: any', 'property: string'],
            ReturnType.Object,
            'Remove a property from an object and return the updated object.'
        ),
    ],
    [
        'setProperty',
        new FunctionEntity(
            ['jsonObject: any', 'property: string', 'value: any'],
            ReturnType.Object,
            "Set the value for an object's property and return the updated object. To add a new property, you can use this function or the addProperty() function."
        ),
    ],
    [
        'endsWith',
        new FunctionEntity(
            ['text: string', 'value: string'],
            ReturnType.Boolean,
            'Return whether a string ends with another string'
        ),
    ],
    [
        'startsWith',
        new FunctionEntity(
            ['text: string', 'value: string'],
            ReturnType.Boolean,
            'Return whether a string starts with another string'
        ),
    ],
    ['countWord', new FunctionEntity(['text: string'], ReturnType.Number, 'Returns the word count')],
    ['addOrdinal', new FunctionEntity(['num: number'], ReturnType.String, 'e.g. addOrdinal(10) = 10th')],
    [
        'indexOf',
        new FunctionEntity(
            ['text: string', 'value: string'],
            ReturnType.Number,
            'Returns the index of the value from the text'
        ),
    ],
    [
        'lastIndexOf',
        new FunctionEntity(
            ['text: string', 'value: string'],
            ReturnType.Number,
            'Returns the last index of the value from the text'
        ),
    ],
    [
        'union',
        new FunctionEntity(
            ['...values: Array[]'],
            ReturnType.Object,
            'Produces the set union of two sequences by using the default equality comparer.'
), ], [ 'intersection', new FunctionEntity( ['...values: Array[]'], ReturnType.Object, ' Produces the set intersection of two sequences by using the default equality comparer to compare values.' ), ], [ 'skip', new FunctionEntity( ['array: Array', 'length: number'], ReturnType.Object, 'Bypasses a specified number of elements in a sequence and then returns the remaining elements.' ), ], [ 'take', new FunctionEntity( ['array: Array', 'length: number'], ReturnType.Object, 'Returns a specified number of contiguous elements from the start of a sequence.' ), ], [ 'subArray', new FunctionEntity( ['array: Array', 'startIndex: number', 'endIndex: number'], ReturnType.Object, 'Returns the sub array from start index to end index' ), ], ['array', new FunctionEntity(['value: any'], ReturnType.Object, 'Create a new array with single value ')], ['binary', new FunctionEntity(['value: string'], ReturnType.String, 'Return the binary version for an input value.')], [ 'dataUri', new FunctionEntity( ['value: string'], ReturnType.String, 'Return a data uniform resource identifier (URI) for a string.' ), ], [ 'dataUriToBinary', new FunctionEntity( ['value: string'], ReturnType.String, 'Return the binary version for a data uniform resource identifier (URI). Use this function rather than decodeDataUri(). Although both functions work the same way, dataUriBinary() is preferred.' ), ], [ 'dataUriToString', new FunctionEntity( ['value: string'], ReturnType.String, 'Return the string version for a data uniform resource identifier (URI).' ), ], [ 'base64', new FunctionEntity(['value: string'], ReturnType.String, 'Return the base64-encoded version for a string.'), ], [ 'base64ToBinary', new FunctionEntity(['value: string'], ReturnType.String, 'Return the binary version for a base64-encoded string.'), ], [ 'base64ToString', new FunctionEntity( ['value: string'], ReturnType.String, 'Return the string version for a base64-encoded string, effectively decoding the base64 string. Use this function rather than decodeBase64. Although both functions work the same way, base64ToString() is preferred.' ), ], [ 'uriComponent', new FunctionEntity( ['value: string'], ReturnType.String, 'Return the binary version for a uniform resource identifier (URI) component.' ), ], [ 'uriComponentToString', new FunctionEntity( ['value: string'], ReturnType.String, 'Return the string version for a uniform resource identifier (URI) encoded string, effectively decoding the URI-encoded string.' ), ], //TODO. Make sure xml can be used in both browser/node environment //['xml', new FunctionEntity(['xmlStr: string]'], ReturnType.Object, 'Return the XML version for a string.')], [ 'range', new FunctionEntity( ['startIndex: number', 'count: number'], ReturnType.Object, 'Return an integer array that starts from a specified integer.' ), ], [ 'getFutureTime', new FunctionEntity( ['interval: number', 'timeUnit: string', 'format?: string'], ReturnType.String, 'Return the current timestamp plus the specified time units.' ), ], [ 'getPastTime', new FunctionEntity( ['interval: number', 'timeUnit: string', 'format?: string'], ReturnType.String, 'Return the current timestamp minus the specified time units.' ), ], [ 'addToTime', new FunctionEntity( ['timestamp: string', 'interval: number', 'timeUnit: string', 'format?: string'], ReturnType.String, 'Add a number of time units to a timestamp. 
See also getFutureTime()' ), ], [ 'convertFromUtc', new FunctionEntity( ['timestamp: string', 'destinationTimeZone: string', 'format?: string'], ReturnType.String, 'Convert a timestamp from Universal Time Coordinated(UTC) to target time zone.' ), ], [ 'convertToUtc', new FunctionEntity( ['timestamp: string', 'sourceTimeZone: string', 'format?: string'], ReturnType.String, 'Convert a timestamp to Universal Time Coordinated(UTC) from source time zone.' ), ], [ 'startOfDay', new FunctionEntity( ['timestamp: string', 'format?: string'], ReturnType.String, 'Return the start of the day for a timestamp.' ), ], [ 'startOfHour', new FunctionEntity( ['timestamp: string', 'format?: string'], ReturnType.String, 'Return the start of the hour for a timestamp.' ), ], [ 'startOfMonth', new FunctionEntity( ['timestamp: string', 'format?: string'], ReturnType.String, 'Return the start of the month for a timestamp.' ), ], [ 'ticks', new FunctionEntity( ['timestamp: string'], ReturnType.Number, 'Return the ticks property value for a specified timestamp. A tick is 100-nanosecond interval.' ), ], [ 'uriQuery', new FunctionEntity( ['uri: string'], ReturnType.String, 'Return the query value for a unified resource identifier(URI).' ), ], [ 'uriHost', new FunctionEntity( ['uri: string'], ReturnType.String, 'Return the host value for a unified resource identifier(URI).' ), ], [ 'uriPath', new FunctionEntity( ['uri: string'], ReturnType.String, 'Return the path value for a unified resource identifier(URI).' ), ], [ 'uriPathAndQuery', new FunctionEntity( ['uri: string'], ReturnType.String, 'Return the path and query value for a unified resource identifier(URI).' ), ], [ 'uriScheme', new FunctionEntity( ['uri: string'], ReturnType.String, 'Return the scheme value for a unified resource identifier(URI).' ), ], [ 'uriPort', new FunctionEntity( ['uri: string'], ReturnType.String, 'Return the port value for a unified resource identifier(URI).' ), ], [ 'coalesce', new FunctionEntity( ['...object: any[]'], ReturnType.Number, 'Return the first non-null value from one or more parameters. Empty strings, empty arrays, and empty objects are not null.' ), ], /* TODO. Make sure xpath can be used in both browser/node environment [ 'xpath', new FunctionEntity( ['xml: any', 'xpath: any'], ReturnType.Object, 'Check XML for nodes or values that match an XPath (XML Path Language) expression, and return the matching nodes or values. An XPath expression, or just "XPath", helps you navigate an XML document structure so that you can select nodes or compute values in the XML content.' ), ], */ [ 'select', new FunctionEntity( ['collection: Array | Object', 'iteratorName: string', 'function: any'], ReturnType.Object, 'Operate on each element and return the new collection' ), ], [ 'where', new FunctionEntity( ['collection: Array | Object', 'iteratorName: string', 'confitionFunction: any'], ReturnType.Object, 'Filter on each element and return the new collection of filtered elements which match specific condition' ), ], [ 'sortBy', new FunctionEntity( ['collection: Array', 'property: string'], ReturnType.Object, 'Sort elements in the collection with ascending order and return the sorted collection.' ), ], [ 'sortByDescending', new FunctionEntity( ['collection: Array', 'property: string'], ReturnType.Object, 'Sort elements in the collection with descending order and return the sorted collection.' 
),
    ],
    [
        'indicesAndValues',
        new FunctionEntity(
            ['collection: Array'],
            ReturnType.Object,
            'Turn an array into an array of objects with index (current index) and value properties.'
        ),
    ],
    [
        'jPath',
        new FunctionEntity(
            ['json: Object', 'path: string'],
            ReturnType.Object,
            'Check JSON or JSON string for nodes or value that match a path expression, and return the matching nodes.'
        ),
    ],
    [
        'setPathToValue',
        new FunctionEntity(
            ['path: any', 'value: Object'],
            ReturnType.Object,
            'Set the path in the given object to the specified value.'
        ),
    ],
    [
        'isMatch',
        new FunctionEntity(
            ['targetString: string', 'pattern: string'],
            ReturnType.Object,
            'Test whether a given string matches a common regex pattern.'
        ),
    ],
    // type checking functions
    [
        'isInteger',
        new FunctionEntity(['input: any'], ReturnType.Boolean, 'determine whether a given input is an integer number.'),
    ],
    [
        'isFloat',
        new FunctionEntity(['input: any'], ReturnType.Boolean, 'determine whether a given input is a float point number.'),
    ],
    [
        'isBoolean',
        new FunctionEntity(['input: any'], ReturnType.Boolean, 'determine whether a given input is a boolean.'),
    ],
    ['isArray', new FunctionEntity(['input: any'], ReturnType.Boolean, 'determine whether a given input is an array.')],
    ['isObject', new FunctionEntity(['input: any'], ReturnType.Boolean, 'determine whether a given input is an object.')],
    [
        'isDateTime',
        new FunctionEntity(
            ['input: any'],
            ReturnType.Boolean,
            'determine whether a given input is a UTC ISO format timestamp.'
        ),
    ],
    ['isString', new FunctionEntity(['input: any'], ReturnType.Boolean, 'determine whether a given input is a string.')],
    // Functions injected from the LG library
    // https://github.com/microsoft/BotBuilder-Samples/blob/master/experimental/language-generation/docs/Functions-injected-from-LG.md
    [
        'template',
        new FunctionEntity(
            ['templateName: string', '...params: any[]'],
            ReturnType.Object,
            'Return the evaluated result of given template name and params.'
        ),
    ],
    [
        'fromFile',
        new FunctionEntity(
            ['filePath: string'],
            ReturnType.String,
            'Return the evaluated result of the expression in the given file.'
        ),
    ],
    [
        'expandText',
        new FunctionEntity(
            ['text: string'],
            ReturnType.Object,
            'Return the evaluated result of the plain text and get the data binding result.'
        ),
    ],
    [
        'isTemplate',
        new FunctionEntity(
            ['templateName: string'],
            ReturnType.Boolean,
            'Return whether a given template name is included in the evaluator.'
        ),
    ],
    [
        'ActivityAttachment',
        new FunctionEntity(
            ['content: Object', 'type: string'],
            ReturnType.Boolean,
            'Return an activityAttachment constructed from an object and a type.'
        ),
    ],
]);
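// Hedged usage sketch (helper name assumed, not part of the original module):
// a language server can surface a map entry as hover/completion text, e.g.:
export function formatHoverText(name: string): string | undefined {
    const entity = buildInFunctionsMap.get(name);
    if (entity === undefined) {
        return undefined;
    }
    // e.g. formatHoverText('add') ->
    //   "add(num1: number, num2: number)\nReturn the result from adding two numbers."
    return `${name}(${entity.Params.join(', ')})\n${entity.Introduction}`;
}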
FunctionEntity
agentconfig.go
// Copyright 2018 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package agentconfig stores and retrieves configuration settings for the OS Config agent. package agentconfig import ( "context" "crypto/sha256" "encoding/json" "flag" "fmt" "io/ioutil" "net" "net/http" "net/url" "os" "path/filepath" "runtime" "strconv" "strings" "sync" "time" "cloud.google.com/go/compute/metadata" "github.com/GoogleCloudPlatform/osconfig/clog" "golang.org/x/oauth2/jws" ) const ( // metadataIP is the documented metadata server IP address. metadataIP = "169.254.169.254" // metadataHostEnv is the environment variable specifying the // GCE metadata hostname. metadataHostEnv = "GCE_METADATA_HOST" // InstanceMetadata is the compute metadata URL. InstanceMetadata = "http://metadata.google.internal/computeMetadata/v1/instance" // IdentityTokenPath is the instance identity token path. IdentityTokenPath = "instance/service-accounts/default/identity?audience=osconfig.googleapis.com&format=full" // ReportURL is the guest attributes endpoint. ReportURL = InstanceMetadata + "/guest-attributes" googetRepoDir = "C:/ProgramData/GooGet/repos" googetRepoFilePath = googetRepoDir + "/google_osconfig_managed.repo" zypperRepoDir = "/etc/zypp/repos.d" zypperRepoFilePath = zypperRepoDir + "/google_osconfig_managed.repo" yumRepoDir = "/etc/yum.repos.d" yumRepoFilePath = yumRepoDir + "/google_osconfig_managed.repo" aptRepoDir = "/etc/apt/sources.list.d" aptRepoFilePath = aptRepoDir + "/google_osconfig_managed.list" prodEndpoint = "{zone}-osconfig.googleapis.com:443" osInventoryEnabledDefault = false guestPoliciesEnabledDefault = false taskNotificationEnabledDefault = false debugEnabledDefault = false oldConfigDirLinux = "/etc/osconfig" cacheDirWindows = `C:\Program Files\Google\OSConfig` cacheDirLinux = "/var/lib/google_osconfig_agent" taskStateFileWindows = cacheDirWindows + `\osconfig_task.state` taskStateFileLinux = cacheDirLinux + "/osconfig_task.state" oldTaskStateFileLinux = oldConfigDirLinux + "/osconfig_task.state" restartFileWindows = cacheDirWindows + `\osconfig_agent_restart_required` restartFileLinux = cacheDirLinux + "/osconfig_agent_restart_required" oldRestartFileLinux = oldConfigDirLinux + "/osconfig_agent_restart_required" osConfigPollIntervalDefault = 10 osConfigMetadataPollTimeout = 60 ) var ( endpoint = flag.String("endpoint", prodEndpoint, "osconfig endpoint override") debug = flag.Bool("debug", false, "set debug log verbosity") stdout = flag.Bool("stdout", false, "log to stdout") disableLocalLogging = flag.Bool("disable_local_logging", false, "disable logging using event log or syslog") agentConfig = &config{} agentConfigMx sync.RWMutex version string lEtag = &lastEtag{Etag: "0"} // Current supported capabilites for this agent. // These are matched server side to what tasks this agent can // perform. 
capabilities = []string{"PATCH_GA", "GUEST_POLICY_BETA", "CONFIG_V1"} osConfigWatchConfigTimeout = 10 * time.Minute defaultClient = &http.Client{ Transport: &http.Transport{ Dial: (&net.Dialer{ Timeout: 2 * time.Second, KeepAlive: 30 * time.Second, }).Dial, }, } freeOSMemory = strings.ToLower(os.Getenv("OSCONFIG_FREE_OS_MEMORY")) disableInventoryWrite = strings.ToLower(os.Getenv("OSCONFIG_DISABLE_INVENTORY_WRITE")) ) type config struct { aptRepoFilePath string instanceName string instanceZone string projectID string svcEndpoint string googetRepoFilePath string zypperRepoFilePath string yumRepoFilePath string instanceID string numericProjectID int64 osConfigPollInterval int debugEnabled bool taskNotificationEnabled bool guestPoliciesEnabled bool osInventoryEnabled bool } func (c *config) parseFeatures(features string, enabled bool) { for _, f := range strings.Split(features, ",") { f = strings.ToLower(strings.TrimSpace(f)) switch f { case "tasks", "ospatch": // ospatch is the legacy flag c.taskNotificationEnabled = enabled case "guestpolicies", "ospackage": // ospackage is the legacy flag c.guestPoliciesEnabled = enabled case "osinventory": c.osInventoryEnabled = enabled } } } func (c *config) asSha256() string { h := sha256.New() h.Write([]byte(fmt.Sprintf("%v", c))) return fmt.Sprintf("%x", h.Sum(nil)) } func getAgentConfig() config { agentConfigMx.RLock() defer agentConfigMx.RUnlock() return *agentConfig } type lastEtag struct { Etag string mu sync.RWMutex } func (e *lastEtag) set(etag string) { e.mu.Lock() defer e.mu.Unlock() e.Etag = etag } func (e *lastEtag) get() string { e.mu.RLock() defer e.mu.RUnlock() return e.Etag } func parseBool(s string) bool { enabled, err := strconv.ParseBool(s) if err != nil { // Bad entry returns as not enabled. return false } return enabled } type metadataJSON struct { Instance instanceJSON Project projectJSON } type instanceJSON struct { Attributes attributesJSON ID *json.Number Zone string Name string } type projectJSON struct { Attributes attributesJSON ProjectID string NumericProjectID int64 } type attributesJSON struct { PollIntervalOld *json.Number `json:"os-config-poll-interval"` PollInterval *json.Number `json:"osconfig-poll-interval"` InventoryEnabledOld string `json:"os-inventory-enabled"` InventoryEnabled string `json:"enable-os-inventory"` PreReleaseFeaturesOld string `json:"os-config-enabled-prerelease-features"` PreReleaseFeatures string `json:"osconfig-enabled-prerelease-features"` DebugEnabledOld string `json:"enable-os-config-debug"` LogLevel string `json:"osconfig-log-level"` OSConfigEndpointOld string `json:"os-config-endpoint"` OSConfigEndpoint string `json:"osconfig-endpoint"` OSConfigEnabled string `json:"enable-osconfig"` DisabledFeatures string `json:"osconfig-disabled-features"` } func createConfigFromMetadata(md metadataJSON) *config { old := getAgentConfig() c := &config{ osInventoryEnabled: osInventoryEnabledDefault, guestPoliciesEnabled: guestPoliciesEnabledDefault, taskNotificationEnabled: taskNotificationEnabledDefault, debugEnabled: debugEnabledDefault, svcEndpoint: prodEndpoint, osConfigPollInterval: osConfigPollIntervalDefault, googetRepoFilePath: googetRepoFilePath, zypperRepoFilePath: zypperRepoFilePath, yumRepoFilePath: yumRepoFilePath, aptRepoFilePath: aptRepoFilePath, projectID: old.projectID, numericProjectID: old.numericProjectID, instanceZone: old.instanceZone, instanceName: old.instanceName, instanceID: old.instanceID, } if md.Project.ProjectID != "" { c.projectID = md.Project.ProjectID } if 
md.Project.NumericProjectID != 0 { c.numericProjectID = md.Project.NumericProjectID } if md.Instance.Zone != "" { c.instanceZone = md.Instance.Zone } if md.Instance.Name != "" { c.instanceName = md.Instance.Name } if md.Instance.ID != nil { c.instanceID = md.Instance.ID.String() } // Check project first then instance as instance metadata overrides project. switch { case md.Project.Attributes.InventoryEnabled != "": c.osInventoryEnabled = parseBool(md.Project.Attributes.InventoryEnabled) case md.Project.Attributes.InventoryEnabledOld != "": c.osInventoryEnabled = parseBool(md.Project.Attributes.InventoryEnabledOld) } c.parseFeatures(md.Project.Attributes.PreReleaseFeaturesOld, true) c.parseFeatures(md.Project.Attributes.PreReleaseFeatures, true) if md.Project.Attributes.OSConfigEnabled != "" { e := parseBool(md.Project.Attributes.OSConfigEnabled) c.taskNotificationEnabled = e c.guestPoliciesEnabled = e c.osInventoryEnabled = e } c.parseFeatures(md.Project.Attributes.DisabledFeatures, false) switch { case md.Instance.Attributes.InventoryEnabled != "": c.osInventoryEnabled = parseBool(md.Instance.Attributes.InventoryEnabled) case md.Instance.Attributes.InventoryEnabledOld != "": c.osInventoryEnabled = parseBool(md.Instance.Attributes.InventoryEnabledOld) } c.parseFeatures(md.Instance.Attributes.PreReleaseFeaturesOld, true) c.parseFeatures(md.Instance.Attributes.PreReleaseFeatures, true) if md.Instance.Attributes.OSConfigEnabled != "" { e := parseBool(md.Instance.Attributes.OSConfigEnabled) c.taskNotificationEnabled = e c.guestPoliciesEnabled = e c.osInventoryEnabled = e } c.parseFeatures(md.Instance.Attributes.DisabledFeatures, false) switch { case md.Project.Attributes.PollInterval != nil: if val, err := md.Project.Attributes.PollInterval.Int64(); err == nil { c.osConfigPollInterval = int(val) } case md.Project.Attributes.PollIntervalOld != nil: if val, err := md.Project.Attributes.PollIntervalOld.Int64(); err == nil { c.osConfigPollInterval = int(val) } } switch { case md.Instance.Attributes.PollInterval != nil: if val, err := md.Instance.Attributes.PollInterval.Int64(); err == nil { c.osConfigPollInterval = int(val) } case md.Instance.Attributes.PollIntervalOld != nil: if val, err := md.Instance.Attributes.PollIntervalOld.Int64(); err == nil { c.osConfigPollInterval = int(val) } } switch { case md.Project.Attributes.DebugEnabledOld != "": c.debugEnabled = parseBool(md.Project.Attributes.DebugEnabledOld) case md.Instance.Attributes.DebugEnabledOld != "": c.debugEnabled = parseBool(md.Instance.Attributes.DebugEnabledOld) } switch strings.ToLower(md.Project.Attributes.LogLevel) { case "debug": c.debugEnabled = true case "info": c.debugEnabled = false } switch strings.ToLower(md.Instance.Attributes.LogLevel) { case "debug": c.debugEnabled = true case "info": c.debugEnabled = false } // Flags take precedence over metadata.
if *debug { c.debugEnabled = true } setSVCEndpoint(md, c) return c } func setSVCEndpoint(md metadataJSON, c *config) { switch { case *endpoint != prodEndpoint: c.svcEndpoint = *endpoint case md.Instance.Attributes.OSConfigEndpoint != "": c.svcEndpoint = md.Instance.Attributes.OSConfigEndpoint case md.Instance.Attributes.OSConfigEndpointOld != "": c.svcEndpoint = md.Instance.Attributes.OSConfigEndpointOld case md.Project.Attributes.OSConfigEndpoint != "": c.svcEndpoint = md.Project.Attributes.OSConfigEndpoint case md.Project.Attributes.OSConfigEndpointOld != "": c.svcEndpoint = md.Project.Attributes.OSConfigEndpointOld } // Example instanceZone: projects/123456/zones/us-west1-b parts := strings.Split(c.instanceZone, "/") zone := parts[len(parts)-1] c.svcEndpoint = strings.ReplaceAll(c.svcEndpoint, "{zone}", zone) } func formatMetadataError(err error) error { if urlErr, ok := err.(*url.Error); ok { if _, ok := urlErr.Err.(*net.DNSError); ok { return fmt.Errorf("DNS error when requesting metadata, check DNS settings and ensure metadata.google.internal is setup in your hosts file: %w", err) } if _, ok := urlErr.Err.(*net.OpError); ok { return fmt.Errorf("network error when requesting metadata, make sure your instance has an active network and can reach the metadata server: %w", err) } } return err } func getMetadata(suffix string) ([]byte, string, error) { host := os.Getenv(metadataHostEnv) if host == "" { // Using 169.254.169.254 instead of "metadata" here because Go // binaries built with the "netgo" tag and without cgo won't // know the search suffix for "metadata" is // ".google.internal", and this IP address is documented as // being stable anyway. host = metadataIP } computeMetadataURL := "http://" + host + "/computeMetadata/v1/" + suffix req, err := http.NewRequest("GET", computeMetadataURL, nil) if err != nil { return nil, "", err } req.Header.Add("Metadata-Flavor", "Google") resp, err := defaultClient.Do(req) if err != nil { return nil, "", err } defer resp.Body.Close() if resp.StatusCode != http.StatusOK { // err is nil at this point; return an explicit error instead of a nil one. return nil, "", fmt.Errorf("metadata request returned unexpected status: %q", resp.Status) } all, err := ioutil.ReadAll(resp.Body) if err != nil { return nil, "", err } return all, resp.Header.Get("Etag"), nil } // WatchConfig looks for changes in metadata keys. Upon receiving a successful response, // it creates a new agent config. func WatchConfig(ctx context.Context) error { var md []byte var webError error // Max watch time, after this WatchConfig will return. timeout := time.After(osConfigWatchConfigTimeout) // Min watch loop time. loopTicker := time.NewTicker(5 * time.Second) defer loopTicker.Stop() eTag := lEtag.get() webErrorCount := 0 unmarshalErrorCount := 0 for { md, eTag, webError = getMetadata(fmt.Sprintf("?recursive=true&alt=json&wait_for_change=true&last_etag=%s&timeout_sec=%d", lEtag.get(), osConfigMetadataPollTimeout)) if webError == nil && eTag != lEtag.get() { var metadataConfig metadataJSON if err := json.Unmarshal(md, &metadataConfig); err != nil { // Try up to three times (with 5s sleep) to get and unmarshal metadata. // Most unmarshal errors are transient read issues with the metadata server // so we should retry without logging the error.
if unmarshalErrorCount >= 3 { return err } unmarshalErrorCount++ select { case <-timeout: return err case <-ctx.Done(): return nil case <-loopTicker.C: continue } } unmarshalErrorCount = 0 lEtag.set(eTag) newAgentConfig := createConfigFromMetadata(metadataConfig) agentConfigMx.Lock() if agentConfig.asSha256() != newAgentConfig.asSha256() { agentConfig = newAgentConfig agentConfigMx.Unlock() break } agentConfigMx.Unlock() } // Try up to 12 times (60s) to wait for slow network initialization, after // that resort to using defaults and returning the error. if webError != nil { if webErrorCount == 12 { return formatMetadataError(webError) } webErrorCount++ } select { case <-timeout: return webError case <-ctx.Done(): return nil case <-loopTicker.C: continue } } return webError } // LogFeatures logs the osconfig feature status. func LogFeatures(ctx context.Context) { clog.Infof(ctx, "OSConfig enabled features status:{GuestPolicies: %t, OSInventory: %t, PatchManagement: %t}.", GuestPoliciesEnabled(), OSInventoryEnabled(), TaskNotificationEnabled()) } // SvcPollInterval returns the frequency to poll the service. func SvcPollInterval() time.Duration { return time.Duration(getAgentConfig().osConfigPollInterval) * time.Minute } // SerialLogPort is the serial port to log to. func SerialLogPort() string { if runtime.GOOS == "windows" { return "COM1" } // Don't write directly to the serial port on Linux as syslog already writes there. return "" } // Debug sets the debug log verbosity. func Debug() bool { return *debug || getAgentConfig().debugEnabled } // Stdout flag. func Stdout() bool { return *stdout } // DisableLocalLogging flag. func DisableLocalLogging() bool { return *disableLocalLogging } // SvcEndpoint is the OS Config service endpoint. func SvcEndpoint() string
// ZypperRepoDir is the location of the zypper repo files. func ZypperRepoDir() string { return zypperRepoDir } // ZypperRepoFormat is the format of the zypper repo files. func ZypperRepoFormat() string { return filepath.Join(zypperRepoDir, "osconfig_managed_%s.repo") } // ZypperRepoFilePath is the location where the zypper repo file will be created. func ZypperRepoFilePath() string { return getAgentConfig().zypperRepoFilePath } // YumRepoDir is the location of the yum repo files. func YumRepoDir() string { return yumRepoDir } // YumRepoFormat is the format of the yum repo files. func YumRepoFormat() string { return filepath.Join(yumRepoDir, "osconfig_managed_%s.repo") } // YumRepoFilePath is the location where the yum repo file will be created. func YumRepoFilePath() string { return getAgentConfig().yumRepoFilePath } // AptRepoDir is the location of the apt repo files. func AptRepoDir() string { return aptRepoDir } // AptRepoFormat is the format of the apt repo files. func AptRepoFormat() string { return filepath.Join(aptRepoDir, "osconfig_managed_%s.list") } // AptRepoFilePath is the location where the apt repo file will be created. func AptRepoFilePath() string { return getAgentConfig().aptRepoFilePath } // GooGetRepoDir is the location of the googet repo files. func GooGetRepoDir() string { return googetRepoDir } // GooGetRepoFormat is the format of the googet repo files. func GooGetRepoFormat() string { return filepath.Join(googetRepoDir, "osconfig_managed_%s.repo") } // GooGetRepoFilePath is the location where the googet repo file will be created. func GooGetRepoFilePath() string { return getAgentConfig().googetRepoFilePath } // OSInventoryEnabled indicates whether OSInventory should be enabled. func OSInventoryEnabled() bool { return getAgentConfig().osInventoryEnabled } // GuestPoliciesEnabled indicates whether GuestPolicies should be enabled. func GuestPoliciesEnabled() bool { return getAgentConfig().guestPoliciesEnabled } // TaskNotificationEnabled indicates whether TaskNotification should be enabled. func TaskNotificationEnabled() bool { return getAgentConfig().taskNotificationEnabled } // Instance is the URI of the instance the agent is running on. func Instance() string { // Zone contains 'projects/project-id/zones' as a prefix. return fmt.Sprintf("%s/instances/%s", Zone(), Name()) } // NumericProjectID is the numeric project ID of the instance. func NumericProjectID() int64 { return getAgentConfig().numericProjectID } // ProjectID is the project ID of the instance. func ProjectID() string { return getAgentConfig().projectID } // Zone is the zone the instance is running in. func Zone() string { return getAgentConfig().instanceZone } // Name is the instance name. func Name() string { return getAgentConfig().instanceName } // ID is the instance id. func ID() string { return getAgentConfig().instanceID } type idToken struct { exp *time.Time raw string sync.Mutex } func (t *idToken) get() error { data, err := metadata.Get(IdentityTokenPath) if err != nil { return fmt.Errorf("error getting token from metadata: %w", err) } cs, err := jws.Decode(data) if err != nil { return err } t.raw = data exp := time.Unix(cs.Exp, 0) t.exp = &exp return nil } var identity idToken // IDToken is the instance id token. func IDToken() (string, error) { identity.Lock() defer identity.Unlock() // Rerequest token if expiry is within 10 minutes. 
if identity.exp == nil || time.Now().After(identity.exp.Add(-10*time.Minute)) { if err := identity.get(); err != nil { return "", err } } return identity.raw, nil } // Version is the agent version. func Version() string { return version } // SetVersion sets the agent version. func SetVersion(v string) { version = v } // Capabilities returns the agent's capabilities. func Capabilities() []string { return capabilities } // TaskStateFile is the location of the task state file. func TaskStateFile() string { if runtime.GOOS == "windows" { return taskStateFileWindows } return taskStateFileLinux } // OldTaskStateFile is the location of the old task state file. func OldTaskStateFile() string { return oldTaskStateFileLinux } // RestartFile is the location of the restart required file. func RestartFile() string { if runtime.GOOS == "windows" { return restartFileWindows } return restartFileLinux } // OldRestartFile is the location of the old restart required file. func OldRestartFile() string { return oldRestartFileLinux } // CacheDir is the location of the cache directory. func CacheDir() string { if runtime.GOOS == "windows" { return cacheDirWindows } return cacheDirLinux } // UserAgent for creating http/grpc clients. func UserAgent() string { return "google-osconfig-agent/" + Version() } // DisableInventoryWrite returns true if the DisableInventoryWrite setting is set. func DisableInventoryWrite() bool { return strings.EqualFold(disableInventoryWrite, "true") || disableInventoryWrite == "1" } // FreeOSMemory returns true if the FreeOSMemory setting is set. func FreeOSMemory() bool { return strings.EqualFold(freeOSMemory, "true") || freeOSMemory == "1" }
{ return getAgentConfig().svcEndpoint }
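With the middle filled in, SvcEndpoint simply returns the cached value. Below is a minimal usage sketch (not from the original repo, and assuming the package is importable as github.com/GoogleCloudPlatform/osconfig/agentconfig, inferred from the clog import above): run one WatchConfig cycle, then read the effective settings through the package-level accessors defined in this file.

package main

import (
	"context"
	"log"

	"github.com/GoogleCloudPlatform/osconfig/agentconfig"
)

func main() {
	ctx := context.Background()
	// WatchConfig blocks until the metadata etag changes (or it times out),
	// then rebuilds and swaps in a new cached agent config.
	if err := agentconfig.WatchConfig(ctx); err != nil {
		log.Printf("WatchConfig: %v", err)
	}
	log.Printf("endpoint=%s pollInterval=%s osInventory=%t",
		agentconfig.SvcEndpoint(),
		agentconfig.SvcPollInterval(),
		agentconfig.OSInventoryEnabled())
}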
mysqllayer.go
/* Copyright AppsCode Inc. and Contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Code generated by client-gen. DO NOT EDIT. package v1alpha1 import ( "context" "time" v1alpha1 "kubeform.dev/provider-aws-api/apis/opsworks/v1alpha1" scheme "kubeform.dev/provider-aws-api/client/clientset/versioned/scheme" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" rest "k8s.io/client-go/rest" ) // MysqlLayersGetter has a method to return a MysqlLayerInterface. // A group's client should implement this interface. type MysqlLayersGetter interface { MysqlLayers(namespace string) MysqlLayerInterface } // MysqlLayerInterface has methods to work with MysqlLayer resources. type MysqlLayerInterface interface { Create(ctx context.Context, mysqlLayer *v1alpha1.MysqlLayer, opts v1.CreateOptions) (*v1alpha1.MysqlLayer, error) Update(ctx context.Context, mysqlLayer *v1alpha1.MysqlLayer, opts v1.UpdateOptions) (*v1alpha1.MysqlLayer, error) UpdateStatus(ctx context.Context, mysqlLayer *v1alpha1.MysqlLayer, opts v1.UpdateOptions) (*v1alpha1.MysqlLayer, error) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error Get(ctx context.Context, name string, opts v1.GetOptions) (*v1alpha1.MysqlLayer, error) List(ctx context.Context, opts v1.ListOptions) (*v1alpha1.MysqlLayerList, error) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.MysqlLayer, err error) MysqlLayerExpansion } // mysqlLayers implements MysqlLayerInterface type mysqlLayers struct { client rest.Interface ns string } // newMysqlLayers returns a MysqlLayers func newMysqlLayers(c *OpsworksV1alpha1Client, namespace string) *mysqlLayers { return &mysqlLayers{ client: c.RESTClient(), ns: namespace, } } // Get takes name of the mysqlLayer, and returns the corresponding mysqlLayer object, and an error if there is any. func (c *mysqlLayers) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha1.MysqlLayer, err error) { result = &v1alpha1.MysqlLayer{} err = c.client.Get(). Namespace(c.ns). Resource("mysqllayers"). Name(name). VersionedParams(&options, scheme.ParameterCodec). Do(ctx). Into(result) return } // List takes label and field selectors, and returns the list of MysqlLayers that match those selectors. func (c *mysqlLayers) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha1.MysqlLayerList, err error) { var timeout time.Duration if opts.TimeoutSeconds != nil { timeout = time.Duration(*opts.TimeoutSeconds) * time.Second } result = &v1alpha1.MysqlLayerList{} err = c.client.Get(). Namespace(c.ns). Resource("mysqllayers"). VersionedParams(&opts, scheme.ParameterCodec). Timeout(timeout). Do(ctx). Into(result) return } // Watch returns a watch.Interface that watches the requested mysqlLayers. 
func (c *mysqlLayers) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) { var timeout time.Duration if opts.TimeoutSeconds != nil { timeout = time.Duration(*opts.TimeoutSeconds) * time.Second } opts.Watch = true return c.client.Get(). Namespace(c.ns). Resource("mysqllayers"). VersionedParams(&opts, scheme.ParameterCodec). Timeout(timeout). Watch(ctx) } // Create takes the representation of a mysqlLayer and creates it. Returns the server's representation of the mysqlLayer, and an error, if there is any. func (c *mysqlLayers) Create(ctx context.Context, mysqlLayer *v1alpha1.MysqlLayer, opts v1.CreateOptions) (result *v1alpha1.MysqlLayer, err error) { result = &v1alpha1.MysqlLayer{} err = c.client.Post(). Namespace(c.ns). Resource("mysqllayers"). VersionedParams(&opts, scheme.ParameterCodec). Body(mysqlLayer). Do(ctx). Into(result) return } // Update takes the representation of a mysqlLayer and updates it. Returns the server's representation of the mysqlLayer, and an error, if there is any. func (c *mysqlLayers) Update(ctx context.Context, mysqlLayer *v1alpha1.MysqlLayer, opts v1.UpdateOptions) (result *v1alpha1.MysqlLayer, err error) {
err = c.client.Put(). Namespace(c.ns). Resource("mysqllayers"). Name(mysqlLayer.Name). VersionedParams(&opts, scheme.ParameterCodec). Body(mysqlLayer). Do(ctx). Into(result) return } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). func (c *mysqlLayers) UpdateStatus(ctx context.Context, mysqlLayer *v1alpha1.MysqlLayer, opts v1.UpdateOptions) (result *v1alpha1.MysqlLayer, err error) { result = &v1alpha1.MysqlLayer{} err = c.client.Put(). Namespace(c.ns). Resource("mysqllayers"). Name(mysqlLayer.Name). SubResource("status"). VersionedParams(&opts, scheme.ParameterCodec). Body(mysqlLayer). Do(ctx). Into(result) return } // Delete takes name of the mysqlLayer and deletes it. Returns an error if one occurs. func (c *mysqlLayers) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error { return c.client.Delete(). Namespace(c.ns). Resource("mysqllayers"). Name(name). Body(&opts). Do(ctx). Error() } // DeleteCollection deletes a collection of objects. func (c *mysqlLayers) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error { var timeout time.Duration if listOpts.TimeoutSeconds != nil { timeout = time.Duration(*listOpts.TimeoutSeconds) * time.Second } return c.client.Delete(). Namespace(c.ns). Resource("mysqllayers"). VersionedParams(&listOpts, scheme.ParameterCodec). Timeout(timeout). Body(&opts). Do(ctx). Error() } // Patch applies the patch and returns the patched mysqlLayer. func (c *mysqlLayers) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha1.MysqlLayer, err error) { result = &v1alpha1.MysqlLayer{} err = c.client.Patch(pt). Namespace(c.ns). Resource("mysqllayers"). Name(name). SubResource(subresources...). VersionedParams(&opts, scheme.ParameterCodec). Body(data). Do(ctx). Into(result) return }
result = &v1alpha1.MysqlLayer{}
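For context, a hedged usage sketch of this generated client. The versioned clientset import path and its NewForConfig constructor are assumptions based on the standard client-gen layout implied by the scheme import in this file; they are not shown here.

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/tools/clientcmd"
	versioned "kubeform.dev/provider-aws-api/client/clientset/versioned"
)

func main() {
	// Build a rest.Config from the default kubeconfig location.
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := versioned.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// List MysqlLayer objects in the "default" namespace via the typed client.
	layers, err := cs.OpsworksV1alpha1().MysqlLayers("default").List(context.TODO(), metav1.ListOptions{})
	if err != nil {
		panic(err)
	}
	for _, l := range layers.Items {
		fmt.Println(l.Name)
	}
}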
actions_not_implemented.go
package horizon
// NotImplementedAction renders a NotImplemented problem type NotImplementedAction struct { Action } // JSON is a method for actions.JSON func (action *NotImplementedAction) JSON() { problem.Render(action.Ctx, action.W, problem.NotImplemented) }
import "bitbucket.org/atticlab/horizon/render/problem"
ISaveManager.ts
import { ISaveHelp } from "./ISaveHelp"; /** * Save-data manager interface */ export interface ISaveManager { /** * Number of stored entries */ readonly count: number; /** * Sets the save helper * @param saveHelp */ setSaveHelp(saveHelp: ISaveHelp): void; /** * Stores a number value * @param name storage key * @param value value to store
setNumber(name: string, value: number): void; /** * Gets a stored number value * @param name storage key * @param defaultValue default value * @returns the stored number or the default value */ getNumber(name: string, defaultValue?: number): number | null; /** * Stores a string value * @param name storage key * @param value value to store */ setString(name: string, value: string): void; /** * Gets a stored string value * @param name storage key * @param defaultValue default value * @returns the stored string or the default value */ getString(name: string, defaultValue?: string): string | null; /** * Stores an object value * @param name storage key * @param value value to store */ setObject(name: string, value: object): void; /** * Gets a stored object value * @param name storage key * @param defaultValue default value * @returns the stored object or the default value */ getObject(name: string, defaultValue?: object): object | null; /** * Deletes a stored entry * @param name storage key */ deleteData(name: string): void; /** * Clears all stored data */ clear(): void; /** * Iterates over stored data * @param callbackfn * @param thisArg */ forEach(callbackfn: (name: string, value: string) => void, thisArg?: any): void; }
*/
NoteListScreen.js
import React from 'react'; import { Button, FlatList, Platform, StyleSheet, Text, TouchableHighlight, TouchableNativeFeedback, TouchableOpacity, View } from 'react-native'; import ActionButton from 'react-native-action-button'; import Swipeout from 'react-native-swipeout'; import Loading from './Loading'; import theme from '../theme'; import uuid from 'uuid'; // BEGIN-REDUX import { connect } from 'react-redux'; import actions from '../redux/actions'; // END-REDUX // Platform-dependent Touchable component const Touchable = (Platform.OS === 'android') ? TouchableNativeFeedback : TouchableHighlight; // Stylesheet for the NoteList component const styles = StyleSheet.create({ addItemButton: { fontSize: 28, color: '#C769FF', }, iosAddItemIcon: { fontSize: 24, color: '#C769FF', marginRight: 16 }, container: { backgroundColor: 'white', flex: 1 }, flatlistitem: { backgroundColor: 'white', borderBottomWidth: 1, borderBottomColor: '#F0F0F0', paddingBottom: 0 } }); // Stylesheet for the individual note items in the note list. const noteItemStyles = StyleSheet.create({ container: { backgroundColor: 'white', flex: 1, flexDirection: 'column', height: 40, marginBottom: 32, marginLeft: 16, marginRight: 16, marginTop: 16 }, content: { color: '#C2BCC5', fontSize: 14 }, contentContainer: { flexBasis: 'auto' }, title: { color: '#353535', fontSize: 20 }, titleContainer: { flexGrow: 1 } }); /** * Component for displaying a textual icon that is touchable in the top header bar * * @param {Props} props properties for this component */ const HeaderButton = (props) => { return ( <TouchableOpacity onPress={(event) => props.onPress(event)}> <Text style={props.style}>{props.children}</Text> </TouchableOpacity> ); }; /** * Component for displaying an individual row of the NoteList * * @param {Props} props properties for this component */ const NoteListItem = (props) => { const onPress = props.onPress ? props.onPress : () => { /* Do Nothing */ }; return ( <Touchable onPress={onPress}> <View style={noteItemStyles.container}> <View style={noteItemStyles.titleContainer}> <Text numberOfLines={1} style={noteItemStyles.title}>{props.item.title}</Text> </View> <View style={noteItemStyles.contentContainer}> <Text numberOfLines={2} style={noteItemStyles.content}>{props.item.content}</Text> </View> </View> </Touchable> ); } /** * The Home Screen - this is a container component built on top of * the React Navigation system that is fed the list of notes to be * displayed * * @class NoteList * @extends {React.Component} */ class
extends React.Component { /** * Initial state for the component. The activeRow is the object that has an open * drawer for swiping. Only one drawer can be open at any time. It is null to * indicate no open drawer. */ state = { activeRow: null }; /** * Options for react-navigation * * @static * @memberof NoteList */ static navigationOptions = ({ navigation }) => { return { /*title: 'Notes',*/ headerStyle: { backgroundColor: theme.headerBackgroundColor }, headerTintColor: theme.headerForegroundColor, headerRight: (Platform.OS === 'ios') ? <HeaderButton style={styles.iosAddItemIcon} onPress={() => NoteListScreen.onAddNote(navigation.navigate)}>+</HeaderButton> : false } }; /** * This has to be a static method because it is called in two places - by the floating * action button on Android and by the navigation options on iOS. * @param {Function} navigate method to call to navigate to a new screen * @memberof NoteList */ static onAddNote(navigate) { navigate('details', { noteId: uuid.v4() }); } /** * Event handler called when the user swipes-left. * * @param {any} item the item that was swiped * @param {any} rowId the rowId of the item that was swiped * @param {any} dir the direction that was swiped * @memberof NoteList */ onSwipeOpen(item, rowId, dir) { this.setState({ activeRow: item.noteId }); } /** * Event handler called when the system closes the swipe-drawer (either * because the user clicked elsewhere or the item was deleted) * * @param {any} item the item that was swiped * @param {any} rowId the rowId of the item that was swiped * @param {any} dir the direction that was swiped * @memberof NoteList */ onSwipeClose(item, rowId, dir) { if (item.noteId === this.state.activeRow && typeof dir !== 'undefined') { this.setState({ activeRow: null }); } } /** * Event handler called when a user tries to press a note. * * @param {Object} item the note to be selected * @memberof NoteList */ onViewNote(item) { const { navigate } = this.props.navigation; navigate('details', { noteId: item.noteId }); } /** * Event handler called when a user tries to delete a note. * * @param {Object} item the note to be deleted * @memberof NoteList */ onDeleteNote(item) { this.props.deleteNote(item.noteId); } /** * Renders a single element in the list * @param {Object} item the item to be rendered * @param {number} index the rowId of the item to be rendered * @returns {JSX.Element} the rendered list element * @memberof NoteList */ renderItem(item, index) { const swipeSettings = { autoClose: true, close: item.noteId !== this.state.activeRow, onClose: (secId, rowId, dir) => this.onSwipeClose(item, rowId, dir), onOpen: (secId, rowId, dir) => this.onSwipeOpen(item, rowId, dir), right: [ { onPress: () => this.onDeleteNote(item), text: 'Delete', type: 'delete' } ], rowId: index, sectionId: 1, style: styles.flatlistitem }; return ( <Swipeout {...swipeSettings}> <NoteListItem item={item} onPress={() => this.onViewNote(item)}/> </Swipeout> ); } /** * Part of the React lifecycle that actually renders the component.
* * @returns {JSX.Element} a component tree rendered in JSX * @memberof NoteList */ render() { const params = { noteList: { data: this.props.notes, extraData: this.state.activeRow, keyExtractor: (item) => item.noteId, renderItem: ({ item, index }) => this.renderItem(item, index) }, actionButton: { buttonColor: theme.actionButtonColor, onPress: () => NoteListScreen.onAddNote(this.props.navigation.navigate) } } if (this.props.loading) { return <Loading/>; } return ( <View style={styles.container}> <FlatList {...params.noteList} /> {(Platform.OS === 'android') && <ActionButton {...params.actionButton} />} </View> ); } } // BEGIN-REDUX /** * Maps the redux store state to properties required by this container * component. In this case, we only want to see the records that are * not deleted. * * @param {Object} state the redux store state */ const mapStateToProps = (state) => { return { notes: state.notes }; }; /** * Maps the dispatch method to dispatch the appropriate actions based * on the events that will be generated by this container component. * * @param {Function} dispatch the dispatcher from redux */ const mapDispatchToProps = (dispatch) => { return { deleteNote: (noteId) => dispatch(actions.notes.deleteNote({ noteId })) }; }; const NoteListScreen = connect(mapStateToProps, mapDispatchToProps)(NoteList); // END-REDUX export default NoteListScreen;
NoteList
scontext.go
package scontext import ( "context" "errors" "reflect" ) // The thrift definitions for the request context live in the util project; this module exists mainly to avoid a circular dependency. const ( ContextKeyTraceID = "traceID" ContextKeyHead = "Head" ContextKeyHeadUid = "uid" ContextKeyHeadSource = "source" ContextKeyHeadIp = "ip" ContextKeyHeadRegion = "region" ContextKeyHeadDt = "dt" ContextKeyHeadUnionId = "unionid" ContextKeyControl = "Control" ) const DefaultGroup = "" var ErrInvalidContext = errors.New("invalid context") type ContextHeader interface { ToKV() map[string]interface{} } type ContextControlRouter interface { GetControlRouteGroup() (string, bool) SetControlRouteGroup(string) error } type ContextControlCaller interface { GetControlCallerServerName() (string, bool) SetControlCallerServerName(string) error GetControlCallerServerId() (string, bool) SetControlCallerServerId(string) error GetControlCallerMethod() (string, bool) SetControlCallerMethod(string) error } func GetControlRouteGroup(ctx context.Context) (group string, ok bool) { value := ctx.Value(ContextKeyControl) if isNil(value) { ok = false return } control, ok := value.(ContextControlRouter) if !ok { return } return control.GetControlRouteGroup() } func SetControlRouteGroup(ctx context.Context, group string) (context.Context, error) { value := ctx.Value(ContextKeyControl) if isNil(value) { return ctx, ErrInvalidContext } control, ok := value.(ContextControlRouter) if !ok { return ctx, ErrInvalidContext } err := control.SetControlRouteGroup(group) if err != nil { return ctx, err } return context.WithValue(ctx, ContextKeyControl, control), nil } func GetControlRouteGroupWithDefault(ctx context.Context, dv string) string { if group, ok := GetControlRouteGroup(ctx); ok { return group } return dv } func getHeaderByKey(ctx context.Context, key string) (val interface{}, ok bool) { head := ctx.Value(ContextKeyHead) if isNil(head) { ok = false return } var header ContextHeader if header, ok = head.(ContextHeader); ok { val, ok = header.ToKV()[key] } return } func GetUid(ctx context.Context) (uid int64, ok bool) { val, ok := getHeaderByKey(ctx, ContextKeyHeadUid) if ok { uid, ok = val.(int64) } return } func GetSource(ctx context.Context) (source int32, ok bool) { val, ok := getHeaderByKey(ctx, ContextKeyHeadSource) if ok { source, ok = val.(int32) } return } func GetIp(ctx context.Context) (ip string, ok bool) { val, ok := getHeaderByKey(ctx, ContextKeyHeadIp) if ok { ip, ok = val.(string) } return } func GetRegion(ctx context.Context) (region string, ok bool) { val, ok := getHeaderByKey(ctx, ContextKeyHeadRegion) if ok { region, ok = val.(string) } return } func GetDt(ctx context.Context) (dt int32, ok bool) { val, ok := getHeaderByKey(ctx, ContextKeyHeadDt) if ok { dt, ok = val.(int32) } return } func GetUnionId(ctx context.Context) (unionId string, ok bool) { val, ok := getHeaderByKey(ctx, ContextKeyHeadUnionId) if ok { unionId, ok = val.(string) } return } func getControlCaller(ctx context.Context) (ContextControlCaller, error) { value := ctx.Value(ContextKeyControl) if isNil(value) {
Name string, ok bool) { caller, ok := ctx.Value(ContextKeyControl).(ContextControlCaller) if !ok { return } return caller.GetControlCallerServerName() } func SetControlCallerServerName(ctx context.Context, serverName string) (context.Context, error) { caller, err := getControlCaller(ctx) if err != nil { return ctx, err } err = caller.SetControlCallerServerName(serverName) if err != nil { return ctx, err } return context.WithValue(ctx, ContextKeyControl, caller), nil } func GetControlCallerServerId(ctx context.Context) (serverId string, ok bool) { caller, ok := ctx.Value(ContextKeyControl).(ContextControlCaller) if !ok { return } return caller.GetControlCallerServerId() } func SetControlCallerServerId(ctx context.Context, serverId string) (context.Context, error) { caller, err := getControlCaller(ctx) if err != nil { return ctx, err } err = caller.SetControlCallerServerId(serverId) if err != nil { return ctx, err } return context.WithValue(ctx, ContextKeyControl, caller), nil } func GetControlCallerMethod(ctx context.Context) (method string, ok bool) { caller, ok := ctx.Value(ContextKeyControl).(ContextControlCaller) if !ok { return } return caller.GetControlCallerMethod() } func SetControlCallerMethod(ctx context.Context, method string) (context.Context, error) { caller, err := getControlCaller(ctx) if err != nil { return ctx, err } err = caller.SetControlCallerMethod(method) if err != nil { return ctx, err } return context.WithValue(ctx, ContextKeyControl, caller), nil } // isNil reports whether i wraps a nil pointer. func isNil(i interface{}) bool { vi := reflect.ValueOf(i) if vi.Kind() == reflect.Ptr { return vi.IsNil() } return false }
return nil, ErrInvalidContext } caller, ok := value.(ContextControlCaller) if !ok { return nil, ErrInvalidContext } return caller, nil } func GetControlCallerServerName(ctx context.Context) (server
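A minimal round-trip sketch for the route-group helpers, assuming an import path for this package (example.com/yourproject/scontext is a placeholder). The control type below is a toy ContextControlRouter; real callers inject their own implementation under ContextKeyControl.

package main

import (
	"context"
	"fmt"

	scontext "example.com/yourproject/scontext" // assumed import path
)

// control is a toy ContextControlRouter implementation for demonstration.
type control struct{ group string }

func (c *control) GetControlRouteGroup() (string, bool) { return c.group, c.group != "" }
func (c *control) SetControlRouteGroup(g string) error  { c.group = g; return nil }

func main() {
	// Seed the context with a control value, then set and read back a group.
	ctx := context.WithValue(context.Background(), scontext.ContextKeyControl, &control{})
	ctx, err := scontext.SetControlRouteGroup(ctx, "gray")
	if err != nil {
		panic(err)
	}
	// Falls back to DefaultGroup when no group has been set.
	fmt.Println(scontext.GetControlRouteGroupWithDefault(ctx, scontext.DefaultGroup)) // "gray"
}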
store.js
import { createStore } from 'redux'; import reducer from './reducer';
export default store
const store = createStore(reducer);