file_name | prefix | suffix | middle
---|---|---|---
views.py | from django.views.generic import View
from django.http import HttpResponse
from django.conf import settings
import os
class | (View):
def get(self, request):
try:
with open(os.path.join(str(settings.ROOT_DIR), 'frontend', 'build', 'index.html')) as file:
return HttpResponse(file.read())
except FileNotFoundError:
return HttpResponse(
"""
index.html not found! Build your React app first.
""",
status=501,
)
| ReactAppView |
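The view above serves the React build's index.html for whatever route it is mounted on; here is a minimal sketch of wiring it into a URL config (the module path and catch-all pattern are illustrative assumptions, not taken from this row):

```python
# urls.py -- hypothetical wiring for ReactAppView (names are illustrative).
from django.urls import re_path

from .views import ReactAppView

urlpatterns = [
    # Catch-all route so the React app's client-side routing keeps working.
    re_path(r"^.*$", ReactAppView.as_view(), name="react-app"),
]
```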
primitive_types3.rs | // primitive_types3.rs
// Create an array with at least 100 elements in it where the ??? is.
// Execute `rustlings hint primitive_types3` for hints!
fn main() | {
let a = [0; 100];
if a.len() >= 100 {
println!("Wow, that's a big array!");
} else {
println!("Meh, I eat arrays like that for breakfast.");
}
} |
|
Checkmark.js | import React from "react"
const Checkmark = () => {
return (
<span className="CheckmarkIcon">
| }
export default Checkmark | <svg xmlns="http://www.w3.org/2000/svg" width="78.369" height="78.369" viewBox="0 0 78.369 78.369"><path fill="#4c4c4c" d="M78.049 19.015L29.458 67.606a1.094 1.094 0 0 1-1.548 0L.32 40.015a1.094 1.094 0 0 1 0-1.547l6.704-6.704a1.095 1.095 0 0 1 1.548 0l20.113 20.112 41.113-41.113a1.095 1.095 0 0 1 1.548 0l6.703 6.704a1.094 1.094 0 0 1 0 1.548z"></path></svg>
</span>
)
|
production.py | from .base import * # noqa
from .base import env
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
SECRET_KEY = env("DJANGO_SECRET_KEY")
# https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["penny-wise.men"])
# DATABASES
# ------------------------------------------------------------------------------
DATABASES["default"] = env.db("DATABASE_URL") # noqa F405
DATABASES["default"]["ATOMIC_REQUESTS"] = True # noqa F405
DATABASES["default"]["CONN_MAX_AGE"] = env.int("CONN_MAX_AGE", default=60) # noqa F405
# CACHES
# ------------------------------------------------------------------------------
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": env("REDIS_URL"),
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
# Mimicking memcached behavior.
# http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
"IGNORE_EXCEPTIONS": True,
},
}
}
# SECURITY
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-proxy-ssl-header
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-ssl-redirect
SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)
# https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-secure
SESSION_COOKIE_SECURE = True
# https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-secure
CSRF_COOKIE_SECURE = True
# https://docs.djangoproject.com/en/dev/topics/security/#ssl-https
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-seconds
# TODO: set this to 60 seconds first and then to 518400 once you prove the former works
SECURE_HSTS_SECONDS = 60
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-include-subdomains
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
"DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True
)
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-preload
SECURE_HSTS_PRELOAD = env.bool("DJANGO_SECURE_HSTS_PRELOAD", default=True)
# https://docs.djangoproject.com/en/dev/ref/middleware/#x-content-type-options-nosniff
SECURE_CONTENT_TYPE_NOSNIFF = env.bool(
"DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True
)
# STORAGES
# ------------------------------------------------------------------------------
# https://django-storages.readthedocs.io/en/latest/#installation
INSTALLED_APPS += ["storages"] # noqa F405
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_ACCESS_KEY_ID = env("DJANGO_AWS_ACCESS_KEY_ID")
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_SECRET_ACCESS_KEY = env("DJANGO_AWS_SECRET_ACCESS_KEY")
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_STORAGE_BUCKET_NAME = env("DJANGO_AWS_STORAGE_BUCKET_NAME")
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_QUERYSTRING_AUTH = False
# DO NOT change these unless you know what you're doing.
_AWS_EXPIRY = 60 * 60 * 24 * 7
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_S3_OBJECT_PARAMETERS = {
"CacheControl": f"max-age={_AWS_EXPIRY}, s-maxage={_AWS_EXPIRY}, must-revalidate"
}
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
AWS_DEFAULT_ACL = None
# STATIC
# ------------------------
STATICFILES_STORAGE = "config.settings.production.StaticRootS3Boto3Storage"
STATIC_URL = f"https://{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/static/"
# MEDIA
# ------------------------------------------------------------------------------
# region http://stackoverflow.com/questions/10390244/
# Full-fledge class: https://stackoverflow.com/a/18046120/104731
from storages.backends.s3boto3 import S3Boto3Storage # noqa E402
class StaticRootS3Boto3Storage(S3Boto3Storage):
location = "static"
class | (S3Boto3Storage):
location = "media"
file_overwrite = False
# endregion
DEFAULT_FILE_STORAGE = "config.settings.production.MediaRootS3Boto3Storage"
MEDIA_URL = f"https://{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/media/"
# TEMPLATES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES[0]["OPTIONS"]["loaders"] = [ # noqa F405
(
"django.template.loaders.cached.Loader",
[
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
)
]
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#default-from-email
DEFAULT_FROM_EMAIL = env(
"DJANGO_DEFAULT_FROM_EMAIL", default="Penny Wise <[email protected]>"
)
# https://docs.djangoproject.com/en/dev/ref/settings/#server-email
SERVER_EMAIL = env("DJANGO_SERVER_EMAIL", default=DEFAULT_FROM_EMAIL)
# https://docs.djangoproject.com/en/dev/ref/settings/#email-subject-prefix
EMAIL_SUBJECT_PREFIX = env(
"DJANGO_EMAIL_SUBJECT_PREFIX", default="[Penny Wise]"
)
# ADMIN
# ------------------------------------------------------------------------------
# Django Admin URL regex.
ADMIN_URL = env("DJANGO_ADMIN_URL")
# Anymail (Mailgun)
# ------------------------------------------------------------------------------
# https://anymail.readthedocs.io/en/stable/installation/#installing-anymail
INSTALLED_APPS += ["anymail"] # noqa F405
EMAIL_BACKEND = "anymail.backends.mailgun.EmailBackend"
# https://anymail.readthedocs.io/en/stable/installation/#anymail-settings-reference
ANYMAIL = {
"MAILGUN_API_KEY": env("MAILGUN_API_KEY"),
"MAILGUN_SENDER_DOMAIN": env("MAILGUN_DOMAIN"),
}
# Gunicorn
# ------------------------------------------------------------------------------
INSTALLED_APPS += ["gunicorn"] # noqa F405
# Collectfast
# ------------------------------------------------------------------------------
# https://github.com/antonagestam/collectfast#installation
INSTALLED_APPS = ["collectfast"] + INSTALLED_APPS # noqa F405
AWS_PRELOAD_METADATA = True
# LOGGING
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#logging
# See https://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"filters": {"require_debug_false": {"()": "django.utils.log.RequireDebugFalse"}},
"formatters": {
"verbose": {
"format": "%(levelname)s %(asctime)s %(module)s "
"%(process)d %(thread)d %(message)s"
}
},
"handlers": {
"mail_admins": {
"level": "ERROR",
"filters": ["require_debug_false"],
"class": "django.utils.log.AdminEmailHandler",
},
"console": {
"level": "DEBUG",
"class": "logging.StreamHandler",
"formatter": "verbose",
},
},
"loggers": {
"django.request": {
"handlers": ["mail_admins"],
"level": "ERROR",
"propagate": True,
},
"django.security.DisallowedHost": {
"level": "ERROR",
"handlers": ["console", "mail_admins"],
"propagate": True,
},
},
}
# Your stuff...
# ------------------------------------------------------------------------------
| MediaRootS3Boto3Storage |
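These production settings lean entirely on the `env` helper imported from `.base`; as a point of reference, here is a minimal sketch of how such a helper is typically built with django-environ (hypothetical, since base.py is not part of this row; only the call patterns used above are shown):

```python
# Hypothetical sketch of the django-environ helper that production.py imports.
import environ

env = environ.Env()
env.read_env()  # optionally load variables from a .env file

SECRET_KEY = env("DJANGO_SECRET_KEY")  # required; raises if unset
ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["penny-wise.men"])
DATABASES = {"default": env.db("DATABASE_URL")}  # parses a database URL
CONN_MAX_AGE = env.int("CONN_MAX_AGE", default=60)
SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)
```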
endpoint.rs | use crate::api_resolve::{Metadata, ProtocolHint};
use crate::app::dst::{DstAddr, Route};
use crate::app::L5D_REQUIRE_ID;
use crate::proxy::http::{identity_from_header, settings};
use crate::proxy::Source;
use crate::transport::{connect, tls};
use crate::{identity, tap};
use crate::{Conditional, NameAddr};
use indexmap::IndexMap;
use linkerd2_proxy_resolve::map_endpoint::MapEndpoint;
use std::net::SocketAddr;
use std::sync::Arc;
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Endpoint {
pub dst_logical: Option<NameAddr>,
pub dst_concrete: Option<NameAddr>,
pub addr: SocketAddr,
pub identity: tls::PeerIdentity,
pub metadata: Metadata,
pub http_settings: settings::Settings,
}
#[derive(Copy, Clone, Debug)]
pub struct FromMetadata;
impl Endpoint {
pub fn can_use_orig_proto(&self) -> bool {
match self.metadata.protocol_hint() {
ProtocolHint::Unknown => return false,
ProtocolHint::Http2 => (),
}
match self.http_settings {
settings::Settings::Http2 => false,
settings::Settings::Http1 {
keep_alive: _,
wants_h1_upgrade,
was_absolute_form: _,
} => !wants_h1_upgrade,
settings::Settings::NotHttp => {
unreachable!(
"Endpoint::can_use_orig_proto called when NotHttp: {:?}",
self,
);
}
}
}
pub fn from_request<B>(req: &http::Request<B>) -> Option<Self> {
let addr = req.extensions().get::<Source>()?.orig_dst_if_not_local()?;
let http_settings = settings::Settings::from_request(req);
let identity = match identity_from_header(req, L5D_REQUIRE_ID) {
Some(require_id) => Conditional::Some(require_id),
None => {
Conditional::None(tls::ReasonForNoPeerName::NotProvidedByServiceDiscovery.into())
}
};
Some(Self {
addr,
dst_logical: None,
dst_concrete: None,
identity,
metadata: Metadata::empty(),
http_settings,
})
}
}
impl From<SocketAddr> for Endpoint {
fn from(addr: SocketAddr) -> Self {
Self {
addr,
dst_logical: None,
dst_concrete: None,
identity: Conditional::None(tls::ReasonForNoPeerName::NotHttp.into()),
metadata: Metadata::empty(),
http_settings: settings::Settings::NotHttp,
}
}
}
impl std::fmt::Display for Endpoint {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.addr.fmt(f)
}
}
impl std::hash::Hash for Endpoint {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.dst_logical.hash(state);
self.dst_concrete.hash(state);
self.addr.hash(state);
self.identity.hash(state);
self.http_settings.hash(state);
// Ignore metadata.
}
}
impl tls::HasPeerIdentity for Endpoint {
fn peer_identity(&self) -> tls::PeerIdentity {
self.identity.clone()
}
}
impl connect::HasPeerAddr for Endpoint {
fn peer_addr(&self) -> SocketAddr {
self.addr
}
}
impl settings::HasSettings for Endpoint {
fn http_settings(&self) -> &settings::Settings {
&self.http_settings
}
}
impl tap::Inspect for Endpoint {
fn src_addr<B>(&self, req: &http::Request<B>) -> Option<SocketAddr> {
req.extensions().get::<Source>().map(|s| s.remote)
}
fn src_tls<'a, B>(
&self,
_: &'a http::Request<B>,
) -> Conditional<&'a identity::Name, tls::ReasonForNoIdentity> {
Conditional::None(tls::ReasonForNoPeerName::Loopback.into())
}
fn dst_addr<B>(&self, _: &http::Request<B>) -> Option<SocketAddr> {
Some(self.addr)
}
fn dst_labels<B>(&self, _: &http::Request<B>) -> Option<&IndexMap<String, String>> {
Some(self.metadata.labels())
}
fn dst_tls<B>(
&self,
_: &http::Request<B>,
) -> Conditional<&identity::Name, tls::ReasonForNoIdentity> {
self.identity.as_ref()
}
fn route_labels<B>(&self, req: &http::Request<B>) -> Option<Arc<IndexMap<String, String>>> {
req.extensions().get::<Route>().map(|r| r.labels().clone())
}
fn is_outbound<B>(&self, _: &http::Request<B>) -> bool |
}
impl MapEndpoint<DstAddr, Metadata> for FromMetadata {
type Out = Endpoint;
fn map_endpoint(&self, target: &DstAddr, addr: SocketAddr, metadata: Metadata) -> Endpoint {
let identity = metadata
.identity()
.cloned()
.map(Conditional::Some)
.unwrap_or_else(|| {
Conditional::None(tls::ReasonForNoPeerName::NotProvidedByServiceDiscovery.into())
});
Endpoint {
addr,
identity,
metadata,
dst_logical: target.dst_logical().name_addr().cloned(),
dst_concrete: target.dst_concrete().name_addr().cloned(),
http_settings: target.http_settings.clone(),
}
}
}
| {
true
} |
mdparser.py | # -*- coding: utf-8 -*-
# Markdown parsers.
#
#
# Author: Moogen Tian <http://blog.galeo.me>
#
# Legal:
#
# This file is published under BSD License.
#
# And the code structure references:
#
# * pagewise (by ainm <ainm at gmx.com>, with personal public license)
#
# * mynt (by Andrew Fricke, the author of Hoep, with BSD license)
#
# please NOTICE that!
#
# Hoep only accepts and returns *unicode* objects in Python 2 and
# *str* objects in Python 3.
from __future__ import unicode_literals
import re
import sys
#
# Error handling.
#
class MDParserException(Exception):
pass
def error(message, *args):
"""
Raise a MDParserException with a given message.
"""
raise MDParserException(message % args)
def warning(message, *args):
"""
Just display a message to standard error.
"""
sys.stderr.write("WARNING: " + message % args)
def halt(message, *args):
"""
Display a message to standard error and stop the program.
"""
sys.stderr.write("FATAL: " + message % args)
sys.exit(1)
#
# Markup support.
#
# Tables with bootstrap
def tablestrap(content, class_=''):
if class_:
class_ = class_.split()
if isinstance(class_, list):
if 'table' not in class_:
class_ = ['table'] + class_
class_ = ' '.join(class_)
if class_:
class_ = 'class="%s"' % class_
return ''.join(['<table ', class_, '>\n',
content, '\n</table>'])
# Pygments.
HAVE_PYGMENTS = True
try:
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name
except ImportError:
HAVE_PYGMENTS = False
def require_pygments():
"""
For error reporting when trying to use a markup language
with pygments, but pygments isn't installed.
"""
if not HAVE_PYGMENTS:
error("please, install Pygments <http://pygments.org/>.")
def hl_with_pygments(text, lang, fmt_options={}):
s = ''
formatter = HtmlFormatter(**fmt_options)
try:
lexer = get_lexer_by_name(lang, stripall=True)
except ValueError:
s = '<div class="highlight"><span class="err">'\
'Error: language "%s" is not supported</span></div>' % lang
lexer = get_lexer_by_name('text', stripall=True)
return ''.join([s, highlight(text, lexer, formatter)])
# Available renderers will add themselves to this hash.
# The key is the renderer name, the value is another hash
# with two keys/values, the renderer constructor/options.
MARKUP_RENDERERS = {}
def xlate_exts_flags(exts_flags_opts, parser_exts_flags):
actual_exts = 0
actual_flags = 0
exts = exts_flags_opts['extensions']
flags = exts_flags_opts['render_flags']
parser_exts = parser_exts_flags['extensions']
parser_flags = parser_exts_flags['render_flags']
if ('fenced_code' in exts) or ('tables' in exts):
require_pygments()
for ext in exts:
if ext in parser_exts:
actual_exts |= parser_exts[ext]
else:
warning("ignoring unknown extension: %s", str(ext))
for flag in flags:
if flag in parser_flags:
actual_flags |= parser_flags[flag]
else:
warning("ignoring unknown render flag: %s", str(flag))
return actual_exts, actual_flags
#
# Misaka.
#
HAVE_MISAKA = True
try:
import misaka
from misaka import HtmlRenderer
MISAKA_EXTS_FLAGS = {
'extensions': {
'tables': misaka.EXT_TABLES,
'fenced_code': misaka.EXT_FENCED_CODE,
'footnotes': misaka.EXT_FOOTNOTES,
'autolink': misaka.EXT_AUTOLINK,
'strikethrough': misaka.EXT_STRIKETHROUGH,
'underline': misaka.EXT_UNDERLINE,
'highlight': misaka.EXT_HIGHLIGHT,
'quote': misaka.EXT_QUOTE,
'superscript': misaka.EXT_SUPERSCRIPT,
'math': misaka.EXT_MATH,
'no_intra_emphasis': misaka.EXT_NO_INTRA_EMPHASIS,
'space_headers': misaka.EXT_SPACE_HEADERS,
'math_explicit': misaka.EXT_MATH_EXPLICIT,
'disable_indented_code': misaka.EXT_DISABLE_INDENTED_CODE
},
'render_flags': {
'skip_html': misaka.HTML_SKIP_HTML,
'escape': misaka.HTML_ESCAPE,
'hard_wrap': misaka.HTML_HARD_WRAP,
'use_xhtml': misaka.HTML_USE_XHTML,
}
}
class MisakaRenderer(HtmlRenderer):
def __init__(self, tbl_class='', fmt_options={}, *args, **kwargs):
super(MisakaRenderer, self).__init__(*args, **kwargs)
self.tbl_class = tbl_class
self.fmt_options = fmt_options
if HAVE_PYGMENTS:
def blockcode(self, text, lang):
return hl_with_pygments(text, lang, self.fmt_options)
def table(self, content):
return tablestrap(content, self.tbl_class)
def misaka_renderer(options, tbl_class='', fmt_options={}):
"""
Returns a function that can be used to transform Markdown to HTML
using Misaka, preconfigured with the given extensions/flags.
"""
Renderer = MisakaRenderer
used_exts, used_flags = xlate_exts_flags(options, MISAKA_EXTS_FLAGS)
return misaka.Markdown(Renderer(tbl_class, fmt_options, used_flags), used_exts)
MARKUP_RENDERERS['misaka'] = {
'renderer': misaka_renderer,
'options': ['extensions', 'render_flags'],
}
except ImportError:
HAVE_MISAKA = False
#
# hoep
#
HAVE_HOEP = True
try:
import hoep as h
HOEP_EXTS_FLAGS = {
'extensions': {
'autolink': h.EXT_AUTOLINK,
'disable_indented_code': h.EXT_DISABLE_INDENTED_CODE,
'fenced_code': h.EXT_FENCED_CODE,
'footnotes': h.EXT_FOOTNOTES,
'highlight': h.EXT_HIGHLIGHT,
'lax_spacing': h.EXT_LAX_SPACING,
'no_intra_emphasis': h.EXT_NO_INTRA_EMPHASIS,
'quote': h.EXT_QUOTE,
'space_headers': h.EXT_SPACE_HEADERS,
'strikethrough': h.EXT_STRIKETHROUGH,
'superscript': h.EXT_SUPERSCRIPT,
'tables': h.EXT_TABLES,
'underline': h.EXT_UNDERLINE
},
'render_flags': {
'escape': h.HTML_ESCAPE,
'expand_tabs': h.HTML_EXPAND_TABS,
'hard_wrap': h.HTML_HARD_WRAP,
'safelink': h.HTML_SAFELINK,
'skip_html': h.HTML_SKIP_HTML,
'skip_images': h.HTML_SKIP_IMAGES,
'skip_links': h.HTML_SKIP_LINKS,
'skip_style': h.HTML_SKIP_STYLE,
'smartypants': h.HTML_SMARTYPANTS,
'toc': h.HTML_TOC,
'use_xhtml': h.HTML_USE_XHTML
}
}
class HoepRenderer(h.Hoep):
def __init__(self, extensions=0, render_flags=0, tbl_class='',
fmt_options={}):
super(HoepRenderer, self).__init__(extensions, render_flags)
self._toc_ids = {}
self._toc_patterns = (
(r'<[^<]+?>', ''),
(r'[^a-z0-9_.\s-]', ''),
(r'\s+', '-'),
(r'^[^a-z]+', ''),
(r'^$', 'section')
)
self.tbl_class = tbl_class
self.fmt_options = fmt_options
if HAVE_PYGMENTS:
def block_code(self, text, lang):
"""Highlight code with pygments.
"""
return hl_with_pygments(text, lang, self.fmt_options)
def table(self, header, body):
content = header + body
return tablestrap(content, self.tbl_class)
def header(self, text, level):
if self.render_flags & h.HTML_TOC:
| else:
return '<h{0}>{1}</h{0}>'.format(level, text)
def preprocess(self, markdown):
self._toc_ids.clear()
return markdown
def hoep_renderer(options, **kwargs):
"""
Returns a function that can be used to transform Markdown to HTML
using Hoep, preconfigured with the given extensions/flags.
"""
used_exts, used_flags = xlate_exts_flags(options, HOEP_EXTS_FLAGS)
return HoepRenderer(used_exts, used_flags, **kwargs).render
MARKUP_RENDERERS['hoep'] = {
'renderer': hoep_renderer,
'options': ['extensions', 'render_flags']
}
except ImportError:
HAVE_HOEP = False
class MarkupProvider(object):
def __init__(self, markup, options):
"""
Arguments:
- `markup`: str, 'misaka' | 'hoep'.
- `options`: dict, has the keys: 'extensions' and 'render_flags'.
"""
if markup not in MARKUP_RENDERERS:
error("Unavailable markup renderer: %s", markup)
self.markup = markup
if ('extensions' not in options) and ('render_flags' not in options):
error("Key error in options, must contain 'extensions' and 'render_flags'.")
self.options = options
def _get_option(self, option, markup_options={}):
"""
Lookup 'option' in 'markup_options' (a dict)
but fall back to default option if unbound.
"""
if markup_options and (option in markup_options):
return markup_options[option]
else:
return self.options[option]
def get_renderer(self, markup_options={}, **kwargs):
"""
Will return a function to render the item content
based on the options specified in it. All unspecified
options will be taken from the base configuration.
"""
options = {}
for option in MARKUP_RENDERERS[self.markup]['options']:
options[option] = self._get_option(option, markup_options)
return MARKUP_RENDERERS[self.markup]['renderer'](options, **kwargs)
| identifier = text.lower()
for pattern, replace in self._toc_patterns:
identifier = re.sub(pattern, replace, identifier)
if identifier in self._toc_ids:
self._toc_ids[identifier] += 1
identifier = '{0}-{1}'.format(identifier, self._toc_ids[identifier])
else:
self._toc_ids[identifier] = 1
return ('<h{0} id="{1}">{2}'
'<a class="headerlink" href="#{1}" title="Link to header title.">¶</a>'
'</h{0}>').format(level, identifier, text)
|
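The `MarkupProvider` above hands back a ready-to-call renderer; a brief usage sketch, assuming Misaka and Pygments are installed (the extension, flag, and CSS class values are illustrative):

```python
# Minimal sketch of driving MarkupProvider (illustrative option values).
provider = MarkupProvider("misaka", {
    "extensions": ["fenced_code", "tables"],
    "render_flags": ["hard_wrap"],
})
render = provider.get_renderer(tbl_class="table-striped")
html = render("# Title\n\nSome *Markdown* text with a ```code fence```.")
```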
bert_tokenizer.py | # coding=utf-8
# Copyright 2020 TF.Text Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Basic tokenization ops for BERT preprocessing."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import lookup_ops
from tensorflow.python.ops import string_ops
from tensorflow_text.python.ops import regex_split_ops
from tensorflow_text.python.ops.normalize_ops import case_fold_utf8
from tensorflow_text.python.ops.normalize_ops import normalize_utf8
from tensorflow_text.python.ops.tokenization import TokenizerWithOffsets
from tensorflow_text.python.ops.wordpiece_tokenizer import WordpieceTokenizer
_DELIM_REGEX = [
r"\s+",
r"|".join([
r"[!-/]",
r"[:-@]",
r"[\[-`]",
r"[{-~]",
r"[\p{P}]",
]),
r"|".join([
r"[\x{4E00}-\x{9FFF}]",
r"[\x{3400}-\x{4DBF}]",
r"[\x{20000}-\x{2A6DF}]",
r"[\x{2A700}-\x{2B73F}]",
r"[\x{2B740}-\x{2B81F}]",
r"[\x{2B820}-\x{2CEAF}]",
r"[\x{F900}-\x{FAFF}]",
r"[\x{2F800}-\x{2FA1F}]",
]),
]
_DELIM_REGEX_PATTERN = "|".join(_DELIM_REGEX)
_KEEP_DELIM_NO_WHITESPACE = copy.deepcopy(_DELIM_REGEX)
_KEEP_DELIM_NO_WHITESPACE.remove(r"\s+")
_UNUSED_TOKEN_REGEX = "\\[unused\\d+\\]"
_KEEP_DELIM_NO_WHITESPACE_PATTERN = "|".join(_KEEP_DELIM_NO_WHITESPACE)
class BasicTokenizer(TokenizerWithOffsets):
r"""Basic tokenizer for for tokenizing text.
A basic tokenizer that tokenizes using some deterministic rules:
- For most languages, this tokenizer will split on whitespace.
- For Chinese, Japanese, and Korean text, this tokenizer splits each
character into its own token.
Attributes:
lower_case: bool - If true, a preprocessing step is added to lowercase the
text, apply NFD normalization, and strip accents characters.
keep_whitespace: bool - If true, preserves whitespace characters instead of
stripping them away.
normalization_form: If set and lower_case=False, the input text will be
normalized to `normalization_form`. See normalize_utf8() op for a list of
valid values.
preserve_unused_token: If true, text in the regex format "\\[unused\\d+\\]"
will be treated as a token and thus remain preserved as is to be looked up
in the vocabulary.
"""
def __init__(self,
lower_case=False,
keep_whitespace=False,
normalization_form=None,
preserve_unused_token=False):
self._lower_case = lower_case
if not keep_whitespace:
self._keep_delim_regex_pattern = _KEEP_DELIM_NO_WHITESPACE_PATTERN
else:
self._keep_delim_regex_pattern = _DELIM_REGEX_PATTERN
self._normalization_form = normalization_form
if preserve_unused_token:
self._delim_regex_pattern = "|".join(
[_UNUSED_TOKEN_REGEX, _DELIM_REGEX_PATTERN])
self._keep_delim_regex_pattern = "|".join(
[_UNUSED_TOKEN_REGEX, self._keep_delim_regex_pattern])
else:
self._delim_regex_pattern = _DELIM_REGEX_PATTERN
def tokenize(self, text_input):
tokens, _, _ = self.tokenize_with_offsets(text_input)
return tokens
def tokenize_with_offsets(self, text_input):
"""Performs basic word tokenization for BERT.
Args:
text_input: A `Tensor` or `RaggedTensor` of untokenized UTF-8 strings.
Returns:
A `RaggedTensor` of tokenized strings from text_input.
"""
# lowercase and strip accents (if option is set)
if self._lower_case:
text_input = case_fold_utf8(text_input)
text_input = normalize_utf8(text_input, "NFD")
text_input = string_ops.regex_replace(text_input, r"\p{Mn}", "")
else:
# utf8 normalization
if self._normalization_form is not None:
text_input = normalize_utf8(text_input, self._normalization_form)
# strip out control characters
text_input = string_ops.regex_replace(text_input, r"\p{Cc}|\p{Cf}", " ")
return regex_split_ops.regex_split_with_offsets(
text_input, self._delim_regex_pattern, self._keep_delim_regex_pattern,
"BertBasicTokenizer")
class BertTokenizer(TokenizerWithOffsets):
r"""Tokenizer used for BERT.
This tokenizer applies an end-to-end, text string to wordpiece tokenization.
It first applies basic tokenization, followed by wordpiece tokenization.
See BasicTokenizer and WordpieceTokenizer for their respective details.
Attributes:
vocab_lookup_table: A lookup table implementing the LookupInterface
containing the vocabulary of subwords or a string which is the file path
to the vocab.txt file.
suffix_indicator: (optional) The characters prepended to a wordpiece to
indicate that it is a suffix to another subword. Default is '##'.
max_bytes_per_word: (optional) Max size of input token. Default is 100.
max_chars_per_token: (optional) Max size of subwords, excluding suffix
indicator. If known, providing this improves the efficiency of decoding
long words.
token_out_type: (optional) The type of the token to return. This can be
`tf.int64` IDs, or `tf.string` subwords. The default is `tf.int64`.
unknown_token: (optional) The value to use when an unknown token is found.
Default is "[UNK]". If this is set to a string, and `token_out_type` is
`tf.int64`, the `vocab_lookup_table` is used to convert the
`unknown_token` to an integer. If this is set to `None`, out-of-vocabulary
tokens are left as is.
split_unknown_characters: (optional) Whether to split out single unknown
characters as subtokens. If False (default), words containing unknown
characters will be treated as single unknown tokens.
lower_case: bool - If true, a preprocessing step is added to lowercase the
text, apply NFD normalization, and strip accents characters.
keep_whitespace: bool - If true, preserves whitespace characters instead of
stripping them away.
normalization_form: If set and lower_case=False, the input text will be
normalized to `normalization_form`. See normalize_utf8() op for a list of
valid values.
preserve_unused_token: If true, text in the regex format `\\[unused\\d+\\]`
will be treated as a token and thus remain preserved as is to be looked up
in the vocabulary.
"""
def __init__(self,
vocab_lookup_table,
suffix_indicator="##",
max_bytes_per_word=100,
max_chars_per_token=None,
token_out_type=dtypes.int64,
unknown_token="[UNK]",
split_unknown_characters=False,
lower_case=False,
keep_whitespace=False,
normalization_form=None,
preserve_unused_token=False):
if isinstance(vocab_lookup_table, str) or isinstance(
vocab_lookup_table, ops.Tensor):
init = lookup_ops.TextFileIdTableInitializer(vocab_lookup_table)
vocab_lookup_table = lookup_ops.StaticVocabularyTableV1(
init, num_oov_buckets=1, lookup_key_dtype=dtypes.string)
print("Before ", type(lower_case))
if isinstance(lower_case, ops.Tensor):
lower_case = tf.compat.v1.get_default_session().run(lower_case)
print("After ", type(lower_case))
self._basic_tokenizer = BasicTokenizer(lower_case, keep_whitespace,
normalization_form,
preserve_unused_token)
self._wordpiece_tokenizer = WordpieceTokenizer(
vocab_lookup_table, suffix_indicator, max_bytes_per_word,
max_chars_per_token, token_out_type, unknown_token,
split_unknown_characters)
def tokenize_with_offsets(self, text_input):
tokens, begin, _ = self._basic_tokenizer.tokenize_with_offsets(text_input)
wordpieces, wp_begin, wp_end = (
self._wordpiece_tokenizer.tokenize_with_offsets(tokens))
begin_expanded = array_ops.expand_dims(begin, axis=2)
final_begin = begin_expanded + wp_begin
final_end = begin_expanded + wp_end
return wordpieces, final_begin, final_end
def tokenize(self, text_input):
"""Performs untokenized text to wordpiece tokenization for BERT.
Args:
text_input: A `Tensor` or `RaggedTensor` of untokenized UTF-8
strings. | contents (or ID in the vocab_lookup_table representing that string)
of the `jth` token in `input[i1...iN]`
"""
tokens = self._basic_tokenizer.tokenize(text_input)
return self._wordpiece_tokenizer.tokenize(tokens) |
Returns:
A `RaggedTensor` of tokens where `tokens[i1...iN, j]` is the string |
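For context, the same tokenizer is exposed publicly as `tensorflow_text.BertTokenizer`; a short usage sketch (the vocab file path and input strings are hypothetical):

```python
# Usage sketch via the public tensorflow_text package (vocab.txt is hypothetical).
import tensorflow_text as tf_text

tokenizer = tf_text.BertTokenizer("vocab.txt", lower_case=True)
wordpieces = tokenizer.tokenize(["Hello, TensorFlow Text!"])  # RaggedTensor of ids
pieces, begin, end = tokenizer.tokenize_with_offsets(["Hello, TensorFlow Text!"])
```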
admin.go | // Copyright 2015 Matthew Holt and The Caddy Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package caddy
import (
"bytes"
"context"
"crypto"
"crypto/tls"
"crypto/x509"
"encoding/base64"
"encoding/json"
"errors"
"expvar"
"fmt"
"io"
"io/ioutil"
"net"
"net/http"
"net/http/pprof"
"net/url"
"os"
"path"
"regexp"
"strconv"
"strings"
"sync"
"time"
"github.com/caddyserver/caddy/v2/notify"
"github.com/crazcell/certmagic"
"github.com/prometheus/client_golang/prometheus"
"go.uber.org/zap"
)
// AdminConfig configures Caddy's API endpoint, which is used
// to manage Caddy while it is running.
type AdminConfig struct {
// If true, the admin endpoint will be completely disabled.
// Note that this makes any runtime changes to the config
// impossible, since the interface to do so is through the
// admin endpoint.
Disabled bool `json:"disabled,omitempty"`
// The address to which the admin endpoint's listener should
// bind itself. Can be any single network address that can be
// parsed by Caddy. Default: localhost:2019
Listen string `json:"listen,omitempty"`
// If true, CORS headers will be emitted, and requests to the
// API will be rejected if their `Host` and `Origin` headers
// do not match the expected value(s). Use `origins` to
// customize which origins/hosts are allowed. If `origins` is
// not set, the listen address is the only value allowed by
// default. Enforced only on local (plaintext) endpoint.
EnforceOrigin bool `json:"enforce_origin,omitempty"`
// The list of allowed origins/hosts for API requests. Only needed
// if accessing the admin endpoint from a host different from the
// socket's network interface or if `enforce_origin` is true. If not
// set, the listener address will be the default value. If set but
// empty, no origins will be allowed. Enforced only on local
// (plaintext) endpoint.
Origins []string `json:"origins,omitempty"`
// Options pertaining to configuration management.
Config *ConfigSettings `json:"config,omitempty"`
// Options that establish this server's identity. Identity refers to
// credentials which can be used to uniquely identify and authenticate
// this server instance. This is required if remote administration is
// enabled (but does not require remote administration to be enabled).
// Default: no identity management.
Identity *IdentityConfig `json:"identity,omitempty"`
// Options pertaining to remote administration. By default, remote
// administration is disabled. If enabled, identity management must
// also be configured, as that is how the endpoint is secured.
// See the neighboring "identity" object.
//
// EXPERIMENTAL: This feature is subject to change.
Remote *RemoteAdmin `json:"remote,omitempty"`
}
// ConfigSettings configures the management of configuration.
type ConfigSettings struct {
// Whether to keep a copy of the active config on disk. Default is true.
// Note that "pulled" dynamic configs (using the neighboring "load" module)
// are not persisted; only configs that are pushed to Caddy get persisted. | // managed elsewhere, and you want Caddy to pull its config dynamically
// when it starts. The pulled config completely replaces the current
// one, just like any other config load. It is an error if a pulled
// config is configured to pull another config.
//
// EXPERIMENTAL: Subject to change.
LoadRaw json.RawMessage `json:"load,omitempty" caddy:"namespace=caddy.config_loaders inline_key=module"`
// The interval to pull config. With a non-zero value, will pull config
// from config loader (eg. a http loader) with given interval.
//
// EXPERIMENTAL: Subject to change.
LoadInterval Duration `json:"load_interval,omitempty"`
}
// IdentityConfig configures management of this server's identity. An identity
// consists of credentials that uniquely verify this instance; for example,
// TLS certificates (public + private key pairs).
type IdentityConfig struct {
// List of names or IP addresses which refer to this server.
// Certificates will be obtained for these identifiers so
// secure TLS connections can be made using them.
Identifiers []string `json:"identifiers,omitempty"`
// Issuers that can provide this admin endpoint its identity
// certificate(s). Default: ACME issuers configured for
// ZeroSSL and Let's Encrypt. Be sure to change this if you
// require credentials for private identifiers.
IssuersRaw []json.RawMessage `json:"issuers,omitempty" caddy:"namespace=tls.issuance inline_key=module"`
issuers []certmagic.Issuer
}
// RemoteAdmin enables and configures remote administration. If enabled,
// a secure listener enforcing mutual TLS authentication will be started
// on a different port from the standard plaintext admin server.
//
// This endpoint is secured using identity management, which must be
// configured separately (because identity management does not depend
// on remote administration). See the admin/identity config struct.
//
// EXPERIMENTAL: Subject to change.
type RemoteAdmin struct {
// The address on which to start the secure listener.
// Default: :2021
Listen string `json:"listen,omitempty"`
// List of access controls for this secure admin endpoint.
// This configures TLS mutual authentication (i.e. authorized
// client certificates), but also application-layer permissions
// like which paths and methods each identity is authorized for.
AccessControl []*AdminAccess `json:"access_control,omitempty"`
}
// AdminAccess specifies what permissions an identity or group
// of identities are granted.
type AdminAccess struct {
// Base64-encoded DER certificates containing public keys to accept.
// (The contents of PEM certificate blocks are base64-encoded DER.)
// Any of these public keys can appear in any part of a verified chain.
PublicKeys []string `json:"public_keys,omitempty"`
// Limits what the associated identities are allowed to do.
// If unspecified, all permissions are granted.
Permissions []AdminPermissions `json:"permissions,omitempty"`
publicKeys []crypto.PublicKey
}
// AdminPermissions specifies what kinds of requests are allowed
// to be made to the admin endpoint.
type AdminPermissions struct {
// The API paths allowed. Paths are simple prefix matches.
// Any subpath of the specified paths will be allowed.
Paths []string `json:"paths,omitempty"`
// The HTTP methods allowed for the given paths.
Methods []string `json:"methods,omitempty"`
}
// newAdminHandler reads admin's config and returns an http.Handler suitable
// for use in an admin endpoint server, which will be listening on listenAddr.
func (admin AdminConfig) newAdminHandler(addr NetworkAddress, remote bool) adminHandler {
muxWrap := adminHandler{mux: http.NewServeMux()}
// secure the local or remote endpoint respectively
if remote {
muxWrap.remoteControl = admin.Remote
} else {
muxWrap.enforceHost = !addr.isWildcardInterface()
muxWrap.allowedOrigins = admin.allowedOrigins(addr)
}
addRouteWithMetrics := func(pattern string, handlerLabel string, h http.Handler) {
labels := prometheus.Labels{"path": pattern, "handler": handlerLabel}
h = instrumentHandlerCounter(
adminMetrics.requestCount.MustCurryWith(labels),
h,
)
muxWrap.mux.Handle(pattern, h)
}
// addRoute just calls muxWrap.mux.Handle after
// wrapping the handler with error handling
addRoute := func(pattern string, handlerLabel string, h AdminHandler) {
wrapper := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
err := h.ServeHTTP(w, r)
if err != nil {
labels := prometheus.Labels{
"path": pattern,
"handler": handlerLabel,
"method": strings.ToUpper(r.Method),
}
adminMetrics.requestErrors.With(labels).Inc()
}
muxWrap.handleError(w, r, err)
})
addRouteWithMetrics(pattern, handlerLabel, wrapper)
}
const handlerLabel = "admin"
// register standard config control endpoints
addRoute("/"+rawConfigKey+"/", handlerLabel, AdminHandlerFunc(handleConfig))
addRoute("/id/", handlerLabel, AdminHandlerFunc(handleConfigID))
addRoute("/stop", handlerLabel, AdminHandlerFunc(handleStop))
// register debugging endpoints
addRouteWithMetrics("/debug/pprof/", handlerLabel, http.HandlerFunc(pprof.Index))
addRouteWithMetrics("/debug/pprof/cmdline", handlerLabel, http.HandlerFunc(pprof.Cmdline))
addRouteWithMetrics("/debug/pprof/profile", handlerLabel, http.HandlerFunc(pprof.Profile))
addRouteWithMetrics("/debug/pprof/symbol", handlerLabel, http.HandlerFunc(pprof.Symbol))
addRouteWithMetrics("/debug/pprof/trace", handlerLabel, http.HandlerFunc(pprof.Trace))
addRouteWithMetrics("/debug/vars", handlerLabel, expvar.Handler())
// register third-party module endpoints
for _, m := range GetModules("admin.api") {
router := m.New().(AdminRouter)
handlerLabel := m.ID.Name()
for _, route := range router.Routes() {
addRoute(route.Pattern, handlerLabel, route.Handler)
}
}
return muxWrap
}
// allowedOrigins returns a list of origins that are allowed.
// If admin.Origins is nil (null), the provided listen address
// will be used as the default origin. If admin.Origins is
// empty, no origins will be allowed, effectively bricking the
// endpoint for non-unix-socket endpoints, but whatever.
func (admin AdminConfig) allowedOrigins(addr NetworkAddress) []string {
uniqueOrigins := make(map[string]struct{})
for _, o := range admin.Origins {
uniqueOrigins[o] = struct{}{}
}
if admin.Origins == nil {
if addr.isLoopback() {
if addr.IsUnixNetwork() {
// RFC 2616, Section 14.26:
// "A client MUST include a Host header field in all HTTP/1.1 request
// messages. If the requested URI does not include an Internet host
// name for the service being requested, then the Host header field MUST
// be given with an empty value."
uniqueOrigins[""] = struct{}{}
} else {
uniqueOrigins[net.JoinHostPort("localhost", addr.port())] = struct{}{}
uniqueOrigins[net.JoinHostPort("::1", addr.port())] = struct{}{}
uniqueOrigins[net.JoinHostPort("127.0.0.1", addr.port())] = struct{}{}
}
}
if !addr.IsUnixNetwork() {
uniqueOrigins[addr.JoinHostPort(0)] = struct{}{}
}
}
allowed := make([]string, 0, len(uniqueOrigins))
for origin := range uniqueOrigins {
allowed = append(allowed, origin)
}
return allowed
}
// replaceLocalAdminServer replaces the running local admin server
// according to the relevant configuration in cfg. If no configuration
// for the admin endpoint exists in cfg, a default one is used, so
// that there is always an admin server (unless it is explicitly
// configured to be disabled).
func replaceLocalAdminServer(cfg *Config) error {
// always be sure to close down the old admin endpoint
// as gracefully as possible, even if the new one is
// disabled -- careful to use reference to the current
// (old) admin endpoint since it will be different
// when the function returns
oldAdminServer := localAdminServer
defer func() {
// do the shutdown asynchronously so that any
// current API request gets a response; this
// goroutine may last a few seconds
if oldAdminServer != nil {
go func(oldAdminServer *http.Server) {
err := stopAdminServer(oldAdminServer)
if err != nil {
Log().Named("admin").Error("stopping current admin endpoint", zap.Error(err))
}
}(oldAdminServer)
}
}()
// always get a valid admin config
adminConfig := DefaultAdminConfig
if cfg != nil && cfg.Admin != nil {
adminConfig = cfg.Admin
}
// if new admin endpoint is to be disabled, we're done
if adminConfig.Disabled {
Log().Named("admin").Warn("admin endpoint disabled")
return nil
}
// extract a singular listener address
addr, err := parseAdminListenAddr(adminConfig.Listen, DefaultAdminListen)
if err != nil {
return err
}
handler := adminConfig.newAdminHandler(addr, false)
ln, err := Listen(addr.Network, addr.JoinHostPort(0))
if err != nil {
return err
}
localAdminServer = &http.Server{
Addr: addr.String(), // for logging purposes only
Handler: handler,
ReadTimeout: 10 * time.Second,
ReadHeaderTimeout: 5 * time.Second,
IdleTimeout: 60 * time.Second,
MaxHeaderBytes: 1024 * 64,
}
adminLogger := Log().Named("admin")
go func() {
if err := localAdminServer.Serve(ln); !errors.Is(err, http.ErrServerClosed) {
adminLogger.Error("admin server shutdown for unknown reason", zap.Error(err))
}
}()
adminLogger.Info("admin endpoint started",
zap.String("address", addr.String()),
zap.Bool("enforce_origin", adminConfig.EnforceOrigin),
zap.Strings("origins", handler.allowedOrigins))
if !handler.enforceHost {
adminLogger.Warn("admin endpoint on open interface; host checking disabled",
zap.String("address", addr.String()))
}
return nil
}
// manageIdentity sets up automated identity management for this server.
func manageIdentity(ctx Context, cfg *Config) error {
if cfg == nil || cfg.Admin == nil || cfg.Admin.Identity == nil {
return nil
}
// set default issuers; this is pretty hacky because we can't
// import the caddytls package -- but it works
if cfg.Admin.Identity.IssuersRaw == nil {
cfg.Admin.Identity.IssuersRaw = []json.RawMessage{
json.RawMessage(`{"module": "zerossl"}`),
json.RawMessage(`{"module": "acme"}`),
}
}
// load and provision issuer modules
if cfg.Admin.Identity.IssuersRaw != nil {
val, err := ctx.LoadModule(cfg.Admin.Identity, "IssuersRaw")
if err != nil {
return fmt.Errorf("loading identity issuer modules: %s", err)
}
for _, issVal := range val.([]interface{}) {
cfg.Admin.Identity.issuers = append(cfg.Admin.Identity.issuers, issVal.(certmagic.Issuer))
}
}
// we'll make a new cache when we make the CertMagic config, so stop any previous cache
if identityCertCache != nil {
identityCertCache.Stop()
}
logger := Log().Named("admin.identity")
cmCfg := cfg.Admin.Identity.certmagicConfig(logger, true)
// issuers have circular dependencies with the configs because,
// as explained in the caddytls package, they need access to the
// correct storage and cache to solve ACME challenges
for _, issuer := range cfg.Admin.Identity.issuers {
// avoid import cycle with caddytls package, so manually duplicate the interface here, yuck
if annoying, ok := issuer.(interface{ SetConfig(cfg *certmagic.Config) }); ok {
annoying.SetConfig(cmCfg)
}
}
// obtain and renew server identity certificate(s)
return cmCfg.ManageAsync(ctx, cfg.Admin.Identity.Identifiers)
}
// replaceRemoteAdminServer replaces the running remote admin server
// according to the relevant configuration in cfg. It stops any previous
// remote admin server and only starts a new one if configured.
func replaceRemoteAdminServer(ctx Context, cfg *Config) error {
if cfg == nil {
return nil
}
remoteLogger := Log().Named("admin.remote")
oldAdminServer := remoteAdminServer
defer func() {
if oldAdminServer != nil {
go func(oldAdminServer *http.Server) {
err := stopAdminServer(oldAdminServer)
if err != nil {
Log().Named("admin").Error("stopping current secure admin endpoint", zap.Error(err))
}
}(oldAdminServer)
}
}()
if cfg.Admin == nil || cfg.Admin.Remote == nil {
return nil
}
addr, err := parseAdminListenAddr(cfg.Admin.Remote.Listen, DefaultRemoteAdminListen)
if err != nil {
return err
}
// make the HTTP handler but disable Host/Origin enforcement
// because we are using TLS authentication instead
handler := cfg.Admin.newAdminHandler(addr, true)
// create client certificate pool for TLS mutual auth, and extract public keys
// so that we can enforce access controls at the application layer
clientCertPool := x509.NewCertPool()
for i, accessControl := range cfg.Admin.Remote.AccessControl {
for j, certBase64 := range accessControl.PublicKeys {
cert, err := decodeBase64DERCert(certBase64)
if err != nil {
return fmt.Errorf("access control %d public key %d: parsing base64 certificate DER: %v", i, j, err)
}
accessControl.publicKeys = append(accessControl.publicKeys, cert.PublicKey)
clientCertPool.AddCert(cert)
}
}
// create TLS config that will enforce mutual authentication
cmCfg := cfg.Admin.Identity.certmagicConfig(remoteLogger, false)
tlsConfig := cmCfg.TLSConfig()
tlsConfig.NextProtos = nil // this server does not solve ACME challenges
tlsConfig.ClientAuth = tls.RequireAndVerifyClientCert
tlsConfig.ClientCAs = clientCertPool
// convert logger to stdlib so it can be used by HTTP server
serverLogger, err := zap.NewStdLogAt(remoteLogger, zap.DebugLevel)
if err != nil {
return err
}
// create secure HTTP server
remoteAdminServer = &http.Server{
Addr: addr.String(), // for logging purposes only
Handler: handler,
TLSConfig: tlsConfig,
ReadTimeout: 10 * time.Second,
ReadHeaderTimeout: 5 * time.Second,
IdleTimeout: 60 * time.Second,
MaxHeaderBytes: 1024 * 64,
ErrorLog: serverLogger,
}
// start listener
ln, err := Listen(addr.Network, addr.JoinHostPort(0))
if err != nil {
return err
}
ln = tls.NewListener(ln, tlsConfig)
go func() {
if err := remoteAdminServer.Serve(ln); !errors.Is(err, http.ErrServerClosed) {
remoteLogger.Error("admin remote server shutdown for unknown reason", zap.Error(err))
}
}()
remoteLogger.Info("secure admin remote control endpoint started",
zap.String("address", addr.String()))
return nil
}
func (ident *IdentityConfig) certmagicConfig(logger *zap.Logger, makeCache bool) *certmagic.Config {
if ident == nil {
// user might not have configured identity; that's OK, we can still make a
// certmagic config, although it'll be mostly useless for remote management
ident = new(IdentityConfig)
}
cmCfg := &certmagic.Config{
Storage: DefaultStorage, // do not act as part of a cluster (this is for the server's local identity)
Logger: logger,
Issuers: ident.issuers,
}
if makeCache {
identityCertCache = certmagic.NewCache(certmagic.CacheOptions{
GetConfigForCert: func(certmagic.Certificate) (*certmagic.Config, error) {
return cmCfg, nil
},
})
}
return certmagic.New(identityCertCache, *cmCfg)
}
// IdentityCredentials returns this instance's configured, managed identity credentials
// that can be used in TLS client authentication.
func (ctx Context) IdentityCredentials(logger *zap.Logger) ([]tls.Certificate, error) {
if ctx.cfg == nil || ctx.cfg.Admin == nil || ctx.cfg.Admin.Identity == nil {
return nil, fmt.Errorf("no server identity configured")
}
ident := ctx.cfg.Admin.Identity
if len(ident.Identifiers) == 0 {
return nil, fmt.Errorf("no identifiers configured")
}
if logger == nil {
logger = Log()
}
magic := ident.certmagicConfig(logger, false)
return magic.ClientCredentials(ctx, ident.Identifiers)
}
// enforceAccessControls enforces application-layer access controls for r based on remote.
// It expects that the TLS server has already established at least one verified chain of
// trust, and then looks for a matching, authorized public key that is allowed to access
// the defined path(s) using the defined method(s).
func (remote RemoteAdmin) enforceAccessControls(r *http.Request) error {
for _, chain := range r.TLS.VerifiedChains {
for _, peerCert := range chain {
for _, adminAccess := range remote.AccessControl {
for _, allowedKey := range adminAccess.publicKeys {
// see if we found a matching public key; the TLS server already verified the chain
// so we know the client possesses the associated private key; this handy interface
// doesn't appear to be defined anywhere in the std lib, but was implemented here:
// https://github.com/golang/go/commit/b5f2c0f50297fa5cd14af668ddd7fd923626cf8c
comparer, ok := peerCert.PublicKey.(interface{ Equal(crypto.PublicKey) bool })
if !ok || !comparer.Equal(allowedKey) {
continue
}
// key recognized; make sure its HTTP request is permitted
for _, accessPerm := range adminAccess.Permissions {
// verify method
methodFound := accessPerm.Methods == nil
for _, method := range accessPerm.Methods {
if method == r.Method {
methodFound = true
break
}
}
if !methodFound {
return APIError{
HTTPStatus: http.StatusForbidden,
Message: "not authorized to use this method",
}
}
// verify path
pathFound := accessPerm.Paths == nil
for _, allowedPath := range accessPerm.Paths {
if strings.HasPrefix(r.URL.Path, allowedPath) {
pathFound = true
break
}
}
if !pathFound {
return APIError{
HTTPStatus: http.StatusForbidden,
Message: "not authorized to access this path",
}
}
}
// public key authorized, method and path allowed
return nil
}
}
}
}
// in theory, this should never happen; with an unverified chain, the TLS server
// should not accept the connection in the first place, and the acceptable cert
// pool is configured using the same list of public keys we verify against
return APIError{
HTTPStatus: http.StatusUnauthorized,
Message: "client identity not authorized",
}
}
func stopAdminServer(srv *http.Server) error {
if srv == nil {
return fmt.Errorf("no admin server")
}
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
err := srv.Shutdown(ctx)
if err != nil {
return fmt.Errorf("shutting down admin server: %v", err)
}
Log().Named("admin").Info("stopped previous server", zap.String("address", srv.Addr))
return nil
}
// AdminRouter is a type which can return routes for the admin API.
type AdminRouter interface {
Routes() []AdminRoute
}
// AdminRoute represents a route for the admin endpoint.
type AdminRoute struct {
Pattern string
Handler AdminHandler
}
type adminHandler struct {
mux *http.ServeMux
// security for local (plaintext) endpoint, on by default
enforceOrigin bool
enforceHost bool
allowedOrigins []string
// security for remote/encrypted endpoint
remoteControl *RemoteAdmin
}
// ServeHTTP is the external entry point for API requests.
// It will only be called once per request.
func (h adminHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
log := Log().Named("admin.api").With(
zap.String("method", r.Method),
zap.String("host", r.Host),
zap.String("uri", r.RequestURI),
zap.String("remote_addr", r.RemoteAddr),
zap.Reflect("headers", r.Header),
)
if r.TLS != nil {
log = log.With(
zap.Bool("secure", true),
zap.Int("verified_chains", len(r.TLS.VerifiedChains)),
)
}
if r.RequestURI == "/metrics" {
log.Debug("received request")
} else {
log.Info("received request")
}
h.serveHTTP(w, r)
}
// serveHTTP is the internal entry point for API requests. It may
// be called more than once per request, for example if a request
// is rewritten (i.e. internal redirect).
func (h adminHandler) serveHTTP(w http.ResponseWriter, r *http.Request) {
if h.remoteControl != nil {
// enforce access controls on secure endpoint
if err := h.remoteControl.enforceAccessControls(r); err != nil {
h.handleError(w, r, err)
return
}
}
if strings.Contains(r.Header.Get("Upgrade"), "websocket") {
// I've never been able to demonstrate a vulnerability myself, but apparently
// WebSocket connections originating from browsers aren't subject to CORS
// restrictions, so we'll just be on the safe side
h.handleError(w, r, fmt.Errorf("websocket connections aren't allowed"))
return
}
if h.enforceHost {
// DNS rebinding mitigation
err := h.checkHost(r)
if err != nil {
h.handleError(w, r, err)
return
}
}
if h.enforceOrigin {
// cross-site mitigation
origin, err := h.checkOrigin(r)
if err != nil {
h.handleError(w, r, err)
return
}
if r.Method == http.MethodOptions {
w.Header().Set("Access-Control-Allow-Methods", "OPTIONS, GET, POST, PUT, PATCH, DELETE")
w.Header().Set("Access-Control-Allow-Headers", "Content-Type, Content-Length, Cache-Control")
w.Header().Set("Access-Control-Allow-Credentials", "true")
}
w.Header().Set("Access-Control-Allow-Origin", origin)
}
h.mux.ServeHTTP(w, r)
}
func (h adminHandler) handleError(w http.ResponseWriter, r *http.Request, err error) {
if err == nil {
return
}
if err == errInternalRedir {
h.serveHTTP(w, r)
return
}
apiErr, ok := err.(APIError)
if !ok {
apiErr = APIError{
HTTPStatus: http.StatusInternalServerError,
Err: err,
}
}
if apiErr.HTTPStatus == 0 {
apiErr.HTTPStatus = http.StatusInternalServerError
}
if apiErr.Message == "" && apiErr.Err != nil {
apiErr.Message = apiErr.Err.Error()
}
Log().Named("admin.api").Error("request error",
zap.Error(err),
zap.Int("status_code", apiErr.HTTPStatus),
)
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(apiErr.HTTPStatus)
encErr := json.NewEncoder(w).Encode(apiErr)
if encErr != nil {
Log().Named("admin.api").Error("failed to encode error response", zap.Error(encErr))
}
}
// checkHost verifies that the request's Host header matches
// a trustworthy/expected value. This helps to mitigate DNS
// rebinding attacks.
func (h adminHandler) checkHost(r *http.Request) error {
var allowed bool
for _, allowedHost := range h.allowedOrigins {
if r.Host == allowedHost {
allowed = true
break
}
}
if !allowed {
return APIError{
HTTPStatus: http.StatusForbidden,
Err: fmt.Errorf("host not allowed: %s", r.Host),
}
}
return nil
}
// checkOrigin ensures that the Origin header, if
// set, matches the intended target; prevents arbitrary
// sites from issuing requests to our listener. It
// returns the origin that was obtained from r.
func (h adminHandler) checkOrigin(r *http.Request) (string, error) {
origin := h.getOriginHost(r)
if origin == "" {
return origin, APIError{
HTTPStatus: http.StatusForbidden,
Err: fmt.Errorf("missing required Origin header"),
}
}
if !h.originAllowed(origin) {
return origin, APIError{
HTTPStatus: http.StatusForbidden,
Err: fmt.Errorf("client is not allowed to access from origin %s", origin),
}
}
return origin, nil
}
func (h adminHandler) getOriginHost(r *http.Request) string {
origin := r.Header.Get("Origin")
if origin == "" {
origin = r.Header.Get("Referer")
}
originURL, err := url.Parse(origin)
if err == nil && originURL.Host != "" {
origin = originURL.Host
}
return origin
}
func (h adminHandler) originAllowed(origin string) bool {
for _, allowedOrigin := range h.allowedOrigins {
originCopy := origin
if !strings.Contains(allowedOrigin, "://") {
// no scheme specified, so allow both
originCopy = strings.TrimPrefix(originCopy, "http://")
originCopy = strings.TrimPrefix(originCopy, "https://")
}
if originCopy == allowedOrigin {
return true
}
}
return false
}
func handleConfig(w http.ResponseWriter, r *http.Request) error {
switch r.Method {
case http.MethodGet:
w.Header().Set("Content-Type", "application/json")
err := readConfig(r.URL.Path, w)
if err != nil {
return APIError{HTTPStatus: http.StatusBadRequest, Err: err}
}
return nil
case http.MethodPost,
http.MethodPut,
http.MethodPatch,
http.MethodDelete:
// DELETE does not use a body, but the others do
var body []byte
if r.Method != http.MethodDelete {
if ct := r.Header.Get("Content-Type"); !strings.Contains(ct, "/json") {
return APIError{
HTTPStatus: http.StatusBadRequest,
Err: fmt.Errorf("unacceptable content-type: %v; 'application/json' required", ct),
}
}
buf := bufPool.Get().(*bytes.Buffer)
buf.Reset()
defer bufPool.Put(buf)
_, err := io.Copy(buf, r.Body)
if err != nil {
return APIError{
HTTPStatus: http.StatusBadRequest,
Err: fmt.Errorf("reading request body: %v", err),
}
}
body = buf.Bytes()
}
forceReload := r.Header.Get("Cache-Control") == "must-revalidate"
err := changeConfig(r.Method, r.URL.Path, body, forceReload)
if err != nil {
return err
}
default:
return APIError{
HTTPStatus: http.StatusMethodNotAllowed,
Err: fmt.Errorf("method %s not allowed", r.Method),
}
}
return nil
}
func handleConfigID(w http.ResponseWriter, r *http.Request) error {
idPath := r.URL.Path
parts := strings.Split(idPath, "/")
if len(parts) < 3 || parts[2] == "" {
return fmt.Errorf("request path is missing object ID")
}
if parts[0] != "" || parts[1] != "id" {
return fmt.Errorf("malformed object path")
}
id := parts[2]
// map the ID to the expanded path
currentCfgMu.RLock()
expanded, ok := rawCfgIndex[id]
defer currentCfgMu.RUnlock()
if !ok {
return fmt.Errorf("unknown object ID '%s'", id)
}
// piece the full URL path back together
parts = append([]string{expanded}, parts[3:]...)
r.URL.Path = path.Join(parts...)
return errInternalRedir
}
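// Editor's note (added comment; the ID and paths are illustrative assumptions):
// given rawCfgIndex["my_server"] = "/config/apps/http/servers/srv0", a request
// for "/id/my_server/listen" is rewritten to
// "/config/apps/http/servers/srv0/listen" and re-dispatched via errInternalRedir,
// so authentication and authorization run again against the expanded path.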
func handleStop(w http.ResponseWriter, r *http.Request) error {
if r.Method != http.MethodPost {
return APIError{
HTTPStatus: http.StatusMethodNotAllowed,
Err: fmt.Errorf("method not allowed"),
}
}
if err := notify.NotifyStopping(); err != nil {
Log().Error("unable to notify stopping to service manager", zap.Error(err))
}
exitProcess(Log().Named("admin.api"))
return nil
}
// unsyncedConfigAccess traverses into the current config and performs
// the operation at path according to method, using body and out as
// needed. This is a low-level, unsynchronized function; most callers
// will want to use changeConfig or readConfig instead. This requires a
// read or write lock on currentCfgMu, depending on method (GET needs
// only a read lock; all others need a write lock).
func unsyncedConfigAccess(method, path string, body []byte, out io.Writer) error {
var err error
var val interface{}
// if there is a request body, decode it into the
// variable that will be set in the config according
// to method and path
if len(body) > 0 {
err = json.Unmarshal(body, &val)
if err != nil {
return fmt.Errorf("decoding request body: %v", err)
}
}
enc := json.NewEncoder(out)
cleanPath := strings.Trim(path, "/")
if cleanPath == "" {
return fmt.Errorf("no traversable path")
}
parts := strings.Split(cleanPath, "/")
if len(parts) == 0 {
return fmt.Errorf("path missing")
}
// A path that ends with "..." implies:
// 1) the part before it is an array
// 2) the payload is an array
// and means that the user wants to expand the elements
// in the payload array and append each one into the
// destination array, like so:
// array = append(array, elems...)
// This special case is handled below.
ellipses := parts[len(parts)-1] == "..."
if ellipses {
parts = parts[:len(parts)-1]
}
var ptr interface{} = rawCfg
traverseLoop:
for i, part := range parts {
switch v := ptr.(type) {
case map[string]interface{}:
// if the next part enters a slice, and the slice is our destination,
// handle it specially (because appending to the slice copies the slice
// header, which does not replace the original one like we want)
if arr, ok := v[part].([]interface{}); ok && i == len(parts)-2 {
var idx int
if method != http.MethodPost {
idxStr := parts[len(parts)-1]
idx, err = strconv.Atoi(idxStr)
if err != nil {
return fmt.Errorf("[%s] invalid array index '%s': %v",
path, idxStr, err)
}
if idx < 0 || idx >= len(arr) {
return fmt.Errorf("[%s] array index out of bounds: %s", path, idxStr)
}
}
switch method {
case http.MethodGet:
err = enc.Encode(arr[idx])
if err != nil {
return fmt.Errorf("encoding config: %v", err)
}
case http.MethodPost:
if ellipses {
valArray, ok := val.([]interface{})
if !ok {
return fmt.Errorf("final element is not an array")
}
v[part] = append(arr, valArray...)
} else {
v[part] = append(arr, val)
}
case http.MethodPut:
// avoid creation of new slice and a second copy (see
// https://github.com/golang/go/wiki/SliceTricks#insert)
arr = append(arr, nil)
copy(arr[idx+1:], arr[idx:])
arr[idx] = val
v[part] = arr
case http.MethodPatch:
arr[idx] = val
case http.MethodDelete:
v[part] = append(arr[:idx], arr[idx+1:]...)
default:
return fmt.Errorf("unrecognized method %s", method)
}
break traverseLoop
}
if i == len(parts)-1 {
switch method {
case http.MethodGet:
err = enc.Encode(v[part])
if err != nil {
return fmt.Errorf("encoding config: %v", err)
}
case http.MethodPost:
// if the part is an existing list, POST appends to
// it, otherwise it just sets or creates the value
if arr, ok := v[part].([]interface{}); ok {
if ellipses {
valArray, ok := val.([]interface{})
if !ok {
return fmt.Errorf("final element is not an array")
}
v[part] = append(arr, valArray...)
} else {
v[part] = append(arr, val)
}
} else {
v[part] = val
}
case http.MethodPut:
if _, ok := v[part]; ok {
return fmt.Errorf("[%s] key already exists: %s", path, part)
}
v[part] = val
case http.MethodPatch:
if _, ok := v[part]; !ok {
return fmt.Errorf("[%s] key does not exist: %s", path, part)
}
v[part] = val
case http.MethodDelete:
delete(v, part)
default:
return fmt.Errorf("unrecognized method %s", method)
}
} else {
// if we are "PUTting" a new resource, the key(s) in its path
// might not exist yet; that's OK but we need to make them as
// we go, while we still have a pointer from the level above
if v[part] == nil && method == http.MethodPut {
v[part] = make(map[string]interface{})
}
ptr = v[part]
}
case []interface{}:
partInt, err := strconv.Atoi(part)
if err != nil {
return fmt.Errorf("[/%s] invalid array index '%s': %v",
strings.Join(parts[:i+1], "/"), part, err)
}
if partInt < 0 || partInt >= len(v) {
return fmt.Errorf("[/%s] array index out of bounds: %s",
strings.Join(parts[:i+1], "/"), part)
}
ptr = v[partInt]
default:
return fmt.Errorf("invalid traversal path at: %s", strings.Join(parts[:i+1], "/"))
}
}
return nil
}
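// Editor's hedged usage sketch (assumed caller; not part of the original file):
// honoring the locking contract described above, a read might look like
//
//	var buf bytes.Buffer
//	currentCfgMu.RLock()
//	err := unsyncedConfigAccess(http.MethodGet, "/apps/http/servers", nil, &buf)
//	currentCfgMu.RUnlock()
//
// while POST/PUT/PATCH/DELETE callers would take currentCfgMu.Lock() instead;
// most code should simply go through changeConfig or readConfig, which manage
// the locks themselves.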
// RemoveMetaFields removes meta fields like "@id" from a JSON message
// by using a simple regular expression. (An alternate way to do this
// would be to delete them from the raw, map[string]interface{}
// representation as they are indexed, then iterate the index we made
// and add them back after encoding as JSON, but this is simpler.)
func RemoveMetaFields(rawJSON []byte) []byte {
return idRegexp.ReplaceAllFunc(rawJSON, func(in []byte) []byte {
// matches with a comma on both sides (when "@id" property is
// not the first or last in the object) need to keep exactly
// one comma for correct JSON syntax
comma := []byte{','}
if bytes.HasPrefix(in, comma) && bytes.HasSuffix(in, comma) {
return comma
}
return []byte{}
})
}
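// Editor's illustrative example (added; not in the original source): the
// replacement keeps the JSON valid whether or not the "@id" field has neighbors.
//
//	RemoveMetaFields([]byte(`{"@id":"my_handler","handler":"static_response"}`))
//	// -> {"handler":"static_response"}
//	RemoveMetaFields([]byte(`{"listen":[":443"],"@id":"srv0"}`))
//	// -> {"listen":[":443"]}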
// AdminHandler is like http.Handler except ServeHTTP may return an error.
//
// If any handler encounters an error, it should be returned for proper
// handling.
type AdminHandler interface {
ServeHTTP(http.ResponseWriter, *http.Request) error
}
// AdminHandlerFunc is a convenience type like http.HandlerFunc.
type AdminHandlerFunc func(http.ResponseWriter, *http.Request) error
// ServeHTTP implements the Handler interface.
func (f AdminHandlerFunc) ServeHTTP(w http.ResponseWriter, r *http.Request) error {
return f(w, r)
}
// APIError is a structured error that every API
// handler should return for consistency in logging
// and client responses. If Message is unset, then
// Err.Error() will be serialized in its place.
type APIError struct {
HTTPStatus int `json:"-"`
Err error `json:"-"`
Message string `json:"error"`
}
func (e APIError) Error() string {
if e.Err != nil {
return e.Err.Error()
}
return e.Message
}
// parseAdminListenAddr extracts a singular listen address from either addr
// or defaultAddr, returning the network and the address of the listener.
func parseAdminListenAddr(addr string, defaultAddr string) (NetworkAddress, error) {
input := addr
if input == "" {
input = defaultAddr
}
listenAddr, err := ParseNetworkAddress(input)
if err != nil {
return NetworkAddress{}, fmt.Errorf("parsing listener address: %v", err)
}
if listenAddr.PortRangeSize() != 1 {
return NetworkAddress{}, fmt.Errorf("must be exactly one listener address; cannot listen on: %s", listenAddr)
}
return listenAddr, nil
}
// decodeBase64DERCert base64-decodes, then DER-decodes, certStr.
func decodeBase64DERCert(certStr string) (*x509.Certificate, error) {
derBytes, err := base64.StdEncoding.DecodeString(certStr)
if err != nil {
return nil, err
}
return x509.ParseCertificate(derBytes)
}
var (
// DefaultAdminListen is the address for the local admin
// listener, if none is specified at startup.
DefaultAdminListen = "localhost:2019"
// DefaultRemoteAdminListen is the address for the remote
// (TLS-authenticated) admin listener, if enabled and not
// specified otherwise.
DefaultRemoteAdminListen = ":2021"
// DefaultAdminConfig is the default configuration
// for the local administration endpoint.
DefaultAdminConfig = &AdminConfig{
Listen: DefaultAdminListen,
}
)
// PIDFile writes a pidfile to the file at filename. It
// will get deleted before the process gracefully exits.
func PIDFile(filename string) error {
pid := []byte(strconv.Itoa(os.Getpid()) + "\n")
err := ioutil.WriteFile(filename, pid, 0600)
if err != nil {
return err
}
pidfile = filename
return nil
}
// idRegexp is used to match ID fields and their associated values
// in the config. It also matches adjacent commas so that syntax
// can be preserved no matter where in the object the field appears.
// It supports string and most numeric values.
var idRegexp = regexp.MustCompile(`(?m),?\s*"` + idKey + `"\s*:\s*(-?[0-9]+(\.[0-9]+)?|(?U)".*")\s*,?`)
// pidfile is the name of the pidfile, if any.
var pidfile string
// errInternalRedir indicates an internal redirect
// and is useful when admin API handlers rewrite
// the request; in that case, authentication and
// authorization needs to happen again for the
// rewritten request.
var errInternalRedir = fmt.Errorf("internal redirect; re-authorization required")
const (
rawConfigKey = "config"
idKey = "@id"
)
var bufPool = sync.Pool{
New: func() interface{} {
return new(bytes.Buffer)
},
}
// keep a reference to admin endpoint singletons while they're active
var (
localAdminServer, remoteAdminServer *http.Server
identityCertCache *certmagic.Cache
) | Persist *bool `json:"persist,omitempty"`
// Loads a configuration to use. This is helpful if your configs are |
handler_test.go | /*
* Copyright © 2015-2018 Aeneas Rekkas <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @author Aeneas Rekkas <[email protected]>
* @copyright 2015-2018 Aeneas Rekkas <[email protected]>
* @license Apache-2.0
*/
package oauth2_test
import (
"context"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"net/http/httptest"
"net/url"
"strings"
"testing"
"time"
"github.com/go-openapi/strfmt"
"github.com/ory/hydra/internal/httpclient/client/admin"
"github.com/ory/hydra/internal/httpclient/models"
"github.com/ory/hydra/jwk"
"github.com/ory/hydra/x"
"github.com/ory/viper"
"github.com/ory/hydra/driver/configuration"
"github.com/ory/hydra/internal"
"github.com/ory/x/urlx"
jwt2 "github.com/dgrijalva/jwt-go"
"github.com/golang/mock/gomock"
"github.com/pkg/errors"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/ory/fosite"
"github.com/ory/fosite/handler/openid"
"github.com/ory/fosite/token/jwt"
"github.com/ory/hydra/client"
hydra "github.com/ory/hydra/internal/httpclient/client"
"github.com/ory/hydra/oauth2"
)
var lifespan = time.Hour
var flushRequests = []*fosite.Request{
{
ID: "flush-1",
RequestedAt: time.Now().Round(time.Second),
Client: &client.Client{OutfacingID: "foobar"},
RequestedScope: fosite.Arguments{"fa", "ba"},
GrantedScope: fosite.Arguments{"fa", "ba"},
Form: url.Values{"foo": []string{"bar", "baz"}},
Session: &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}},
},
{
ID: "flush-2",
RequestedAt: time.Now().Round(time.Second).Add(-(lifespan + time.Minute)),
Client: &client.Client{OutfacingID: "foobar"},
RequestedScope: fosite.Arguments{"fa", "ba"},
GrantedScope: fosite.Arguments{"fa", "ba"},
Form: url.Values{"foo": []string{"bar", "baz"}},
Session: &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}},
},
{
ID: "flush-3",
RequestedAt: time.Now().Round(time.Second).Add(-(lifespan + time.Hour)),
Client: &client.Client{OutfacingID: "foobar"},
RequestedScope: fosite.Arguments{"fa", "ba"},
GrantedScope: fosite.Arguments{"fa", "ba"},
Form: url.Values{"foo": []string{"bar", "baz"}},
Session: &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}},
},
}
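// Editor's note (added comment, not in the original test): the three fixtures are
// created "now", just over one lifespan ago, and one lifespan plus an hour ago, so
// TestHandlerFlushHandler can delete them in stages by moving the NotAfter cutoff.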
func TestHandlerDeleteHandler(t *testing.T) {
conf := internal.NewConfigurationWithDefaults()
viper.Set(configuration.ViperKeyIssuerURL, "http://hydra.localhost")
reg := internal.NewRegistryMemory(t, conf)
cm := reg.ClientManager()
store := reg.OAuth2Storage()
h := oauth2.NewHandler(reg, conf)
deleteRequest := &fosite.Request{
ID: "del-1",
RequestedAt: time.Now().Round(time.Second),
Client: &client.Client{OutfacingID: "foobar"},
RequestedScope: fosite.Arguments{"fa", "ba"},
GrantedScope: fosite.Arguments{"fa", "ba"},
Form: url.Values{"foo": []string{"bar", "baz"}},
Session: &oauth2.Session{DefaultSession: &openid.DefaultSession{Subject: "bar"}},
}
require.NoError(t, cm.CreateClient(context.Background(), deleteRequest.Client.(*client.Client)))
require.NoError(t, store.CreateAccessTokenSession(context.Background(), deleteRequest.ID, deleteRequest))
r := x.NewRouterAdmin()
h.SetRoutes(r, r.RouterPublic(), func(h http.Handler) http.Handler {
return h
})
ts := httptest.NewServer(r)
defer ts.Close()
c := hydra.NewHTTPClientWithConfig(nil, &hydra.TransportConfig{Schemes: []string{"http"}, Host: urlx.ParseOrPanic(ts.URL).Host})
_, err := c.Admin.DeleteOAuth2Token(admin.NewDeleteOAuth2TokenParams().WithClientID("foobar"))
require.NoError(t, err)
ds := new(oauth2.Session)
ctx := context.Background()
_, err = store.GetAccessTokenSession(ctx, "del-1", ds)
require.Error(t, err, "not_found")
}
func TestHandlerFlushHandler(t *testing.T) {
conf := internal.NewConfigurationWithDefaults()
viper.Set(configuration.ViperKeyScopeStrategy, "DEPRECATED_HIERARCHICAL_SCOPE_STRATEGY")
viper.Set(configuration.ViperKeyIssuerURL, "http://hydra.localhost")
reg := internal.NewRegistryMemory(t, conf)
cl := reg.ClientManager()
store := reg.OAuth2Storage()
h := oauth2.NewHandler(reg, conf)
for _, r := range flushRequests {
_ = cl.CreateClient(context.Background(), r.Client.(*client.Client))
require.NoError(t, store.CreateAccessTokenSession(context.Background(), r.ID, r))
}
r := x.NewRouterAdmin()
h.SetRoutes(r, r.RouterPublic(), func(h http.Handler) http.Handler {
return h
})
ts := httptest.NewServer(r)
defer ts.Close()
c := hydra.NewHTTPClientWithConfig(nil, &hydra.TransportConfig{Schemes: []string{"http"}, Host: urlx.ParseOrPanic(ts.URL).Host})
ds := new(oauth2.Session)
ctx := context.Background()
_, err := c.Admin.FlushInactiveOAuth2Tokens(admin.NewFlushInactiveOAuth2TokensParams().WithBody(&models.FlushInactiveOAuth2TokensRequest{NotAfter: strfmt.DateTime(time.Now().Add(-time.Hour * 24))}))
require.NoError(t, err)
_, err = store.GetAccessTokenSession(ctx, "flush-1", ds)
require.NoError(t, err)
_, err = store.GetAccessTokenSession(ctx, "flush-2", ds)
require.NoError(t, err)
_, err = store.GetAccessTokenSession(ctx, "flush-3", ds)
require.NoError(t, err)
_, err = c.Admin.FlushInactiveOAuth2Tokens(admin.NewFlushInactiveOAuth2TokensParams().WithBody(&models.FlushInactiveOAuth2TokensRequest{NotAfter: strfmt.DateTime(time.Now().Add(-(lifespan + time.Hour/2)))}))
require.NoError(t, err)
_, err = store.GetAccessTokenSession(ctx, "flush-1", ds)
require.NoError(t, err)
_, err = store.GetAccessTokenSession(ctx, "flush-2", ds)
require.NoError(t, err)
_, err = store.GetAccessTokenSession(ctx, "flush-3", ds)
require.Error(t, err)
_, err = c.Admin.FlushInactiveOAuth2Tokens(admin.NewFlushInactiveOAuth2TokensParams().WithBody(&models.FlushInactiveOAuth2TokensRequest{NotAfter: strfmt.DateTime(time.Now())}))
require.NoError(t, err)
_, err = store.GetAccessTokenSession(ctx, "flush-1", ds)
require.NoError(t, err)
_, err = store.GetAccessTokenSession(ctx, "flush-2", ds)
require.Error(t, err)
_, err = store.GetAccessTokenSession(ctx, "flush-3", ds)
require.Error(t, err)
}
func TestUserinfo(t *testing.T) { |
func TestHandlerWellKnown(t *testing.T) {
conf := internal.NewConfigurationWithDefaults()
viper.Set(configuration.ViperKeyScopeStrategy, "DEPRECATED_HIERARCHICAL_SCOPE_STRATEGY")
viper.Set(configuration.ViperKeyIssuerURL, "http://hydra.localhost")
viper.Set(configuration.ViperKeySubjectTypesSupported, []string{"pairwise", "public"})
viper.Set(configuration.ViperKeyOIDCDiscoverySupportedClaims, []string{"sub"})
viper.Set(configuration.ViperKeyOAuth2ClientRegistrationURL, "http://client-register/registration")
viper.Set(configuration.ViperKeyOIDCDiscoveryUserinfoEndpoint, "/userinfo")
reg := internal.NewRegistryMemory(t, conf)
h := oauth2.NewHandler(reg, conf)
r := x.NewRouterAdmin()
h.SetRoutes(r, r.RouterPublic(), func(h http.Handler) http.Handler {
return h
})
ts := httptest.NewServer(r)
defer ts.Close()
res, err := http.Get(ts.URL + "/.well-known/openid-configuration")
require.NoError(t, err)
defer res.Body.Close()
trueConfig := oauth2.WellKnown{
Issuer: strings.TrimRight(conf.IssuerURL().String(), "/") + "/",
AuthURL: conf.OAuth2AuthURL().String(),
TokenURL: conf.OAuth2TokenURL().String(),
JWKsURI: conf.JWKSURL().String(),
RevocationEndpoint: urlx.AppendPaths(conf.IssuerURL(), oauth2.RevocationPath).String(),
RegistrationEndpoint: conf.OAuth2ClientRegistrationURL().String(),
SubjectTypes: []string{"pairwise", "public"},
ResponseTypes: []string{"code", "code id_token", "id_token", "token id_token", "token", "token id_token code"},
ClaimsSupported: conf.OIDCDiscoverySupportedClaims(),
ScopesSupported: conf.OIDCDiscoverySupportedScope(),
UserinfoEndpoint: conf.OIDCDiscoveryUserinfoEndpoint(),
TokenEndpointAuthMethodsSupported: []string{"client_secret_post", "client_secret_basic", "private_key_jwt", "none"},
GrantTypesSupported: []string{"authorization_code", "implicit", "client_credentials", "refresh_token"},
ResponseModesSupported: []string{"query", "fragment"},
IDTokenSigningAlgValuesSupported: []string{"RS256"},
UserinfoSigningAlgValuesSupported: []string{"none", "RS256"},
RequestParameterSupported: true,
RequestURIParameterSupported: true,
RequireRequestURIRegistration: true,
BackChannelLogoutSupported: true,
BackChannelLogoutSessionSupported: true,
FrontChannelLogoutSupported: true,
FrontChannelLogoutSessionSupported: true,
EndSessionEndpoint: urlx.AppendPaths(conf.IssuerURL(), oauth2.LogoutPath).String(),
RequestObjectSigningAlgValuesSupported: []string{"RS256", "none"},
}
var wellKnownResp oauth2.WellKnown
err = json.NewDecoder(res.Body).Decode(&wellKnownResp)
require.NoError(t, err, "problem decoding wellknown json response: %+v", err)
assert.EqualValues(t, trueConfig, wellKnownResp)
}
|
conf := internal.NewConfigurationWithDefaults()
viper.Set(configuration.ViperKeyScopeStrategy, "")
viper.Set(configuration.ViperKeyAuthCodeLifespan, lifespan)
viper.Set(configuration.ViperKeyIssuerURL, "http://hydra.localhost")
reg := internal.NewRegistryMemory(t, conf)
internal.MustEnsureRegistryKeys(reg, x.OpenIDConnectKeyName)
ctrl := gomock.NewController(t)
op := NewMockOAuth2Provider(ctrl)
defer ctrl.Finish()
reg.WithOAuth2Provider(op)
h := reg.OAuth2Handler()
router := x.NewRouterAdmin()
h.SetRoutes(router, router.RouterPublic(), func(h http.Handler) http.Handler {
return h
})
ts := httptest.NewServer(router)
defer ts.Close()
for k, tc := range []struct {
setup func(t *testing.T)
checkForSuccess func(t *testing.T, body []byte)
checkForUnauthorized func(t *testing.T, body []byte, header http.Header)
expectStatusCode int
}{
{
setup: func(t *testing.T) {
op.EXPECT().IntrospectToken(gomock.Any(), gomock.Eq("access-token"), gomock.Eq(fosite.AccessToken), gomock.Any()).Return(fosite.AccessToken, nil, errors.New("asdf"))
},
expectStatusCode: http.StatusInternalServerError,
},
{
setup: func(t *testing.T) {
op.EXPECT().
IntrospectToken(gomock.Any(), gomock.Eq("access-token"), gomock.Eq(fosite.AccessToken), gomock.Any()).
Return(fosite.RefreshToken, nil, nil)
},
checkForUnauthorized: func(t *testing.T, body []byte, headers http.Header) {
assert.True(t, headers.Get("WWW-Authenticate") != "", "%s", headers)
},
expectStatusCode: http.StatusUnauthorized,
},
{
setup: func(t *testing.T) {
op.EXPECT().
IntrospectToken(gomock.Any(), gomock.Eq("access-token"), gomock.Eq(fosite.AccessToken), gomock.Any()).
Return(fosite.AccessToken, nil, fosite.ErrRequestUnauthorized)
},
checkForUnauthorized: func(t *testing.T, body []byte, headers http.Header) {
assert.True(t, headers.Get("WWW-Authenticate") != "", "%s", headers)
},
expectStatusCode: http.StatusUnauthorized,
},
{
setup: func(t *testing.T) {
op.EXPECT().
IntrospectToken(gomock.Any(), gomock.Eq("access-token"), gomock.Eq(fosite.AccessToken), gomock.Any()).
DoAndReturn(func(_ context.Context, _ string, _ fosite.TokenType, session fosite.Session, _ ...string) (fosite.TokenType, fosite.AccessRequester, error) {
session = &oauth2.Session{
DefaultSession: &openid.DefaultSession{
Claims: &jwt.IDTokenClaims{
Subject: "alice",
},
Headers: new(jwt.Headers),
Subject: "alice",
},
Extra: map[string]interface{}{},
}
return fosite.AccessToken, &fosite.AccessRequest{
Request: fosite.Request{
Client: &client.Client{},
Session: session,
},
}, nil
})
},
expectStatusCode: http.StatusOK,
checkForSuccess: func(t *testing.T, body []byte) {
assert.True(t, strings.Contains(string(body), `"sub":"alice"`), "%s", body)
},
},
{
setup: func(t *testing.T) {
op.EXPECT().
IntrospectToken(gomock.Any(), gomock.Eq("access-token"), gomock.Eq(fosite.AccessToken), gomock.Any()).
DoAndReturn(func(_ context.Context, _ string, _ fosite.TokenType, session fosite.Session, _ ...string) (fosite.TokenType, fosite.AccessRequester, error) {
session = &oauth2.Session{
DefaultSession: &openid.DefaultSession{
Claims: &jwt.IDTokenClaims{
Subject: "another-alice",
},
Headers: new(jwt.Headers),
Subject: "alice",
},
Extra: map[string]interface{}{},
}
return fosite.AccessToken, &fosite.AccessRequest{
Request: fosite.Request{
Client: &client.Client{},
Session: session,
},
}, nil
})
},
expectStatusCode: http.StatusOK,
checkForSuccess: func(t *testing.T, body []byte) {
assert.False(t, strings.Contains(string(body), `"sub":"alice"`), "%s", body)
assert.True(t, strings.Contains(string(body), `"sub":"another-alice"`), "%s", body)
},
},
{
setup: func(t *testing.T) {
op.EXPECT().
IntrospectToken(gomock.Any(), gomock.Eq("access-token"), gomock.Eq(fosite.AccessToken), gomock.Any()).
DoAndReturn(func(_ context.Context, _ string, _ fosite.TokenType, session fosite.Session, _ ...string) (fosite.TokenType, fosite.AccessRequester, error) {
session = &oauth2.Session{
DefaultSession: &openid.DefaultSession{
Claims: &jwt.IDTokenClaims{
Subject: "alice",
},
Headers: new(jwt.Headers),
Subject: "alice",
},
Extra: map[string]interface{}{},
}
return fosite.AccessToken, &fosite.AccessRequest{
Request: fosite.Request{
Client: &client.Client{
UserinfoSignedResponseAlg: "none",
},
Session: session,
},
}, nil
})
},
expectStatusCode: http.StatusOK,
checkForSuccess: func(t *testing.T, body []byte) {
assert.True(t, strings.Contains(string(body), `"sub":"alice"`), "%s", body)
},
},
{
setup: func(t *testing.T) {
op.EXPECT().
IntrospectToken(gomock.Any(), gomock.Eq("access-token"), gomock.Eq(fosite.AccessToken), gomock.Any()).
DoAndReturn(func(_ context.Context, _ string, _ fosite.TokenType, session fosite.Session, _ ...string) (fosite.TokenType, fosite.AccessRequester, error) {
session = &oauth2.Session{
DefaultSession: &openid.DefaultSession{
Claims: &jwt.IDTokenClaims{
Subject: "alice",
},
Headers: new(jwt.Headers),
Subject: "alice",
},
Extra: map[string]interface{}{},
}
return fosite.AccessToken, &fosite.AccessRequest{
Request: fosite.Request{
Client: &client.Client{
UserinfoSignedResponseAlg: "asdfasdf",
},
Session: session,
},
}, nil
})
},
expectStatusCode: http.StatusInternalServerError,
},
{
setup: func(t *testing.T) {
op.EXPECT().
IntrospectToken(gomock.Any(), gomock.Eq("access-token"), gomock.Eq(fosite.AccessToken), gomock.Any()).
DoAndReturn(func(_ context.Context, _ string, _ fosite.TokenType, session fosite.Session, _ ...string) (fosite.TokenType, fosite.AccessRequester, error) {
session = &oauth2.Session{
DefaultSession: &openid.DefaultSession{
Claims: &jwt.IDTokenClaims{
Subject: "alice",
},
Headers: new(jwt.Headers),
Subject: "alice",
},
Extra: map[string]interface{}{},
}
return fosite.AccessToken, &fosite.AccessRequest{
Request: fosite.Request{
Client: &client.Client{
OutfacingID: "foobar-client",
UserinfoSignedResponseAlg: "RS256",
},
Session: session,
},
}, nil
})
},
expectStatusCode: http.StatusOK,
checkForSuccess: func(t *testing.T, body []byte) {
claims, err := jwt2.Parse(string(body), func(token *jwt2.Token) (interface{}, error) {
keys, err := reg.KeyManager().GetKeySet(context.Background(), x.OpenIDConnectKeyName)
require.NoError(t, err)
t.Logf("%+v", keys)
key, err := jwk.FindKeyByPrefix(keys, "public")
return jwk.MustRSAPublic(key), nil
})
require.NoError(t, err)
assert.EqualValues(t, "alice", claims.Claims.(jwt2.MapClaims)["sub"])
assert.EqualValues(t, []interface{}{"foobar-client"}, claims.Claims.(jwt2.MapClaims)["aud"], "%#v", claims.Claims)
assert.NotEmpty(t, claims.Claims.(jwt2.MapClaims)["jti"])
},
},
} {
t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) {
tc.setup(t)
req, err := http.NewRequest("GET", ts.URL+"/userinfo", nil)
require.NoError(t, err)
req.Header.Set("Authorization", "Bearer access-token")
resp, err := http.DefaultClient.Do(req)
require.NoError(t, err)
defer resp.Body.Close()
require.EqualValues(t, tc.expectStatusCode, resp.StatusCode)
body, err := ioutil.ReadAll(resp.Body)
require.NoError(t, err)
if tc.expectStatusCode == http.StatusOK {
tc.checkForSuccess(t, body)
} else if tc.expectStatusCode == http.StatusUnauthorized {
tc.checkForUnauthorized(t, body, resp.Header)
}
})
}
}
|
demo_syn_atdoc.py | # -*- coding: utf-8 -*-
# A Survey on Negative Transfer
# https://github.com/chamwen/NT-Benchmark
import numpy as np
import argparse
import os
import torch as tr
import torch.nn as nn
import torch.optim as optim
from utils import network, loss, utils
from utils.network import calc_coeff
from utils.dataloader import read_syn_src_tar
from utils.utils import lr_scheduler_full, fix_random_seed, add_label_noise_noimg
from utils.loss import CELabelSmooth, CDANE, Entropy, RandomLayer
import torch.utils.data as Data
def data_load(Xs, Ys, Xt, Yt, args):
dset_loaders = {}
train_bs = args.batch_size
if args.noise_rate > 0:
Ys = add_label_noise_noimg(Ys, args.seed, args.class_num, args.noise_rate)
sample_idx_tar = tr.from_numpy(np.arange(len(Yt))).long()
data_src = Data.TensorDataset(Xs, Ys)
data_tar = Data.TensorDataset(Xt, Yt)
data_tar_idx = Data.TensorDataset(Xt, Yt, sample_idx_tar)
# for DAN/DANN/CDAN/MCC
dset_loaders["source"] = Data.DataLoader(data_src, batch_size=train_bs, shuffle=True, drop_last=True)
dset_loaders["target"] = Data.DataLoader(data_tar_idx, batch_size=train_bs, shuffle=True, drop_last=True)
dset_loaders["Target"] = Data.DataLoader(data_tar, batch_size=train_bs * 3, shuffle=False, drop_last=False)
return dset_loaders
def train_target(args):
X_src, y_src, X_tar, y_tar = read_syn_src_tar(args)
dset_loaders = data_load(X_src, y_src, X_tar, y_tar, args)
netF, netC = network.backbone_net(args, args.bottleneck)
netF.load_state_dict(tr.load(args.mdl_init_dir + 'netF.pt'))
netC.load_state_dict(tr.load(args.mdl_init_dir + 'netC.pt'))
base_network = nn.Sequential(netF, netC)
max_len = max(len(dset_loaders["source"]), len(dset_loaders["target"]))
args.max_iter = args.max_epoch * max_len
ad_net = network.AdversarialNetwork(args.bottleneck, 20).cuda()
ad_net.load_state_dict(tr.load(args.mdl_init_dir + 'netD_full.pt'))
random_layer = RandomLayer([args.bottleneck, args.class_num], args.bottleneck)
random_layer.cuda()
optimizer_f = optim.SGD(netF.parameters(), lr=args.lr * 0.1)
optimizer_c = optim.SGD(netC.parameters(), lr=args.lr)
optimizer_d = optim.SGD(ad_net.parameters(), lr=args.lr)
max_len = max(len(dset_loaders["source"]), len(dset_loaders["target"]))
max_iter = args.max_epoch * max_len
interval_iter = max_iter // 10
iter_num = 0
base_network.train()
class_num = args.class_num
mem_fea = tr.rand(len(dset_loaders["target"].dataset), args.bottleneck).cuda()
mem_fea = mem_fea / tr.norm(mem_fea, p=2, dim=1, keepdim=True)
mem_cls = tr.ones(len(dset_loaders["target"].dataset), class_num).cuda() / class_num
while iter_num < max_iter:
# re-create the epoch iterators when exhausted; the bare except also catches the
# NameError raised on the very first pass, before the iterators exist
try:
inputs_source, labels_source = next(iter_source)
except:
iter_source = iter(dset_loaders["source"])
inputs_source, labels_source = next(iter_source)
try:
inputs_target, _, idx = next(iter_target)
except:
iter_target = iter(dset_loaders["target"])
inputs_target, _, idx = next(iter_target)
if inputs_source.size(0) == 1:
continue
iter_num += 1
lr_scheduler_full(optimizer_f, init_lr=args.lr * 0.1, iter_num=iter_num, max_iter=args.max_iter)
lr_scheduler_full(optimizer_c, init_lr=args.lr, iter_num=iter_num, max_iter=args.max_iter)
lr_scheduler_full(optimizer_d, init_lr=args.lr, iter_num=iter_num, max_iter=args.max_iter)
inputs_source, inputs_target, labels_source = inputs_source.cuda(), inputs_target.cuda(), labels_source.cuda()
features_source, outputs_source = base_network(inputs_source)
features_target, outputs_target = base_network(inputs_target)
features = tr.cat((features_source, features_target), dim=0) | # new version img loss
args.loss_trade_off = 1.0
outputs = tr.cat((outputs_source, outputs_target), dim=0)
softmax_out = nn.Softmax(dim=1)(outputs)
entropy = Entropy(softmax_out)
transfer_loss = CDANE([features, softmax_out], ad_net, entropy, calc_coeff(iter_num), random_layer=random_layer)
classifier_loss = CELabelSmooth(num_classes=args.class_num, epsilon=args.smooth)(outputs_source, labels_source)
# ATDOC
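# Editor's reading of the block below (added comments, not in the original):
# negative inner products to the target-feature memory bank act as distances,
# each sample's own slot is pushed to the maximum so it cannot vote for itself,
# the K nearest entries vote with weight 1/K over the stored soft labels, and the
# resulting pseudo-label weights a per-sample cross-entropy on the target outputs.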
dis = -tr.mm(features_target.detach(), mem_fea.t())
for di in range(dis.size(0)):
dis[di, idx[di]] = tr.max(dis)
_, p1 = tr.sort(dis, dim=1)
w = tr.zeros(features_target.size(0), mem_fea.size(0)).cuda()
for wi in range(w.size(0)):
for wj in range(args.K):
w[wi][p1[wi, wj]] = 1 / args.K
weight_, pred = tr.max(w.mm(mem_cls), 1)
loss_ = nn.CrossEntropyLoss(reduction='none')(outputs_target, pred)
classifier_loss_atdoc = tr.sum(weight_ * loss_) / (tr.sum(weight_).item())
eff = iter_num / args.max_iter
total_loss = args.loss_trade_off * transfer_loss + classifier_loss + args.tar_par * eff * classifier_loss_atdoc
optimizer_f.zero_grad()
optimizer_c.zero_grad()
optimizer_d.zero_grad()
total_loss.backward()
optimizer_f.step()
optimizer_c.step()
optimizer_d.step()
# label memory
netF.eval()
netC.eval()
with tr.no_grad():
features_target, outputs_target = netC(netF(inputs_target))
features_target = features_target / tr.norm(features_target, p=2, dim=1, keepdim=True)
softmax_out = nn.Softmax(dim=1)(outputs_target)
outputs_target = softmax_out ** 2 / ((softmax_out ** 2).sum(dim=0))
mem_fea[idx] = (1.0 - args.momentum) * mem_fea[idx] + args.momentum * features_target.clone()
mem_cls[idx] = (1.0 - args.momentum) * mem_cls[idx] + args.momentum * outputs_target.clone()
if iter_num % interval_iter == 0 or iter_num == max_iter:
base_network.eval()
acc_t_te = utils.cal_acc_base(dset_loaders["Target"], base_network)
log_str = 'Task: {}, Iter:{}/{}; Acc = {:.2f}%'.format(args.task_str, iter_num, max_iter, acc_t_te)
print(log_str)
base_network.train()
return acc_t_te
if __name__ == '__main__':
data_name = 'moon'
if data_name == 'moon': num_class = 2
base_name_list = ['0', '1', '2', '3_45', '4_15', '6', '7', '8', '9']
domain_list = ['Raw', 'Tl', 'Sl', 'Rt', 'Sh', 'Sk', 'Ns', 'Ol', 'Sc']
file_list = [data_name + i for i in base_name_list]
num_domain = len(domain_list)
args = argparse.Namespace(bottleneck=64, lr=0.01, lr_decay1=0.1, lr_decay2=1.0,
epsilon=1e-05, layer='wn', class_num=num_class, smooth=0)
args.K = 5
args.momentum = 1.0
args.tar_par = 0.2
args.method = 'CDANE-ATDOC'
args.dset = data_name
args.backbone = 'ShallowNet'
args.batch_size = 32
args.max_epoch = 50
args.input_dim = 2
args.mdl_init_dir = 'outputs/mdl_init/' + args.dset + '/'
args.noise_rate = 0
dset_n = args.dset + '_' + str(args.noise_rate)
os.environ["CUDA_VISIBLE_DEVICES"] = '5'
args.data_env = 'gpu' # 'local'
args.seed = 2022
fix_random_seed(args.seed)
tr.backends.cudnn.deterministic = True
print(dset_n, args.method)
args.root_path = './data_synth/'
args.local_dir = r'/mnt/ssd2/wenz/NT-Benchmark/NT_UDA/'
args.result_dir = 'results/target/'
acc_all = np.zeros((len(domain_list) - 1))
for s in range(1, num_domain): # source
for t in [0]: # target
itr_idx = s - 1
info_str = '\n%s: %s --> %s' % (itr_idx, domain_list[s], domain_list[t])
print(info_str)
args.src, args.tar = file_list[s], file_list[t]
args.task_str = domain_list[s] + '_' + domain_list[t]
print(args)
acc_all[itr_idx] = train_target(args)
print('All acc: ', np.round(acc_all, 2))
print('Avg acc: ', np.round(np.mean(acc_all), 2)) | |
__manifest__.py | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Irreversible Lock Date',
'version': '1.0',
'category': 'Accounting/Accounting',
'description': """
Make the lock date irreversible:
* You cannot set stricter restrictions on advisors than on users. Therefore, the All Users Lock Date must be on or before the Invoice/Bills Lock Date.
* You cannot lock a period that has not yet ended. Therefore, the All Users Lock Date must be on or before the last day of the previous month.
* Any new All Users Lock Date must be on or after the previous one. | 'depends': ['account'],
'data': [],
} | """, |
configuration.py | """
ORY Hydra
Welcome to the ORY Hydra HTTP API documentation. You will find documentation for all HTTP APIs here. # noqa: E501
The version of the OpenAPI document: v1.10.5
Generated by: https://openapi-generator.tech
"""
import copy
import logging
import multiprocessing
import sys
import urllib3
from http import client as http_client
from ory_hydra_client.exceptions import ApiValueError
JSON_SCHEMA_VALIDATION_KEYWORDS = {
'multipleOf', 'maximum', 'exclusiveMaximum',
'minimum', 'exclusiveMinimum', 'maxLength',
'minLength', 'pattern', 'maxItems', 'minItems'
}
class Configuration(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
:param host: Base url
:param api_key: Dict to store API key(s).
Each entry in the dict specifies an API key.
The dict key is the name of the security scheme in the OAS specification.
The dict value is the API key secret.
:param api_key_prefix: Dict to store API prefix (e.g. Bearer)
The dict key is the name of the security scheme in the OAS specification.
The dict value is an API key prefix when generating the auth data.
:param username: Username for HTTP basic authentication
:param password: Password for HTTP basic authentication
:param discard_unknown_keys: Boolean value indicating whether to discard
unknown properties. A server may send a response that includes additional
properties that are not known by the client in the following scenarios:
1. The OpenAPI document is incomplete, i.e. it does not match the server
implementation.
2. The client was generated using an older version of the OpenAPI document
and the server has been upgraded since then.
If a schema in the OpenAPI document defines the additionalProperties attribute,
then all undeclared properties received by the server are injected into the
additional properties map. In that case, there are undeclared properties, and
nothing to discard.
:param disabled_client_side_validations (string): Comma-separated list of
JSON schema validation keywords to disable JSON schema structural validation
rules. The following keywords may be specified: multipleOf, maximum,
exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern,
maxItems, minItems.
By default, the validation is performed for data generated locally by the client
and data received from the server, independent of any validation performed by
the server side. If the input data does not satisfy the JSON schema validation
rules specified in the OpenAPI document, an exception is raised.
If disabled_client_side_validations is set, structural validation is
disabled. This can be useful to troubleshoot data validation problem, such as
when the OpenAPI document validation rules do not match the actual API data
received by the server.
:param server_index: Index to servers configuration.
:param server_variables: Mapping with string values to replace variables in
templated server configuration. The validation of enums is performed for
variables with defined enum values before.
:param server_operation_index: Mapping from operation ID to an index to server
configuration.
:param server_operation_variables: Mapping from operation ID to a mapping with
string values to replace variables in templated server configuration.
The validation of enums is performed for variables with defined enum values before.
:param ssl_ca_cert: str - the path to a file of concatenated CA certificates
in PEM format
:Example:
HTTP Basic Authentication Example.
Given the following security scheme in the OpenAPI specification:
components:
securitySchemes:
http_basic_auth:
type: http
scheme: basic
Configure API client with HTTP basic authentication:
conf = ory_hydra_client.Configuration(
username='the-user',
password='the-password',
)
"""
_default = None
def __init__(self, host=None,
api_key=None, api_key_prefix=None,
access_token=None,
username=None, password=None,
discard_unknown_keys=False,
disabled_client_side_validations="",
server_index=None, server_variables=None,
server_operation_index=None, server_operation_variables=None,
ssl_ca_cert=None,
):
"""Constructor
"""
self._base_path = "http://localhost" if host is None else host
"""Default Base url
"""
self.server_index = 0 if server_index is None and host is None else server_index
self.server_operation_index = server_operation_index or {}
"""Default server index
"""
self.server_variables = server_variables or {}
self.server_operation_variables = server_operation_variables or {}
"""Default server variables
"""
self.temp_folder_path = None
"""Temp file folder for downloading files
"""
# Authentication Settings
self.access_token = access_token
self.api_key = {}
if api_key:
self.api_key = api_key
"""dict to store API key(s)
"""
self.api_key_prefix = {}
if api_key_prefix:
self.api_key_prefix = api_key_prefix
"""dict to store API prefix (e.g. Bearer)
"""
self.refresh_api_key_hook = None
"""function hook to refresh API key if expired
"""
self.username = username
"""Username for HTTP basic authentication
"""
self.password = password
"""Password for HTTP basic authentication
"""
self.discard_unknown_keys = discard_unknown_keys
self.disabled_client_side_validations = disabled_client_side_validations
self.logger = {}
"""Logging Settings
"""
self.logger["package_logger"] = logging.getLogger("ory_hydra_client")
self.logger["urllib3_logger"] = logging.getLogger("urllib3")
self.logger_format = '%(asctime)s %(levelname)s %(message)s'
"""Log format
"""
self.logger_stream_handler = None
"""Log stream handler
"""
self.logger_file_handler = None
"""Log file handler
"""
self.logger_file = None
"""Debug file location
"""
self.debug = False
"""Debug switch
"""
self.verify_ssl = True
"""SSL/TLS verification
Set this to false to skip verifying SSL certificate when calling API
from https server.
"""
self.ssl_ca_cert = ssl_ca_cert
"""Set this to customize the certificate file to verify the peer.
"""
self.cert_file = None
"""client certificate file
"""
self.key_file = None
"""client key file
"""
self.assert_hostname = None
"""Set this to True/False to enable/disable SSL hostname verification.
"""
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
"""urllib3 connection pool's maximum number of connections saved
per pool. urllib3 uses 1 connection as default value, but this is
not the best value when you are making a lot of possibly parallel
requests to the same host, which is often the case here.
cpu_count * 5 is used as default value to increase performance.
"""
self.proxy = None
"""Proxy URL
"""
self.proxy_headers = None
"""Proxy headers
"""
self.safe_chars_for_path_param = ''
"""Safe chars for path_param
"""
self.retries = None
"""Adding retries to override urllib3 default value 3
"""
# Enable client side validation
self.client_side_validation = True
# Options to pass down to the underlying urllib3 socket
self.socket_options = None
def __deepcopy__(self, memo):
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
for k, v in self.__dict__.items():
if k not in ('logger', 'logger_file_handler'):
setattr(result, k, copy.deepcopy(v, memo))
# shallow copy of loggers
result.logger = copy.copy(self.logger)
# use setters to configure loggers
result.logger_file = self.logger_file
result.debug = self.debug
return result
def __setattr__(self, name, value):
object.__setattr__(self, name, value)
if name == 'disabled_client_side_validations':
s = set(filter(None, value.split(',')))
for v in s:
if v not in JSON_SCHEMA_VALIDATION_KEYWORDS:
raise ApiValueError(
"Invalid keyword: '{0}'".format(v))
self._disabled_client_side_validations = s
@classmethod
def set_default(cls, default):
"""Set default instance of configuration.
It stores default configuration, which can be
returned by get_default_copy method.
:param default: object of Configuration
"""
cls._default = copy.deepcopy(default)
@classmethod
def get_default_copy(cls):
"""Return new instance of configuration.
This method returns newly created, based on default constructor,
object of Configuration class or returns a copy of default
configuration passed by the set_default method.
:return: The configuration object.
"""
if cls._default is not None:
return copy.deepcopy(cls._default)
return Configuration()
@property
def logger_file(self):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
return self.__logger_file
@logger_file.setter
def logger_file(self, value):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
self.__logger_file = value
if self.__logger_file:
# If set logging file,
# then add file handler and remove stream handler.
self.logger_file_handler = logging.FileHandler(self.__logger_file)
self.logger_file_handler.setFormatter(self.logger_formatter)
for _, logger in self.logger.items():
logger.addHandler(self.logger_file_handler)
@property
def debug(self):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
return self.__debug
@debug.setter
def debug(self, value):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
self.__debug = value
if self.__debug:
# if debug status is True, turn on debug logging
for _, logger in self.logger.items():
logger.setLevel(logging.DEBUG)
# turn on http_client debug
http_client.HTTPConnection.debuglevel = 1
else:
# if debug status is False, turn off debug logging,
# setting log level to default `logging.WARNING`
for _, logger in self.logger.items():
logger.setLevel(logging.WARNING)
# turn off http_client debug
http_client.HTTPConnection.debuglevel = 0
@property
def logger_format(self):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type: str
"""
return self.__logger_format
@logger_format.setter
def logger_format(self, value):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type: str
"""
self.__logger_format = value
self.logger_formatter = logging.Formatter(self.__logger_format)
def get_api_key_with_prefix(self, identifier, alias=None):
"""Gets API key (with prefix if set).
:param identifier: The identifier of apiKey.
:param alias: The alternative identifier of apiKey.
:return: The token for api key authentication.
"""
if self.refresh_api_key_hook is not None:
self.refresh_api_key_hook(self)
key = self.api_key.get(identifier, self.api_key.get(alias) if alias is not None else None)
if key:
prefix = self.api_key_prefix.get(identifier)
if prefix:
return "%s %s" % (prefix, key)
else:
return key
def get_basic_auth_token(self):
"""Gets HTTP basic authentication header (string).
:return: The token for basic HTTP authentication.
"""
username = ""
if self.username is not None:
username = self.username
password = ""
if self.password is not None:
password = self.password
return urllib3.util.make_headers(
basic_auth=username + ':' + password
).get('authorization')
def auth_settings(self):
"""Gets Auth Settings dict for api client.
:return: The Auth Settings information dict.
"""
auth = {}
if self.username is not None and self.password is not None:
auth['basic'] = {
'type': 'basic',
'in': 'header',
'key': 'Authorization',
'value': self.get_basic_auth_token()
}
if self.access_token is not None:
auth['oauth2'] = {
'type': 'oauth2',
'in': 'header',
'key': 'Authorization',
'value': 'Bearer ' + self.access_token
}
return auth
def to_debug_report(self):
"""Gets the essential information for debugging.
:return: The report for debugging.
"""
return "Python SDK Debug Report:\n"\
"OS: {env}\n"\
"Python Version: {pyversion}\n"\
"Version of the API: v1.10.5\n"\
"SDK Package Version: v1.10.5".\
format(env=sys.platform, pyversion=sys.version)
def get_host_settings(self):
"""Gets an array of host settings
:return: An array of host settings
"""
return [
{
'url': "",
'description': "No description provided",
}
]
def get_host_from_settings(self, index, variables=None, servers=None):
"""Gets host URL based on the index and variables
:param index: array index of the host settings
:param variables: hash of variable and the corresponding value
:param servers: an array of host settings or None
:return: URL based on host settings
"""
if index is None:
return self._base_path
variables = {} if variables is None else variables
servers = self.get_host_settings() if servers is None else servers
try:
server = servers[index]
except IndexError:
raise ValueError(
"Invalid index {0} when selecting the host settings. "
"Must be less than {1}".format(index, len(servers)))
url = server['url']
# go through variables and replace placeholders
for variable_name, variable in server.get('variables', {}).items():
used_value = variables.get(
variable_name, variable['default_value'])
if 'enum_values' in variable \
and used_value not in variable['enum_values']:
raise ValueError(
"The variable `{0}` in the host URL has invalid value "
"{1}. Must be {2}.".format(
variable_name, variables[variable_name],
variable['enum_values']))
url = url.replace("{" + variable_name + "}", used_value)
return url
@property
def host(self): | return self.get_host_from_settings(self.server_index, variables=self.server_variables)
@host.setter
def host(self, value):
"""Fix base path."""
self._base_path = value
self.server_index = None | """Return generated host.""" |
operations.rs | #![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
#[derive(Clone)]
pub struct Client {
endpoint: String,
credential: std::sync::Arc<dyn azure_core::TokenCredential>,
scopes: Vec<String>,
pipeline: azure_core::pipeline::Pipeline,
}
#[derive(Clone)]
pub struct ClientBuilder {
credential: std::sync::Arc<dyn azure_core::TokenCredential>,
endpoint: Option<String>,
scopes: Option<Vec<String>>,
}
pub const DEFAULT_ENDPOINT: &str = azure_core::resource_manager_endpoint::AZURE_PUBLIC_CLOUD;
impl ClientBuilder {
pub fn new(credential: std::sync::Arc<dyn azure_core::TokenCredential>) -> Self {
Self {
credential,
endpoint: None,
scopes: None,
}
}
pub fn endpoint(mut self, endpoint: impl Into<String>) -> Self {
self.endpoint = Some(endpoint.into());
self
}
pub fn scopes(mut self, scopes: &[&str]) -> Self {
self.scopes = Some(scopes.iter().map(|scope| (*scope).to_owned()).collect());
self
}
pub fn build(self) -> Client {
let endpoint = self.endpoint.unwrap_or_else(|| DEFAULT_ENDPOINT.to_owned());
let scopes = self.scopes.unwrap_or_else(|| vec![format!("{}/", endpoint)]);
Client::new(endpoint, self.credential, scopes)
}
}
impl Client {
pub(crate) fn endpoint(&self) -> &str {
self.endpoint.as_str()
}
pub(crate) fn token_credential(&self) -> &dyn azure_core::TokenCredential {
self.credential.as_ref()
}
pub(crate) fn scopes(&self) -> Vec<&str> {
self.scopes.iter().map(String::as_str).collect()
}
pub(crate) async fn send(&self, request: impl Into<azure_core::Request>) -> Result<azure_core::Response, azure_core::Error> {
let mut context = azure_core::Context::default();
let mut request = request.into();
self.pipeline.send(&mut context, &mut request).await
}
pub fn new(endpoint: impl Into<String>, credential: std::sync::Arc<dyn azure_core::TokenCredential>, scopes: Vec<String>) -> Self {
let endpoint = endpoint.into();
let pipeline = azure_core::pipeline::Pipeline::new(
option_env!("CARGO_PKG_NAME"),
option_env!("CARGO_PKG_VERSION"),
azure_core::ClientOptions::default(),
Vec::new(),
Vec::new(),
);
Self {
endpoint,
credential,
scopes,
pipeline,
}
}
pub fn admin_keys(&self) -> admin_keys::Client {
admin_keys::Client(self.clone())
}
pub fn operations(&self) -> operations::Client {
operations::Client(self.clone())
}
pub fn private_endpoint_connections(&self) -> private_endpoint_connections::Client {
private_endpoint_connections::Client(self.clone())
}
pub fn private_link_resources(&self) -> private_link_resources::Client {
private_link_resources::Client(self.clone())
}
pub fn query_keys(&self) -> query_keys::Client {
query_keys::Client(self.clone())
}
pub fn services(&self) -> services::Client {
services::Client(self.clone())
}
}
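// Editor's hedged usage sketch (not in the generated file; `get_credential` is an
// assumed helper returning any std::sync::Arc<dyn azure_core::TokenCredential>):
//
//	let credential = get_credential();
//	let client = ClientBuilder::new(credential).build();
//	let operations = client.operations().list().into_future().await?;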
#[non_exhaustive]
#[derive(Debug, thiserror::Error)]
#[allow(non_camel_case_types)]
pub enum Error {
#[error(transparent)]
Operations_List(#[from] operations::list::Error),
#[error(transparent)]
AdminKeys_Get(#[from] admin_keys::get::Error),
#[error(transparent)]
AdminKeys_Regenerate(#[from] admin_keys::regenerate::Error),
#[error(transparent)]
QueryKeys_Create(#[from] query_keys::create::Error),
#[error(transparent)]
QueryKeys_ListBySearchService(#[from] query_keys::list_by_search_service::Error),
#[error(transparent)]
QueryKeys_Delete(#[from] query_keys::delete::Error),
#[error(transparent)]
Services_Get(#[from] services::get::Error),
#[error(transparent)]
Services_CreateOrUpdate(#[from] services::create_or_update::Error),
#[error(transparent)]
Services_Update(#[from] services::update::Error),
#[error(transparent)]
Services_Delete(#[from] services::delete::Error),
#[error(transparent)]
Services_ListByResourceGroup(#[from] services::list_by_resource_group::Error),
#[error(transparent)]
PrivateLinkResources_ListSupported(#[from] private_link_resources::list_supported::Error),
#[error(transparent)]
PrivateEndpointConnections_Get(#[from] private_endpoint_connections::get::Error),
#[error(transparent)]
PrivateEndpointConnections_Update(#[from] private_endpoint_connections::update::Error),
#[error(transparent)]
PrivateEndpointConnections_Delete(#[from] private_endpoint_connections::delete::Error),
#[error(transparent)]
PrivateEndpointConnections_ListByService(#[from] private_endpoint_connections::list_by_service::Error),
#[error(transparent)]
Services_ListBySubscription(#[from] services::list_by_subscription::Error),
#[error(transparent)]
Services_CheckNameAvailability(#[from] services::check_name_availability::Error),
}
pub mod operations {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(&self) -> list::Builder {
list::Builder { client: self.0.clone() }
}
}
pub mod list {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::OperationListResult, Error>> {
Box::pin(async move {
let url_str = &format!("{}/providers/Microsoft.Search/operations", self.client.endpoint(),);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::OperationListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
}
pub mod admin_keys {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn get(
&self,
resource_group_name: impl Into<String>,
search_service_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
search_service_name: search_service_name.into(),
subscription_id: subscription_id.into(),
x_ms_client_request_id: None,
}
}
pub fn regenerate(
&self,
resource_group_name: impl Into<String>,
search_service_name: impl Into<String>,
key_kind: impl Into<String>,
subscription_id: impl Into<String>,
) -> regenerate::Builder {
regenerate::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
search_service_name: search_service_name.into(),
key_kind: key_kind.into(),
subscription_id: subscription_id.into(),
x_ms_client_request_id: None,
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) search_service_name: String,
pub(crate) subscription_id: String,
pub(crate) x_ms_client_request_id: Option<String>,
}
impl Builder {
pub fn x_ms_client_request_id(mut self, x_ms_client_request_id: impl Into<String>) -> Self {
self.x_ms_client_request_id = Some(x_ms_client_request_id.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AdminKeyResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/listAdminKeys",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.search_service_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(x_ms_client_request_id) = &self.x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AdminKeyResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod regenerate {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) search_service_name: String,
pub(crate) key_kind: String,
pub(crate) subscription_id: String,
pub(crate) x_ms_client_request_id: Option<String>,
}
impl Builder {
pub fn x_ms_client_request_id(mut self, x_ms_client_request_id: impl Into<String>) -> Self {
self.x_ms_client_request_id = Some(x_ms_client_request_id.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AdminKeyResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/regenerateAdminKey/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.search_service_name,
&self.key_kind
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(x_ms_client_request_id) = &self.x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AdminKeyResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod query_keys {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
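        /// Creates a new query key for the specified search service; `name` is the display name of the new key.
        ///
        /// Illustrative usage only; the argument values are placeholders and `client` is assumed to be this module's `Client`:
        /// ```ignore
        /// let key: models::QueryKey = client
        ///     .create("my-resource-group", "my-search-service", "my-key-name", "my-subscription-id")
        ///     .into_future()
        ///     .await?;
        /// ```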
pub fn create(
&self,
resource_group_name: impl Into<String>,
search_service_name: impl Into<String>,
name: impl Into<String>,
subscription_id: impl Into<String>,
) -> create::Builder {
create::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
search_service_name: search_service_name.into(),
name: name.into(),
subscription_id: subscription_id.into(),
x_ms_client_request_id: None,
}
}
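        /// Lists all query API keys for the specified search service.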
pub fn list_by_search_service(
&self,
resource_group_name: impl Into<String>,
search_service_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> list_by_search_service::Builder {
list_by_search_service::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
search_service_name: search_service_name.into(),
subscription_id: subscription_id.into(),
x_ms_client_request_id: None,
}
}
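        /// Deletes the specified query key; `key` identifies the key to delete.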
pub fn delete(
&self,
resource_group_name: impl Into<String>,
search_service_name: impl Into<String>,
key: impl Into<String>,
subscription_id: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
search_service_name: search_service_name.into(),
key: key.into(),
subscription_id: subscription_id.into(),
x_ms_client_request_id: None,
}
}
}
pub mod create {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) search_service_name: String,
pub(crate) name: String,
pub(crate) subscription_id: String,
pub(crate) x_ms_client_request_id: Option<String>,
}
impl Builder {
pub fn x_ms_client_request_id(mut self, x_ms_client_request_id: impl Into<String>) -> Self {
self.x_ms_client_request_id = Some(x_ms_client_request_id.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::QueryKey, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/createQueryKey/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.search_service_name,
&self.name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(x_ms_client_request_id) = &self.x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::QueryKey =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_by_search_service {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) search_service_name: String,
pub(crate) subscription_id: String,
pub(crate) x_ms_client_request_id: Option<String>,
}
impl Builder {
pub fn x_ms_client_request_id(mut self, x_ms_client_request_id: impl Into<String>) -> Self {
self.x_ms_client_request_id = Some(x_ms_client_request_id.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ListQueryKeysResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/listQueryKeys",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.search_service_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(x_ms_client_request_id) = &self.x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ListQueryKeysResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Error response #response_type")]
NotFound404 {},
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) search_service_name: String,
pub(crate) key: String,
pub(crate) subscription_id: String,
pub(crate) x_ms_client_request_id: Option<String>,
}
impl Builder {
pub fn x_ms_client_request_id(mut self, x_ms_client_request_id: impl Into<String>) -> Self {
self.x_ms_client_request_id = Some(x_ms_client_request_id.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/deleteQueryKey/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.search_service_name,
&self.key
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(x_ms_client_request_id) = &self.x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
http::StatusCode::NOT_FOUND => Err(Error::NotFound404 {}),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod services {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
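        /// Gets the search service with the given name in the given resource group.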
pub fn get(
&self,
resource_group_name: impl Into<String>,
search_service_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
search_service_name: search_service_name.into(),
subscription_id: subscription_id.into(),
x_ms_client_request_id: None,
}
}
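        /// Creates a search service, or updates an existing one, with the supplied `models::SearchService` definition (HTTP PUT).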
pub fn create_or_update(
&self,
resource_group_name: impl Into<String>,
search_service_name: impl Into<String>,
service: impl Into<models::SearchService>,
subscription_id: impl Into<String>,
) -> create_or_update::Builder {
create_or_update::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
search_service_name: search_service_name.into(),
service: service.into(),
subscription_id: subscription_id.into(),
x_ms_client_request_id: None,
}
}
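        /// Updates an existing search service with the supplied definition (HTTP PATCH).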
pub fn update(
&self,
resource_group_name: impl Into<String>,
search_service_name: impl Into<String>,
service: impl Into<models::SearchService>,
subscription_id: impl Into<String>,
) -> update::Builder {
update::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
search_service_name: search_service_name.into(),
service: service.into(),
subscription_id: subscription_id.into(),
x_ms_client_request_id: None,
}
}
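        /// Deletes the specified search service.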
pub fn delete(
&self,
resource_group_name: impl Into<String>,
search_service_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
search_service_name: search_service_name.into(),
subscription_id: subscription_id.into(),
x_ms_client_request_id: None,
}
}
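        /// Lists all search services in the given resource group.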
pub fn list_by_resource_group(
&self,
resource_group_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> list_by_resource_group::Builder {
list_by_resource_group::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
subscription_id: subscription_id.into(),
x_ms_client_request_id: None,
}
}
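        /// Lists all search services in the given subscription.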
pub fn list_by_subscription(&self, subscription_id: impl Into<String>) -> list_by_subscription::Builder {
list_by_subscription::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
x_ms_client_request_id: None,
}
}
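        /// Checks whether the given search service name is available for use.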
pub fn check_name_availability(
&self,
check_name_availability_input: impl Into<models::CheckNameAvailabilityInput>,
subscription_id: impl Into<String>,
) -> check_name_availability::Builder {
check_name_availability::Builder {
client: self.0.clone(),
check_name_availability_input: check_name_availability_input.into(),
subscription_id: subscription_id.into(),
x_ms_client_request_id: None,
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) search_service_name: String,
pub(crate) subscription_id: String,
pub(crate) x_ms_client_request_id: Option<String>,
}
impl Builder {
pub fn x_ms_client_request_id(mut self, x_ms_client_request_id: impl Into<String>) -> Self {
self.x_ms_client_request_id = Some(x_ms_client_request_id.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SearchService, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.search_service_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(x_ms_client_request_id) = &self.x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::SearchService =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::SearchService),
Created201(models::SearchService),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) search_service_name: String,
pub(crate) service: models::SearchService,
pub(crate) subscription_id: String,
pub(crate) x_ms_client_request_id: Option<String>,
}
impl Builder {
pub fn x_ms_client_request_id(mut self, x_ms_client_request_id: impl Into<String>) -> Self {
self.x_ms_client_request_id = Some(x_ms_client_request_id.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.search_service_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.service).map_err(Error::Serialize)?;
if let Some(x_ms_client_request_id) = &self.x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::SearchService =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::SearchService =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) search_service_name: String,
pub(crate) service: models::SearchService,
pub(crate) subscription_id: String,
pub(crate) x_ms_client_request_id: Option<String>,
}
impl Builder {
pub fn x_ms_client_request_id(mut self, x_ms_client_request_id: impl Into<String>) -> Self {
self.x_ms_client_request_id = Some(x_ms_client_request_id.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SearchService, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.search_service_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.service).map_err(Error::Serialize)?;
if let Some(x_ms_client_request_id) = &self.x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::SearchService =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Error response #response_type")]
NotFound404 {},
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) search_service_name: String,
pub(crate) subscription_id: String,
pub(crate) x_ms_client_request_id: Option<String>,
}
impl Builder {
pub fn x_ms_client_request_id(mut self, x_ms_client_request_id: impl Into<String>) -> Self {
self.x_ms_client_request_id = Some(x_ms_client_request_id.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.search_service_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(x_ms_client_request_id) = &self.x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
http::StatusCode::NOT_FOUND => Err(Error::NotFound404 {}),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_by_resource_group {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) subscription_id: String,
pub(crate) x_ms_client_request_id: Option<String>,
}
impl Builder {
pub fn x_ms_client_request_id(mut self, x_ms_client_request_id: impl Into<String>) -> Self {
self.x_ms_client_request_id = Some(x_ms_client_request_id.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SearchServiceListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(x_ms_client_request_id) = &self.x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::SearchServiceListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_by_subscription {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) x_ms_client_request_id: Option<String>,
}
impl Builder {
pub fn x_ms_client_request_id(mut self, x_ms_client_request_id: impl Into<String>) -> Self {
self.x_ms_client_request_id = Some(x_ms_client_request_id.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::SearchServiceListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Search/searchServices",
self.client.endpoint(),
&self.subscription_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(x_ms_client_request_id) = &self.x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::SearchServiceListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod check_name_availability {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) check_name_availability_input: models::CheckNameAvailabilityInput,
pub(crate) subscription_id: String,
pub(crate) x_ms_client_request_id: Option<String>,
}
impl Builder {
pub fn x_ms_client_request_id(mut self, x_ms_client_request_id: impl Into<String>) -> Self {
self.x_ms_client_request_id = Some(x_ms_client_request_id.into());
self
}
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::CheckNameAvailabilityOutput, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Search/checkNameAvailability",
self.client.endpoint(),
&self.subscription_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.check_name_availability_input).map_err(Error::Serialize)?;
if let Some(x_ms_client_request_id) = &self.x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CheckNameAvailabilityOutput =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod private_link_resources {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
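        /// Lists the private link resources supported by the specified search service.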
pub fn list_supported(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
search_service_name: impl Into<String>,
) -> list_supported::Builder {
list_supported::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
search_service_name: search_service_name.into(),
x_ms_client_request_id: None,
}
}
}
pub mod list_supported {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) search_service_name: String,
pub(crate) x_ms_client_request_id: Option<String>,
}
impl Builder {
pub fn x_ms_client_request_id(mut self, x_ms_client_request_id: impl Into<String>) -> Self {
self.x_ms_client_request_id = Some(x_ms_client_request_id.into());
self
}
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::PrivateLinkResourcesResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/privateLinkResources",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.search_service_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(x_ms_client_request_id) = &self.x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::PrivateLinkResourcesResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod private_endpoint_connections {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
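        /// Gets the named private endpoint connection for the specified search service.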
pub fn get(
&self,
resource_group_name: impl Into<String>,
search_service_name: impl Into<String>,
private_endpoint_connection_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
search_service_name: search_service_name.into(),
private_endpoint_connection_name: private_endpoint_connection_name.into(),
subscription_id: subscription_id.into(),
x_ms_client_request_id: None,
}
}
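        /// Updates the named private endpoint connection with the supplied definition (HTTP PUT).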
pub fn update(
&self,
resource_group_name: impl Into<String>,
search_service_name: impl Into<String>,
private_endpoint_connection_name: impl Into<String>,
private_endpoint_connection: impl Into<models::PrivateEndpointConnection>,
subscription_id: impl Into<String>,
) -> update::Builder {
update::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
search_service_name: search_service_name.into(),
private_endpoint_connection_name: private_endpoint_connection_name.into(),
private_endpoint_connection: private_endpoint_connection.into(),
subscription_id: subscription_id.into(),
x_ms_client_request_id: None,
}
}
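        /// Deletes the named private endpoint connection; on success the deleted connection is returned.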
pub fn delete(
&self,
resource_group_name: impl Into<String>,
search_service_name: impl Into<String>,
private_endpoint_connection_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
search_service_name: search_service_name.into(),
private_endpoint_connection_name: private_endpoint_connection_name.into(),
subscription_id: subscription_id.into(),
x_ms_client_request_id: None,
}
}
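        /// Lists all private endpoint connections for the specified search service.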
pub fn list_by_service(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
search_service_name: impl Into<String>,
) -> list_by_service::Builder {
list_by_service::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
search_service_name: search_service_name.into(),
x_ms_client_request_id: None,
}
}
}
pub mod get {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) search_service_name: String,
pub(crate) private_endpoint_connection_name: String,
pub(crate) subscription_id: String,
pub(crate) x_ms_client_request_id: Option<String>,
}
impl Builder {
pub fn x_ms_client_request_id(mut self, x_ms_client_request_id: impl Into<String>) -> Self {
self.x_ms_client_request_id = Some(x_ms_client_request_id.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::PrivateEndpointConnection, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/privateEndpointConnections/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.search_service_name,
&self.private_endpoint_connection_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(x_ms_client_request_id) = &self.x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::PrivateEndpointConnection =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) search_service_name: String,
pub(crate) private_endpoint_connection_name: String,
pub(crate) private_endpoint_connection: models::PrivateEndpointConnection,
pub(crate) subscription_id: String,
pub(crate) x_ms_client_request_id: Option<String>,
}
impl Builder {
pub fn x_ms_client_request_id(mut self, x_ms_client_request_id: impl Into<String>) -> Self {
self.x_ms_client_request_id = Some(x_ms_client_request_id.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::PrivateEndpointConnection, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/privateEndpointConnections/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.search_service_name,
&self.private_endpoint_connection_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.private_endpoint_connection).map_err(Error::Serialize)?;
if let Some(x_ms_client_request_id) = &self.x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::PrivateEndpointConnection =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Error response #response_type")]
NotFound404 {},
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) search_service_name: String,
pub(crate) private_endpoint_connection_name: String,
pub(crate) subscription_id: String,
pub(crate) x_ms_client_request_id: Option<String>,
}
impl Builder {
pub fn x_ms_client_request_id(mut self, x_ms_client_request_id: impl Into<String>) -> Self {
self.x_ms_client_request_id = Some(x_ms_client_request_id.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::PrivateEndpointConnection, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/privateEndpointConnections/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.search_service_name,
&self.private_endpoint_connection_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(x_ms_client_request_id) = &self.x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::PrivateEndpointConnection =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
http::StatusCode::NOT_FOUND => Err(Error::NotFound404 {}),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_by_service {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) search_service_name: String,
pub(crate) x_ms_client_request_id: Option<String>,
}
impl Builder {
pub fn x_ms_client_request_id(mut self, x_ms_client_request_id: impl Into<String>) -> Self {
self.x_ms_client_request_id = Some(x_ms_client_request_id.into());
self
}
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::PrivateEndpointConnectionListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Search/searchServices/{}/privateEndpointConnections",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.search_service_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(x_ms_client_request_id) = &self.x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::PrivateEndpointConnectionListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
} | status_code: http::StatusCode,
value: models::CloudError, |
numericpattern13.py | # Numeric Pattern 12
"""
Desired Output:
5 10 15 20 25
4 9 14 19 24 | """
x = 5
for i in range(5, 0, -1):
j = i
for _ in range(5):
print(j, end=" ")
j += x
print() | 3 8 13 18 23
2 7 12 17 22
1 6 11 16 21 |
main.rs | use dacapo_latency_dump_hdrh::DaCapoLatencyDump;
use glob::glob;
use indicatif::ParallelProgressIterator;
use indicatif::ProgressBar;
use indicatif::ProgressStyle; | use std::env;
use std::path::PathBuf;
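// Collect every dacapo-latency-usec-*.csv found under the directories given on the command line,
// then convert each dump in parallel into an HDR histogram saved next to it with an .hdrh extension.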
fn main() {
let args: Vec<String> = env::args().collect();
let mut entries: Vec<PathBuf> = (&args[1..])
.iter()
.map(|arg| {
let entries: Result<Vec<PathBuf>, _> =
glob(&format!("{}/**/dacapo-latency-usec-*.csv", arg))
.expect("Failed to read glob pattern")
.collect();
entries.expect("Failed to read entries")
})
.collect::<Vec<Vec<PathBuf>>>()
.concat();
let pb = ProgressBar::new(entries.len() as u64);
pb.set_style(
ProgressStyle::default_bar()
.template(
"{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {pos}/{len} ({eta})",
)
.progress_chars("#>-"),
);
entries.par_iter_mut().progress_with(pb).for_each(|entry| {
let dld = DaCapoLatencyDump::new(entry);
entry.set_extension("hdrh");
dld.save_hdrh(entry)
});
} | use rayon::prelude::*; |
angular-material-calendar.js | angular.module("materialCalendar", ["ngMaterial", "ngSanitize"]);
angular.module("materialCalendar").constant("materialCalendar.config", {
version: "0.2.13",
debug: document.domain.indexOf("localhost") > -1
});
angular.module("materialCalendar").config(["materialCalendar.config", "$logProvider", "$compileProvider", function (config, $logProvider, $compileProvider) {
    if (config.debug) {
        // Debug builds (e.g. when served from localhost) keep verbose logging and compile-time debug info enabled.
        $logProvider.debugEnabled(true);
        $compileProvider.debugInfoEnabled(true);
    }
}]);
angular.module("materialCalendar").service("materialCalendar.Calendar", [function () {
function Calendar(year, month, options) {
var now = new Date();
this.setWeekStartsOn = function (i) {
var d = parseInt(i || 0, 10);
if (!isNaN(d) && d >= 0 && d <= 6) {
this.weekStartsOn = d;
} else {
this.weekStartsOn = 0;
}
return this.weekStartsOn;
};
this.options = angular.isObject(options) ? options : {};
this.year = now.getFullYear();
this.month = now.getMonth();
this.weeks = [];
this.weekStartsOn = this.setWeekStartsOn(this.options.weekStartsOn);
this.next = function () {
if (this.start.getMonth() < 11) {
this.init(this.start.getFullYear(), this.start.getMonth() + 1);
return;
}
this.init(this.start.getFullYear() + 1, 0);
};
this.prev = function () {
if (this.month) {
this.init(this.start.getFullYear(), this.start.getMonth() - 1);
return;
}
this.init(this.start.getFullYear() - 1, 11);
};
// Month should be the javascript indexed month, 0 is January, etc.
this.init = function (year, month) {
var now = new Date();
this.year = angular.isDefined(year) ? year : now.getFullYear();
this.month = angular.isDefined(month) ? month : now.getMonth();
var daysInMonth = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];
var monthLength = daysInMonth[this.month];
// Figure out if is a leap year.
if (this.month === 1) {
if ((this.year % 4 === 0 && this.year % 100 !== 0) || this.year % 400 === 0) {
monthLength = 29;
}
}
// First day of calendar month.
this.start = new Date(this.year, this.month, 1);
var date = angular.copy(this.start);
while (date.getDay() !== this.weekStartsOn) {
date.setDate(date.getDate() - 1);
monthLength++;
}
// Last day of calendar month.
while (monthLength % 7 !== 0) {
monthLength++;
}
this.weeks = [];
for (var i = 0; i < monthLength; ++i) {
// Let's start a new week.
if (i % 7 === 0) {
this.weeks.push([]);
}
// Add copy of the date. If not a copy,
// it will get updated shortly.
this.weeks[this.weeks.length - 1].push(angular.copy(date));
// Increment it.
date.setDate(date.getDate() + 1);
}
};
this.init(year, month);
}
return Calendar;
}]);
angular.module("materialCalendar").service("MaterialCalendarData", [function () {
function CalendarData() {
this.data = {};
this.getDayKey = function(date) {
return [date.getFullYear(), date.getMonth() + 1, date.getDate()].join("-");
};
this.setDayContent = function(date, content) {
this.data[this.getDayKey(date)] = content || this.data[this.getDayKey(date)] || "";
};
}
return new CalendarData();
}]);
angular.module("materialCalendar").directive("calendarMd", ["$compile", "$parse", "$http", "$q", "materialCalendar.Calendar", "MaterialCalendarData", function ($compile, $parse, $http, $q, Calendar, CalendarData) {
var defaultTemplate = "<md-content layout='column' layout-fill md-swipe-left='next()' md-swipe-right='prev()'><md-toolbar><div class='md-toolbar-tools' layout='row'><md-button class='md-icon-button' ng-click='prev()' aria-label='Previous month'><md-tooltip ng-if='::tooltips()'>Previous month</md-tooltip>«</md-button><div flex></div><h2 class='calendar-md-title'><span>{{ calendar.start | date:titleFormat:timezone }}</span></h2><div flex></div><md-button class='md-icon-button' ng-click='next()' aria-label='Next month'><md-tooltip ng-if='::tooltips()'>Next month</md-tooltip>»</md-button></div></md-toolbar><!-- agenda view --><md-content ng-if='weekLayout === columnWeekLayout' class='agenda'><div ng-repeat='week in calendar.weeks track by $index'><div ng-if='sameMonth(day)' ng-class='{"disabled" : isDisabled(day), active: active === day }' ng-click='handleDayClick(day)' ng-repeat='day in week' layout><md-tooltip ng-if='::tooltips()'>{{ day | date:dayTooltipFormat:timezone }}</md-tooltip><div>{{ day | date:dayFormat:timezone }}</div><div flex ng-bind-html='dataService.data[dayKey(day)]'></div></div></div></md-content><!-- calendar view --><md-content ng-if='weekLayout !== columnWeekLayout' flex layout='column' class='calendar'><div layout='row' class='subheader'><div layout-padding class='subheader-day' flex ng-repeat='day in calendar.weeks[0]'><md-tooltip ng-if='::tooltips()'>{{ day | date:dayLabelTooltipFormat }}</md-tooltip>{{ day | date:dayLabelFormat }}</div></div><div ng-if='week.length' ng-repeat='week in calendar.weeks track by $index' flex layout='row'><div tabindex='{{ sameMonth(day) ? (day | date:dayFormat:timezone) : 0 }}' ng-repeat='day in week track by $index' ng-click='handleDayClick(day)' flex layout layout-padding ng-class='{"disabled" : isDisabled(day), "active": isActive(day), "md-whiteframe-12dp": hover || focus }' ng-focus='focus = true;' ng-blur='focus = false;' ng-mouseleave='hover = false' ng-mouseenter='hover = true'><md-tooltip ng-if='::tooltips()'>{{ day | date:dayTooltipFormat }}</md-tooltip><div>{{ day | date:dayFormat }}</div><div flex ng-bind-html='dataService.data[dayKey(day)]'></div></div></div></md-content></md-content>";
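    // Injects the calendar stylesheet into <head> exactly once, using the style element id as a guard.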
var injectCss = function () {
var styleId = "calendarMdCss";
if (!document.getElementById(styleId)) {
var head = document.getElementsByTagName("head")[0];
var css = document.createElement("style");
css.type = "text/css";
css.id = styleId;
css.innerHTML = "calendar-md md-content>md-content.agenda>*>* :not(:first-child),calendar-md md-content>md-content.calendar>:not(:first-child)>* :last-child{overflow:hidden;text-overflow:ellipsis}calendar-md{display:block;max-height:100%}calendar-md .md-toolbar-tools h2{overflow-x:hidden;text-overflow:ellipsis;white-space:nowrap}calendar-md md-content>md-content{border:1px solid rgba(0,0,0,.12)}calendar-md md-content>md-content.agenda>*>*{border-bottom:1px solid rgba(0,0,0,.12)}calendar-md md-content>md-content.agenda>*>.disabled{color:rgba(0,0,0,.3);pointer-events:none;cursor:auto}calendar-md md-content>md-content.agenda>*>* :first-child{padding:12px;width:200px;text-align:right;color:rgba(0,0,0,.75);font-weight:100;overflow-x:hidden;text-overflow:ellipsis;white-space:nowrap}calendar-md md-content>md-content>*>*{min-width:48px}calendar-md md-content>md-content.calendar>:first-child{background:rgba(0,0,0,.02);border-bottom:1px solid rgba(0,0,0,.12);margin-right:0;min-height:36px}calendar-md md-content>md-content.calendar>:not(:first-child)>*{border-bottom:1px solid rgba(0,0,0,.12);border-right:1px solid rgba(0,0,0,.12);cursor:pointer}calendar-md md-content>md-content.calendar>:not(:first-child)>:hover{background:rgba(0,0,0,.04)}calendar-md md-content>md-content.calendar>:not(:first-child)>.disabled{color:rgba(0,0,0,.3);pointer-events:none;cursor:auto}calendar-md md-content>md-content.calendar>:not(:first-child)>.active{box-shadow:0 1px 3px 0 rgba(0,0,0,.2),0 1px 1px 0 rgba(0,0,0,.14),0 2px 1px -1px rgba(0,0,0,.12);background:rgba(0,0,0,.02)}calendar-md md-content>md-content.calendar>:not(:first-child)>* :first-child{padding:0}";
head.insertBefore(css, head.firstChild);
}
};
return {
restrict: "E",
scope: {
ngModel: "=?",
template: "&",
templateUrl: "=?",
onDayClick: "=?",
onPrevMonth: "=?",
onNextMonth: "=?",
calendarDirection: "=?",
dayContent: "&?",
timezone: "=?",
titleFormat: "=?",
dayFormat: "=?",
dayLabelFormat: "=?",
dayLabelTooltipFormat: "=?",
dayTooltipFormat: "=?",
weekStartsOn: "=?",
tooltips: "&?",
clearDataCacheOnLoad: "=?",
disableFutureSelection: "=?"
},
link: function ($scope, $element, $attrs) {
// Add the CSS here.
injectCss();
var date = new Date();
var month = parseInt($attrs.startMonth || date.getMonth());
var year = parseInt($attrs.startYear || date.getFullYear());
$scope.columnWeekLayout = "column";
$scope.weekLayout = "row";
$scope.timezone = $scope.timezone || null;
$scope.noCache = $attrs.clearDataCacheOnLoad || false;
// Parse the parent model to determine if it's an array.
// If it is an array, than we'll automatically be able to select
// more than one date.
if ($attrs.ngModel) {
$scope.active = $scope.$parent.$eval($attrs.ngModel);
if ($attrs.ngModel) {
$scope.$watch("$parent." + $attrs.ngModel, function (val) {
$scope.active = val; | }
} else {
$scope.active = null;
}
// Set the defaults here.
$scope.titleFormat = $scope.titleFormat || "MMMM yyyy";
$scope.dayLabelFormat = $scope.dayLabelFormat || "EEE";
$scope.dayLabelTooltipFormat = $scope.dayLabelTooltipFormat || "EEEE";
$scope.dayFormat = $scope.dayFormat || "d";
$scope.dayTooltipFormat = $scope.dayTooltipFormat || "fullDate";
$scope.disableFutureSelection = $scope.disableFutureSelection || false;
$scope.sameMonth = function (date) {
var d = angular.copy(date);
return d.getFullYear() === $scope.calendar.year &&
d.getMonth() === $scope.calendar.month;
};
$scope.isDisabled = function (date) {
if ($scope.disableFutureSelection && date > new Date()) { return true; }
return !$scope.sameMonth(date);
};
$scope.calendarDirection = $scope.calendarDirection || "horizontal";
$scope.$watch("calendarDirection", function (val) {
$scope.weekLayout = val === "horizontal" ? "row" : "column";
});
$scope.$watch("weekLayout", function () {
year = $scope.calendar.year;
month = $scope.calendar.month;
bootstrap();
});
var handleCb = function (cb, data) {
(cb || angular.noop)(data);
};
var dateFind = function (arr, date) {
var index = -1;
angular.forEach(arr, function (d, k) {
if (index < 0) {
if (angular.equals(date, d)) {
index = k;
}
}
});
return index;
};
$scope.isActive = function (date) {
var match;
var active = angular.copy($scope.active);
if (!angular.isArray(active)) {
match = angular.equals(date, active);
} else {
match = dateFind(active, date) > -1;
}
return match;
};
$scope.prev = function () {
$scope.calendar.prev();
var data = {
year: $scope.calendar.year,
month: $scope.calendar.month + 1
};
setData();
handleCb($scope.onPrevMonth, data);
};
$scope.next = function () {
$scope.calendar.next();
var data = {
year: $scope.calendar.year,
month: $scope.calendar.month + 1
};
setData();
handleCb($scope.onNextMonth, data);
};
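            // Toggles the clicked date: array models add/remove the date, single-date models set or clear it,
            // then the change is written back to ngModel and the onDayClick callback is fired.
            // Clicks on future dates are ignored when disableFutureSelection is set.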
$scope.handleDayClick = function (date) {
if($scope.disableFutureSelection && date > new Date()) {
return;
}
var active = angular.copy($scope.active);
if (angular.isArray(active)) {
var idx = dateFind(active, date);
if (idx > -1) {
active.splice(idx, 1);
} else {
active.push(date);
}
} else {
if (angular.equals(active, date)) {
active = null;
} else {
active = date;
}
}
$scope.active = active;
if ($attrs.ngModel) {
$parse($attrs.ngModel).assign($scope.$parent, angular.copy($scope.active));
}
handleCb($scope.onDayClick, angular.copy(date));
};
// Small helper function to set the contents of the template.
var setTemplate = function (contents) {
$element.html(contents);
$compile($element.contents())($scope);
};
var init = function () {
$scope.calendar = new Calendar(year, month, {
weekStartsOn: $scope.weekStartsOn || 0
});
var deferred = $q.defer();
// Allows fetching of dynamic templates via $http.
if ($scope.templateUrl) {
$http
.get($scope.templateUrl)
.success(deferred.resolve)
.error(deferred.reject);
} else {
deferred.resolve($scope.template() || defaultTemplate);
}
return deferred.promise;
};
$scope.dataService = CalendarData;
// Set the html contents of each date.
var getDayKey = function (date) {
return $scope.dataService.getDayKey(date);
};
$scope.dayKey = getDayKey;
var getDayContent = function (date) {
// Initialize the data in the data array.
if ($scope.noCache) {
$scope.dataService.setDayContent(date, "");
} else {
$scope.dataService.setDayContent(date, ($scope.dataService.data[getDayKey(date)] || ""));
}
var cb = ($scope.dayContent || angular.noop)();
var result = (cb || angular.noop)(date);
// Check for async function. This should support $http.get() and also regular $q.defer() functions.
if (angular.isObject(result) && "function" === typeof result.success) {
result.success(function (html) {
$scope.dataService.setDayContent(date, html);
});
} else if (angular.isObject(result) && "function" === typeof result.then) {
result.then(function (html) {
$scope.dataService.setDayContent(date, html);
});
} else {
$scope.dataService.setDayContent(date, result);
}
};
var setData = function () {
angular.forEach($scope.calendar.weeks, function (week) {
angular.forEach(week, getDayContent);
});
};
window.data = $scope.data;
var bootstrap = function () {
init().then(function (contents) {
setTemplate(contents);
setData();
});
};
$scope.$watch("weekStartsOn", init);
bootstrap();
// These are for tests, don't remove them..
$scope._$$init = init;
$scope._$$setTemplate = setTemplate;
$scope._$$bootstrap = bootstrap;
}
};
}]); | }); |
struct_associated_route_tables.go | package vpc
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
// AssociatedRouteTables is a nested struct in vpc response
type AssociatedRouteTables struct {
RouteTable []string `json:"RouteTable" xml:"RouteTable"`
} | ||
test_object_tracking_2d_deep_sort.py | # Copyright 2020-2022 OpenDR European Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
import shutil
import torch
from opendr.perception.object_tracking_2d import ObjectTracking2DDeepSortLearner
from opendr.perception.object_tracking_2d import (
Market1501Dataset,
Market1501DatasetIterator,
)
from opendr.perception.object_tracking_2d import (
MotDataset,
RawMotWithDetectionsDatasetIterator,
)
import os
DEVICE = os.getenv('TEST_DEVICE') if os.getenv('TEST_DEVICE') else 'cpu'
print("Using device:", DEVICE)
print("Using device:", DEVICE, file=sys.stderr)
def rmfile(path):
try:
os.remove(path)
except OSError as e:
print("Error: %s - %s." % (e.filename, e.strerror))
def rmdir(_dir):
try:
shutil.rmtree(_dir)
except OSError as e:
print("Error: %s - %s." % (e.filename, e.strerror))
class TestObjectTracking2DDeepSortLearner(unittest.TestCase):
@classmethod
def setUpClass(cls):
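        # Download the nano MOT20 and nano Market-1501 datasets plus the pretrained deep_sort model
        # into a shared temporary directory used by every test in this class.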
cls.temp_dir = os.path.join("tests", "sources", "tools",
"perception", "object_tracking_2d",
"deep_sort",
"deep_sort_temp")
cls.train_split_paths = {
"nano_mot20": os.path.join(
".", "src", "opendr", "perception", "object_tracking_2d",
"datasets", "splits", "nano_mot20.train"
)
}
cls.model_names = [
"deep_sort",
]
cls.mot_dataset_path = MotDataset.download_nano_mot20(
os.path.join(cls.temp_dir, "mot_dataset"), True
).path
cls.market1501_dataset_path = Market1501Dataset.download_nano_market1501(
os.path.join(cls.temp_dir, "market1501_dataset"), True
).path
print("Dataset downloaded", file=sys.stderr)
for model_name in cls.model_names:
ObjectTracking2DDeepSortLearner.download(
model_name, cls.temp_dir
)
print("Models downloaded", file=sys.stderr)
@classmethod
def tearDownClass(cls):
# Clean up downloaded files
rmdir(os.path.join(cls.temp_dir))
def test_fit(self):
def test_model(name):
dataset = Market1501Dataset(self.market1501_dataset_path)
learner = ObjectTracking2DDeepSortLearner(
temp_path=self.temp_dir,
device=DEVICE,
)
starting_param = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone()
learner.fit(
dataset,
epochs=2,
val_epochs=2,
verbose=True,
)
new_param = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone()
self.assertFalse(torch.equal(starting_param, new_param))
print("Fit", name, "ok", file=sys.stderr)
for name in self.model_names:
test_model(name)
def test_fit_iterator(self):
def test_model(name):
dataset = Market1501DatasetIterator(
os.path.join(self.market1501_dataset_path, "bounding_box_train"),
)
eval_dataset = Market1501DatasetIterator(
os.path.join(self.market1501_dataset_path, "bounding_box_test"),
)
learner = ObjectTracking2DDeepSortLearner(
checkpoint_after_iter=3,
temp_path=self.temp_dir,
device=DEVICE,
)
starting_param = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone()
learner.fit(
dataset,
epochs=2,
val_dataset=eval_dataset,
val_epochs=2,
verbose=True,
)
new_param = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone()
self.assertFalse(torch.equal(starting_param, new_param))
print("Fit iterator", name, "ok", file=sys.stderr)
for name in self.model_names:
test_model(name)
def test_eval(self):
def test_model(name):
|
for name in self.model_names:
test_model(name)
def test_infer(self):
def test_model(name):
model_path = os.path.join(self.temp_dir, name)
train_split_paths = {
"nano_mot20": os.path.join(
".", "src", "opendr", "perception", "object_tracking_2d",
"datasets", "splits", "nano_mot20.train"
)
}
dataset = RawMotWithDetectionsDatasetIterator(
self.mot_dataset_path,
train_split_paths
)
learner = ObjectTracking2DDeepSortLearner(
temp_path=self.temp_dir,
device=DEVICE,
)
learner.load(model_path, verbose=True)
result = learner.infer(dataset[0][0], 1)
self.assertTrue(len(result) > 0)
learner.reset()
result = learner.infer([
dataset[0][0],
dataset[1][0],
])
self.assertTrue(len(result) == 2)
self.assertTrue(len(result[0]) > 0)
for name in self.model_names:
test_model(name)
def test_save(self):
def test_model(name):
model_path = os.path.join(self.temp_dir, "test_save_" + name)
save_path = os.path.join(model_path, "save")
learner = ObjectTracking2DDeepSortLearner(
temp_path=self.temp_dir,
device=DEVICE,
)
learner.save(save_path, True)
starting_param_1 = list(learner.tracker.deepsort.extractor.net.parameters())[0].clone()
learner2 = ObjectTracking2DDeepSortLearner(
temp_path=self.temp_dir,
device=DEVICE,
)
learner2.load(save_path)
new_param = list(learner2.tracker.deepsort.extractor.net.parameters())[0].clone()
self.assertTrue(torch.equal(starting_param_1, new_param))
for name in self.model_names:
test_model(name)
def test_optimize(self):
def test_model(name):
model_path = os.path.join(self.temp_dir, name)
train_split_paths = {
"nano_mot20": os.path.join(
".", "src", "opendr", "perception", "object_tracking_2d",
"datasets", "splits", "nano_mot20.train"
)
}
dataset = RawMotWithDetectionsDatasetIterator(
self.mot_dataset_path,
train_split_paths
)
learner = ObjectTracking2DDeepSortLearner(
temp_path=self.temp_dir,
device=DEVICE,
)
learner.load(model_path, verbose=True)
learner.optimize()
result = learner.eval(dataset)
self.assertGreater(len(result["mota"]), 0)
for name in self.model_names:
test_model(name)
if __name__ == "__main__":
unittest.main()
| model_path = os.path.join(self.temp_dir, name)
train_split_paths = {
"nano_mot20": os.path.join(
".", "src", "opendr", "perception", "object_tracking_2d",
"datasets", "splits", "nano_mot20.train"
)
}
dataset = RawMotWithDetectionsDatasetIterator(
self.mot_dataset_path,
train_split_paths
)
learner = ObjectTracking2DDeepSortLearner(
temp_path=self.temp_dir,
device=DEVICE,
)
learner.load(model_path, verbose=True)
result = learner.eval(dataset)
self.assertGreater(len(result["mota"]), 0) |
exc.py | # encoding: utf-8
from __future__ import unicode_literals
class TranslationError(Exception):
| """Failure to translate source."""
pass |
|
database.js | const AWS = require('aws-sdk');
const _ = require('lodash');
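// Returns a shallow copy of obj with every top-level key lower-cased,
// used to normalize item keys before records are pushed to Firehose.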
function lower(obj) {
let key, keys = Object.keys(obj);
let n = keys.length;
const newobj={};
while (n--) {
key = keys[n];
newobj[key.toLowerCase()] = obj[key];
}
return newobj;
}
module.exports = function(config) {
let firehose;
if (config.firehose && config.firehose.stream) {
AWS.config = config.firehose;
firehose = new AWS.Firehose();
} else {
console.warn('No Firehose configuration present. Skipping firehose storage.')
}
return {
put: function(pageUrl, clientId, connectionId, clientFeatures, connectionFeatures) {
const d = new Date().getTime();
const item = {
Date: d - (d % (86400 * 1000)), // just the UTC day
DateTime: d,
ClientId: clientId,
ConnectionId: clientId + '_' + connectionId,
PageUrl: pageUrl,
};
_.forEach(clientFeatures, (value, key) => {
item[key] = value;
});
_.forEach(connectionFeatures, (value, key) => {
item[key] = value;
});
if (firehose) {
firehose.putRecord({
DeliveryStreamName: config.firehose.stream, /* required */
Record: {
Data: JSON.stringify(lower(item))
},
}, (err, data) => {
if (err) {
console.log("Error firehosing data: ", err, JSON.stringify(lower(item)));
} else {
console.log("Successfully firehosed data"); | }
});
}
},
};
} | |
views.go | package customerinsights
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/autorest/validation"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// ViewsClient is the client for the Azure Customer Insights management API, which provides a RESTful set of web services
// that interact with the Azure Customer Insights service to manage your resources. The API has entities that capture the
// relationship between an end user and the Azure Customer Insights service.
type ViewsClient struct {
BaseClient
}
// NewViewsClient creates an instance of the ViewsClient client.
func NewViewsClient(subscriptionID string) ViewsClient |
// NewViewsClientWithBaseURI creates an instance of the ViewsClient client using a custom endpoint. Use this when
// interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack).
func NewViewsClientWithBaseURI(baseURI string, subscriptionID string) ViewsClient {
return ViewsClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// CreateOrUpdate creates a view or updates an existing view in the hub.
// Parameters:
// resourceGroupName - the name of the resource group.
// hubName - the name of the hub.
// viewName - the name of the view.
// parameters - parameters supplied to the CreateOrUpdate View operation.
func (client ViewsClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, hubName string, viewName string, parameters ViewResourceFormat) (result ViewResourceFormat, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ViewsClient.CreateOrUpdate")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: viewName,
Constraints: []validation.Constraint{{Target: "viewName", Name: validation.MaxLength, Rule: 512, Chain: nil},
{Target: "viewName", Name: validation.MinLength, Rule: 1, Chain: nil}}},
{TargetValue: parameters,
Constraints: []validation.Constraint{{Target: "parameters.View", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.View.Definition", Name: validation.Null, Rule: true, Chain: nil}}}}}}); err != nil {
return result, validation.NewError("customerinsights.ViewsClient", "CreateOrUpdate", err.Error())
}
req, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, hubName, viewName, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "customerinsights.ViewsClient", "CreateOrUpdate", nil, "Failure preparing request")
return
}
resp, err := client.CreateOrUpdateSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "customerinsights.ViewsClient", "CreateOrUpdate", resp, "Failure sending request")
return
}
result, err = client.CreateOrUpdateResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "customerinsights.ViewsClient", "CreateOrUpdate", resp, "Failure responding to request")
return
}
return
}
// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client ViewsClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, hubName string, viewName string, parameters ViewResourceFormat) (*http.Request, error) {
pathParameters := map[string]interface{}{
"hubName": autorest.Encode("path", hubName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"viewName": autorest.Encode("path", viewName),
}
const APIVersion = "2017-01-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPut(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CustomerInsights/hubs/{hubName}/views/{viewName}", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client ViewsClient) CreateOrUpdateSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client ViewsClient) CreateOrUpdateResponder(resp *http.Response) (result ViewResourceFormat, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Delete deletes a view in the specified hub.
// Parameters:
// resourceGroupName - the name of the resource group.
// hubName - the name of the hub.
// viewName - the name of the view.
// userID - the user ID. Use * to retrieve hub level view.
func (client ViewsClient) Delete(ctx context.Context, resourceGroupName string, hubName string, viewName string, userID string) (result autorest.Response, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ViewsClient.Delete")
defer func() {
sc := -1
if result.Response != nil {
sc = result.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.DeletePreparer(ctx, resourceGroupName, hubName, viewName, userID)
if err != nil {
err = autorest.NewErrorWithError(err, "customerinsights.ViewsClient", "Delete", nil, "Failure preparing request")
return
}
resp, err := client.DeleteSender(req)
if err != nil {
result.Response = resp
err = autorest.NewErrorWithError(err, "customerinsights.ViewsClient", "Delete", resp, "Failure sending request")
return
}
result, err = client.DeleteResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "customerinsights.ViewsClient", "Delete", resp, "Failure responding to request")
return
}
return
}
// DeletePreparer prepares the Delete request.
func (client ViewsClient) DeletePreparer(ctx context.Context, resourceGroupName string, hubName string, viewName string, userID string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"hubName": autorest.Encode("path", hubName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"viewName": autorest.Encode("path", viewName),
}
const APIVersion = "2017-01-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
"userId": autorest.Encode("query", userID),
}
preparer := autorest.CreatePreparer(
autorest.AsDelete(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CustomerInsights/hubs/{hubName}/views/{viewName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client ViewsClient) DeleteSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client ViewsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByClosing())
result.Response = resp
return
}
// Get gets a view in the hub.
// Parameters:
// resourceGroupName - the name of the resource group.
// hubName - the name of the hub.
// viewName - the name of the view.
// userID - the user ID. Use * to retrieve hub level view.
func (client ViewsClient) Get(ctx context.Context, resourceGroupName string, hubName string, viewName string, userID string) (result ViewResourceFormat, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ViewsClient.Get")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.GetPreparer(ctx, resourceGroupName, hubName, viewName, userID)
if err != nil {
err = autorest.NewErrorWithError(err, "customerinsights.ViewsClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "customerinsights.ViewsClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "customerinsights.ViewsClient", "Get", resp, "Failure responding to request")
return
}
return
}
// GetPreparer prepares the Get request.
func (client ViewsClient) GetPreparer(ctx context.Context, resourceGroupName string, hubName string, viewName string, userID string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"hubName": autorest.Encode("path", hubName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"viewName": autorest.Encode("path", viewName),
}
const APIVersion = "2017-01-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
"userId": autorest.Encode("query", userID),
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CustomerInsights/hubs/{hubName}/views/{viewName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client ViewsClient) GetSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client ViewsClient) GetResponder(resp *http.Response) (result ViewResourceFormat, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// ListByHub gets all available views for given user in the specified hub.
// Parameters:
// resourceGroupName - the name of the resource group.
// hubName - the name of the hub.
// userID - the user ID. Use * to retrieve hub level views.
func (client ViewsClient) ListByHub(ctx context.Context, resourceGroupName string, hubName string, userID string) (result ViewListResultPage, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ViewsClient.ListByHub")
defer func() {
sc := -1
if result.vlr.Response.Response != nil {
sc = result.vlr.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.fn = client.listByHubNextResults
req, err := client.ListByHubPreparer(ctx, resourceGroupName, hubName, userID)
if err != nil {
err = autorest.NewErrorWithError(err, "customerinsights.ViewsClient", "ListByHub", nil, "Failure preparing request")
return
}
resp, err := client.ListByHubSender(req)
if err != nil {
result.vlr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "customerinsights.ViewsClient", "ListByHub", resp, "Failure sending request")
return
}
result.vlr, err = client.ListByHubResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "customerinsights.ViewsClient", "ListByHub", resp, "Failure responding to request")
return
}
if result.vlr.hasNextLink() && result.vlr.IsEmpty() {
err = result.NextWithContext(ctx)
}
return
}
// ListByHubPreparer prepares the ListByHub request.
func (client ViewsClient) ListByHubPreparer(ctx context.Context, resourceGroupName string, hubName string, userID string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"hubName": autorest.Encode("path", hubName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-01-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
"userId": autorest.Encode("query", userID),
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.CustomerInsights/hubs/{hubName}/views", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListByHubSender sends the ListByHub request. The method will close the
// http.Response Body if it receives an error.
func (client ViewsClient) ListByHubSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// ListByHubResponder handles the response to the ListByHub request. The method always
// closes the http.Response Body.
func (client ViewsClient) ListByHubResponder(resp *http.Response) (result ViewListResult, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listByHubNextResults retrieves the next set of results, if any.
func (client ViewsClient) listByHubNextResults(ctx context.Context, lastResults ViewListResult) (result ViewListResult, err error) {
req, err := lastResults.viewListResultPreparer(ctx)
if err != nil {
return result, autorest.NewErrorWithError(err, "customerinsights.ViewsClient", "listByHubNextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListByHubSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "customerinsights.ViewsClient", "listByHubNextResults", resp, "Failure sending next results request")
}
result, err = client.ListByHubResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "customerinsights.ViewsClient", "listByHubNextResults", resp, "Failure responding to next results request")
return
}
return
}
// ListByHubComplete enumerates all values, automatically crossing page boundaries as required.
func (client ViewsClient) ListByHubComplete(ctx context.Context, resourceGroupName string, hubName string, userID string) (result ViewListResultIterator, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/ViewsClient.ListByHub")
defer func() {
sc := -1
if result.Response().Response.Response != nil {
sc = result.page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
result.page, err = client.ListByHub(ctx, resourceGroupName, hubName, userID)
return
}
| {
return NewViewsClientWithBaseURI(DefaultBaseURI, subscriptionID)
} |
floats.rs | #[cfg(feature = "object")]
use std::any::Any;
use super::private;
use super::IntoSeries;
use super::SeriesTrait;
use super::SeriesWrap;
use crate::chunked_array::comparison::*;
#[cfg(feature = "rolling_window")]
use crate::chunked_array::ops::rolling_window::RollingOptions;
use crate::chunked_array::{
ops::{
aggregate::{ChunkAggSeries, VarAggSeries},
compare_inner::{IntoPartialEqInner, IntoPartialOrdInner, PartialEqInner, PartialOrdInner},
explode::ExplodeByOffsets,
},
AsSinglePtr, ChunkIdIter,
};
use crate::fmt::FmtList;
#[cfg(feature = "pivot")]
use crate::frame::groupby::pivot::*;
use crate::frame::groupby::*;
use crate::frame::hash_join::{HashJoin, ZipOuterJoinColumn};
use crate::prelude::*;
#[cfg(feature = "checked_arithmetic")]
use crate::series::arithmetic::checked::NumOpsDispatchChecked;
use ahash::RandomState;
use arrow::array::ArrayRef;
use std::borrow::Cow;
use std::ops::{BitAnd, BitOr, BitXor};
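// Generates the Series implementations (IntoSeries, PrivateSeries and SeriesTrait) for a float
// ChunkedArray, delegating almost every call to the wrapped chunked array (`self.0`).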
macro_rules! impl_dyn_series {
($ca: ident) => {
impl IntoSeries for $ca {
fn into_series(self) -> Series {
Series(Arc::new(SeriesWrap(self)))
}
}
impl private::PrivateSeries for SeriesWrap<$ca> {
fn _field(&self) -> Cow<Field> {
Cow::Borrowed(self.0.ref_field())
}
fn _dtype(&self) -> &DataType {
self.0.ref_field().data_type()
}
fn explode_by_offsets(&self, offsets: &[i64]) -> Series {
self.0.explode_by_offsets(offsets)
}
#[cfg(feature = "rolling_window")]
fn _rolling_mean(&self, options: RollingOptions) -> Result<Series> {
self.0.rolling_mean(options)
}
#[cfg(feature = "rolling_window")]
fn _rolling_sum(&self, options: RollingOptions) -> Result<Series> {
self.0.rolling_sum(options).map(|ca| ca.into())
}
#[cfg(feature = "rolling_window")]
fn _rolling_min(&self, options: RollingOptions) -> Result<Series> {
self.0.rolling_min(options).map(|ca| ca.into())
}
#[cfg(feature = "rolling_window")]
fn _rolling_max(&self, options: RollingOptions) -> Result<Series> {
self.0.rolling_max(options).map(|ca| ca.into())
}
#[cfg(feature = "rolling_window")]
fn _rolling_std(&self, options: RollingOptions) -> Result<Series> {
self.0.rolling_std(options)
}
#[cfg(feature = "rolling_window")]
fn _rolling_var(&self, options: RollingOptions) -> Result<Series> {
self.0.rolling_var(options)
}
#[cfg(feature = "cum_agg")]
fn _cummax(&self, reverse: bool) -> Series {
self.0.cummax(reverse).into_series()
}
#[cfg(feature = "cum_agg")]
fn _cummin(&self, reverse: bool) -> Series {
self.0.cummin(reverse).into_series()
}
#[cfg(feature = "cum_agg")]
fn _cumsum(&self, reverse: bool) -> Series {
self.0.cumsum(reverse).into_series()
}
#[cfg(feature = "asof_join")]
fn join_asof(&self, other: &Series) -> Result<Vec<Option<u32>>> {
self.0.join_asof(other)
}
fn set_sorted(&mut self, reverse: bool) {
self.0.set_sorted(reverse)
}
unsafe fn equal_element(
&self,
idx_self: usize,
idx_other: usize,
other: &Series,
) -> bool {
self.0.equal_element(idx_self, idx_other, other)
}
#[cfg(feature = "zip_with")]
fn zip_with_same_type(&self, mask: &BooleanChunked, other: &Series) -> Result<Series> {
ChunkZip::zip_with(&self.0, mask, other.as_ref().as_ref())
.map(|ca| ca.into_series())
}
fn into_partial_eq_inner<'a>(&'a self) -> Box<dyn PartialEqInner + 'a> {
(&self.0).into_partial_eq_inner()
}
fn into_partial_ord_inner<'a>(&'a self) -> Box<dyn PartialOrdInner + 'a> {
(&self.0).into_partial_ord_inner()
}
fn vec_hash(&self, random_state: RandomState) -> AlignedVec<u64> {
self.0.vec_hash(random_state)
}
fn vec_hash_combine(&self, build_hasher: RandomState, hashes: &mut [u64]) {
self.0.vec_hash_combine(build_hasher, hashes)
}
fn agg_mean(&self, groups: &[(u32, Vec<u32>)]) -> Option<Series> {
self.agg_mean(groups)
}
fn agg_min(&self, groups: &[(u32, Vec<u32>)]) -> Option<Series> {
self.0.agg_min(groups)
}
fn agg_max(&self, groups: &[(u32, Vec<u32>)]) -> Option<Series> {
self.0.agg_max(groups)
}
fn agg_sum(&self, groups: &[(u32, Vec<u32>)]) -> Option<Series> {
self.0.agg_sum(groups)
}
fn agg_first(&self, groups: &[(u32, Vec<u32>)]) -> Series {
self.0.agg_first(groups)
}
fn agg_last(&self, groups: &[(u32, Vec<u32>)]) -> Series {
self.0.agg_last(groups)
}
fn agg_std(&self, groups: &[(u32, Vec<u32>)]) -> Option<Series> {
self.agg_std(groups)
}
fn agg_var(&self, groups: &[(u32, Vec<u32>)]) -> Option<Series> {
self.agg_var(groups)
}
fn agg_n_unique(&self, groups: &[(u32, Vec<u32>)]) -> Option<UInt32Chunked> {
self.0.agg_n_unique(groups)
}
fn agg_list(&self, groups: &[(u32, Vec<u32>)]) -> Option<Series> {
self.0.agg_list(groups)
}
fn agg_quantile(&self, groups: &[(u32, Vec<u32>)], quantile: f64) -> Option<Series> {
self.0.agg_quantile(groups, quantile)
}
fn agg_median(&self, groups: &[(u32, Vec<u32>)]) -> Option<Series> {
self.0.agg_median(groups)
}
#[cfg(feature = "lazy")]
fn agg_valid_count(&self, groups: &[(u32, Vec<u32>)]) -> Option<Series> {
self.0.agg_valid_count(groups)
}
#[cfg(feature = "pivot")]
fn pivot<'a>(
&self,
pivot_series: &'a Series,
keys: Vec<Series>,
groups: &[(u32, Vec<u32>)],
agg_type: PivotAgg,
) -> Result<DataFrame> {
self.0.pivot(pivot_series, keys, groups, agg_type)
}
#[cfg(feature = "pivot")]
fn pivot_count<'a>(
&self,
pivot_series: &'a Series,
keys: Vec<Series>,
groups: &[(u32, Vec<u32>)],
) -> Result<DataFrame> {
self.0.pivot_count(pivot_series, keys, groups)
}
fn hash_join_inner(&self, other: &Series) -> Vec<(u32, u32)> {
HashJoin::hash_join_inner(&self.0, other.as_ref().as_ref())
}
fn hash_join_left(&self, other: &Series) -> Vec<(u32, Option<u32>)> {
HashJoin::hash_join_left(&self.0, other.as_ref().as_ref())
}
fn hash_join_outer(&self, other: &Series) -> Vec<(Option<u32>, Option<u32>)> {
HashJoin::hash_join_outer(&self.0, other.as_ref().as_ref())
}
fn zip_outer_join_column(
&self,
right_column: &Series,
opt_join_tuples: &[(Option<u32>, Option<u32>)],
) -> Series {
ZipOuterJoinColumn::zip_outer_join_column(&self.0, right_column, opt_join_tuples)
}
fn subtract(&self, rhs: &Series) -> Result<Series> {
NumOpsDispatch::subtract(&self.0, rhs)
}
fn add_to(&self, rhs: &Series) -> Result<Series> {
NumOpsDispatch::add_to(&self.0, rhs)
}
fn multiply(&self, rhs: &Series) -> Result<Series> {
NumOpsDispatch::multiply(&self.0, rhs)
}
fn divide(&self, rhs: &Series) -> Result<Series> {
NumOpsDispatch::divide(&self.0, rhs)
}
fn remainder(&self, rhs: &Series) -> Result<Series> {
NumOpsDispatch::remainder(&self.0, rhs)
}
fn group_tuples(&self, multithreaded: bool) -> GroupTuples {
IntoGroupTuples::group_tuples(&self.0, multithreaded)
}
#[cfg(feature = "sort_multiple")]
fn argsort_multiple(&self, by: &[Series], reverse: &[bool]) -> Result<UInt32Chunked> {
self.0.argsort_multiple(by, reverse)
}
fn str_value(&self, index: usize) -> Cow<str> {
// get AnyValue
Cow::Owned(format!("{}", self.get(index)))
}
}
impl SeriesTrait for SeriesWrap<$ca> {
#[cfg(feature = "rolling_window")]
fn rolling_apply(
&self,
_window_size: usize,
_f: &dyn Fn(&Series) -> Series,
) -> Result<Series> {
ChunkRollApply::rolling_apply(&self.0, _window_size, _f).map(|ca| ca.into_series())
}
#[cfg(feature = "interpolate")]
fn interpolate(&self) -> Series {
self.0.interpolate().into_series()
}
fn bitand(&self, other: &Series) -> Result<Series> {
let other = if other.len() == 1 {
Cow::Owned(other.cast(self.dtype())?)
} else {
Cow::Borrowed(other)
};
let other = self.0.unpack_series_matching_type(&other)?;
Ok(self.0.bitand(&other).into_series())
}
fn bitor(&self, other: &Series) -> Result<Series> {
let other = if other.len() == 1 {
Cow::Owned(other.cast(self.dtype())?)
} else {
Cow::Borrowed(other)
};
let other = self.0.unpack_series_matching_type(&other)?;
Ok(self.0.bitor(&other).into_series())
}
fn bitxor(&self, other: &Series) -> Result<Series> {
let other = if other.len() == 1 {
Cow::Owned(other.cast(self.dtype())?)
} else {
Cow::Borrowed(other)
};
let other = self.0.unpack_series_matching_type(&other)?;
Ok(self.0.bitxor(&other).into_series())
}
fn rename(&mut self, name: &str) {
self.0.rename(name);
}
fn chunk_lengths(&self) -> ChunkIdIter {
self.0.chunk_id()
}
fn name(&self) -> &str {
self.0.name()
}
fn chunks(&self) -> &Vec<ArrayRef> {
self.0.chunks()
}
fn shrink_to_fit(&mut self) {
self.0.shrink_to_fit()
}
fn f32(&self) -> Result<&Float32Chunked> {
if matches!(self.0.dtype(), DataType::Float32) {
unsafe { Ok(&*(self as *const dyn SeriesTrait as *const Float32Chunked)) }
} else {
Err(PolarsError::DataTypeMisMatch(
format!(
"cannot unpack Series: {:?} of type {:?} into f32",
self.name(),
self.dtype(),
)
.into(),
))
}
}
fn f64(&self) -> Result<&Float64Chunked> {
if matches!(self.0.dtype(), DataType::Float64) {
unsafe { Ok(&*(self as *const dyn SeriesTrait as *const Float64Chunked)) }
} else {
Err(PolarsError::DataTypeMisMatch(
format!(
"cannot unpack Series: {:?} of type {:?} into f64",
self.name(),
self.dtype(),
)
.into(),
))
}
}
fn append_array(&mut self, other: ArrayRef) -> Result<()> {
self.0.append_array(other)
}
fn slice(&self, offset: i64, length: usize) -> Series {
return self.0.slice(offset, length).into_series();
}
fn append(&mut self, other: &Series) -> Result<()> {
if self.0.dtype() == other.dtype() {
// todo! add object
self.0.append(other.as_ref().as_ref());
Ok(())
} else {
Err(PolarsError::DataTypeMisMatch(
"cannot append Series; data types don't match".into(),
))
}
}
fn filter(&self, filter: &BooleanChunked) -> Result<Series> {
ChunkFilter::filter(&self.0, filter).map(|ca| ca.into_series())
}
fn mean(&self) -> Option<f64> {
self.0.mean()
}
fn median(&self) -> Option<f64> {
self.0.median()
}
fn take(&self, indices: &UInt32Chunked) -> Result<Series> {
let indices = if indices.chunks.len() > 1 {
Cow::Owned(indices.rechunk())
} else {
Cow::Borrowed(indices)
};
Ok(ChunkTake::take(&self.0, (&*indices).into())?.into_series())
}
fn take_iter(&self, iter: &mut dyn TakeIterator) -> Result<Series> {
Ok(ChunkTake::take(&self.0, iter.into())?.into_series())
}
fn take_every(&self, n: usize) -> Series {
self.0.take_every(n).into_series()
}
unsafe fn take_iter_unchecked(&self, iter: &mut dyn TakeIterator) -> Series {
ChunkTake::take_unchecked(&self.0, iter.into()).into_series()
}
unsafe fn take_unchecked(&self, idx: &UInt32Chunked) -> Result<Series> {
let idx = if idx.chunks.len() > 1 {
Cow::Owned(idx.rechunk())
} else {
Cow::Borrowed(idx)
};
Ok(ChunkTake::take_unchecked(&self.0, (&*idx).into()).into_series())
}
unsafe fn take_opt_iter_unchecked(&self, iter: &mut dyn TakeIteratorNulls) -> Series {
ChunkTake::take_unchecked(&self.0, iter.into()).into_series()
}
#[cfg(feature = "take_opt_iter")]
fn take_opt_iter(&self, iter: &mut dyn TakeIteratorNulls) -> Result<Series> {
Ok(ChunkTake::take(&self.0, iter.into())?.into_series())
}
fn len(&self) -> usize {
self.0.len()
}
fn rechunk(&self) -> Series {
ChunkOps::rechunk(&self.0).into_series()
}
fn head(&self, length: Option<usize>) -> Series {
self.0.head(length).into_series()
}
fn tail(&self, length: Option<usize>) -> Series {
self.0.tail(length).into_series()
}
fn expand_at_index(&self, index: usize, length: usize) -> Series {
ChunkExpandAtIndex::expand_at_index(&self.0, index, length).into_series()
}
fn cast(&self, data_type: &DataType) -> Result<Series> {
self.0.cast(data_type)
}
fn to_dummies(&self) -> Result<DataFrame> {
ToDummies::to_dummies(&self.0)
}
fn value_counts(&self) -> Result<DataFrame> {
ChunkUnique::value_counts(&self.0)
}
fn get(&self, index: usize) -> AnyValue {
self.0.get_any_value(index)
}
#[inline]
unsafe fn get_unchecked(&self, index: usize) -> AnyValue {
self.0.get_any_value_unchecked(index)
}
fn sort_in_place(&mut self, reverse: bool) {
ChunkSort::sort_in_place(&mut self.0, reverse);
}
fn sort(&self, reverse: bool) -> Series {
ChunkSort::sort(&self.0, reverse).into_series()
}
fn argsort(&self, reverse: bool) -> UInt32Chunked {
ChunkSort::argsort(&self.0, reverse)
}
fn null_count(&self) -> usize {
self.0.null_count()
}
fn has_validity(&self) -> bool {
self.0.has_validity()
}
fn unique(&self) -> Result<Series> {
ChunkUnique::unique(&self.0).map(|ca| ca.into_series())
}
fn n_unique(&self) -> Result<usize> {
ChunkUnique::n_unique(&self.0)
}
fn arg_unique(&self) -> Result<UInt32Chunked> {
ChunkUnique::arg_unique(&self.0)
}
fn arg_min(&self) -> Option<usize> {
ArgAgg::arg_min(&self.0)
} | fn arg_max(&self) -> Option<usize> {
ArgAgg::arg_max(&self.0)
}
fn is_null(&self) -> BooleanChunked {
self.0.is_null()
}
fn is_not_null(&self) -> BooleanChunked {
self.0.is_not_null()
}
fn is_unique(&self) -> Result<BooleanChunked> {
ChunkUnique::is_unique(&self.0)
}
fn is_duplicated(&self) -> Result<BooleanChunked> {
ChunkUnique::is_duplicated(&self.0)
}
fn reverse(&self) -> Series {
ChunkReverse::reverse(&self.0).into_series()
}
fn as_single_ptr(&mut self) -> Result<usize> {
self.0.as_single_ptr()
}
fn shift(&self, periods: i64) -> Series {
ChunkShift::shift(&self.0, periods).into_series()
}
fn fill_null(&self, strategy: FillNullStrategy) -> Result<Series> {
ChunkFillNull::fill_null(&self.0, strategy).map(|ca| ca.into_series())
}
fn sum_as_series(&self) -> Series {
ChunkAggSeries::sum_as_series(&self.0)
}
fn max_as_series(&self) -> Series {
ChunkAggSeries::max_as_series(&self.0)
}
fn min_as_series(&self) -> Series {
ChunkAggSeries::min_as_series(&self.0)
}
fn mean_as_series(&self) -> Series {
ChunkAggSeries::mean_as_series(&self.0)
}
fn median_as_series(&self) -> Series {
ChunkAggSeries::median_as_series(&self.0)
}
fn var_as_series(&self) -> Series {
VarAggSeries::var_as_series(&self.0)
}
fn std_as_series(&self) -> Series {
VarAggSeries::std_as_series(&self.0)
}
fn quantile_as_series(&self, quantile: f64) -> Result<Series> {
ChunkAggSeries::quantile_as_series(&self.0, quantile)
}
fn fmt_list(&self) -> String {
FmtList::fmt_list(&self.0)
}
fn clone_inner(&self) -> Arc<dyn SeriesTrait> {
Arc::new(SeriesWrap(Clone::clone(&self.0)))
}
#[cfg(feature = "random")]
#[cfg_attr(docsrs, doc(cfg(feature = "random")))]
fn sample_n(&self, n: usize, with_replacement: bool) -> Result<Series> {
self.0
.sample_n(n, with_replacement)
.map(|ca| ca.into_series())
}
#[cfg(feature = "random")]
#[cfg_attr(docsrs, doc(cfg(feature = "random")))]
fn sample_frac(&self, frac: f64, with_replacement: bool) -> Result<Series> {
self.0
.sample_frac(frac, with_replacement)
.map(|ca| ca.into_series())
}
fn pow(&self, exponent: f64) -> Result<Series> {
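// Power is only defined for numeric data here: reject utf8/list/bool,
// keep f32 precision for Float32 and compute in f64 for everything else.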
let f_err = || {
Err(PolarsError::InvalidOperation(
format!("power operation not supported on dtype {:?}", self.dtype()).into(),
))
};
match self.dtype() {
DataType::Utf8 | DataType::List(_) | DataType::Boolean => f_err(),
DataType::Float32 => Ok(self.0.pow_f32(exponent as f32).into_series()),
_ => Ok(self.0.pow_f64(exponent).into_series()),
}
}
fn peak_max(&self) -> BooleanChunked {
self.0.peak_max()
}
fn peak_min(&self) -> BooleanChunked {
self.0.peak_min()
}
#[cfg(feature = "is_in")]
fn is_in(&self, other: &Series) -> Result<BooleanChunked> {
IsIn::is_in(&self.0, other)
}
#[cfg(feature = "repeat_by")]
fn repeat_by(&self, by: &UInt32Chunked) -> ListChunked {
RepeatBy::repeat_by(&self.0, by)
}
#[cfg(feature = "checked_arithmetic")]
fn checked_div(&self, rhs: &Series) -> Result<Series> {
self.0.checked_div(rhs)
}
#[cfg(feature = "is_first")]
fn is_first(&self) -> Result<BooleanChunked> {
self.0.is_first()
}
#[cfg(feature = "object")]
fn as_any(&self) -> &dyn Any {
&self.0
}
#[cfg(feature = "mode")]
fn mode(&self) -> Result<Series> {
Ok(self.0.mode()?.into_series())
}
}
};
}
impl_dyn_series!(Float32Chunked);
impl_dyn_series!(Float64Chunked); | |
slides.tsx | import { Component, ComponentInterface, Element, Event, EventEmitter, Host, Method, Prop, Watch, h } from '@stencil/core';
import { getIonMode } from '../../global/ionic-global';
import { SwiperInterface, SwiperOptions } from './swiper/swiper-interface';
/**
* @virtualProp {"ios" | "md"} mode - The mode determines which platform styles to use.
*/
@Component({
tag: 'ion-slides',
styleUrls: {
ios: 'slides.ios.scss',
md: 'slides.md.scss'
},
assetsDirs: ['swiper'],
})
export class | implements ComponentInterface {
private scrollbarEl?: HTMLElement;
private paginationEl?: HTMLElement;
private swiperReady = false;
private mutationO?: MutationObserver;
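// Deferred promise: consumers can await `swiper` before initialization;
// `readySwiper` resolves it once `initSwiper()` has created the instance.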
private readySwiper!: (swiper: SwiperInterface) => void;
private swiper: Promise<SwiperInterface> = new Promise(resolve => { this.readySwiper = resolve; });
private syncSwiper?: SwiperInterface;
private didInit = false;
@Element() el!: HTMLIonSlidesElement;
/**
* Options to pass to the swiper instance.
* See http://idangero.us/swiper/api/ for valid options
*/
@Prop() options: any = {}; // SwiperOptions; // TODO
@Watch('options')
async optionsChanged() {
if (this.swiperReady) {
const swiper = await this.getSwiper();
Object.assign(swiper.params, this.options);
await this.update();
}
}
/**
* If `true`, show the pagination.
*/
@Prop() pager = false;
/**
* If `true`, show the scrollbar.
*/
@Prop() scrollbar = false;
/**
* Emitted after Swiper initialization
*/
@Event() ionSlidesDidLoad!: EventEmitter<void>;
/**
* Emitted when the user taps/clicks on the slide's container.
*/
@Event() ionSlideTap!: EventEmitter<void>;
/**
* Emitted when the user double taps on the slide's container.
*/
@Event() ionSlideDoubleTap!: EventEmitter<void>;
/**
* Emitted before the active slide has changed.
*/
@Event() ionSlideWillChange!: EventEmitter<void>;
/**
* Emitted after the active slide has changed.
*/
@Event() ionSlideDidChange!: EventEmitter<void>;
/**
* Emitted when the next slide has started.
*/
@Event() ionSlideNextStart!: EventEmitter<void>;
/**
* Emitted when the previous slide has started.
*/
@Event() ionSlidePrevStart!: EventEmitter<void>;
/**
* Emitted when the next slide has ended.
*/
@Event() ionSlideNextEnd!: EventEmitter<void>;
/**
* Emitted when the previous slide has ended.
*/
@Event() ionSlidePrevEnd!: EventEmitter<void>;
/**
* Emitted when the slide transition has started.
*/
@Event() ionSlideTransitionStart!: EventEmitter<void>;
/**
* Emitted when the slide transition has ended.
*/
@Event() ionSlideTransitionEnd!: EventEmitter<void>;
/**
* Emitted when the slider is actively being moved.
*/
@Event() ionSlideDrag!: EventEmitter<void>;
/**
* Emitted when the slider is at its initial position.
*/
@Event() ionSlideReachStart!: EventEmitter<void>;
/**
* Emitted when the slider is at the last slide.
*/
@Event() ionSlideReachEnd!: EventEmitter<void>;
/**
* Emitted when the user first touches the slider.
*/
@Event() ionSlideTouchStart!: EventEmitter<void>;
/**
* Emitted when the user releases the touch.
*/
@Event() ionSlideTouchEnd!: EventEmitter<void>;
connectedCallback() {
// tslint:disable-next-line: strict-type-predicates
if (typeof MutationObserver !== 'undefined') {
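// Re-sync the Swiper instance whenever child slides are added or removed.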
const mut = this.mutationO = new MutationObserver(() => {
if (this.swiperReady) {
this.update();
}
});
mut.observe(this.el, {
childList: true,
subtree: true
});
this.el.componentOnReady().then(() => {
if (!this.didInit) {
this.didInit = true;
this.initSwiper();
}
});
}
}
disconnectedCallback() {
if (this.mutationO) {
this.mutationO.disconnect();
this.mutationO = undefined;
}
/**
* We need to synchronously destroy
* swiper otherwise it is possible
* that it will be left in a
* destroyed state if connectedCallback
* is called multiple times
*/
const swiper = this.syncSwiper;
if (swiper !== undefined) {
swiper.destroy(true, true);
this.swiper = new Promise(resolve => { this.readySwiper = resolve; });
this.swiperReady = false;
this.syncSwiper = undefined;
}
this.didInit = false;
}
/**
* Update the underlying slider implementation. Call this if you've added or removed
* child slides.
*/
@Method()
async update() {
const [swiper] = await Promise.all([
this.getSwiper(),
waitForSlides(this.el)
]);
swiper.update();
}
/**
* Force Swiper to update its height (when autoHeight is enabled) for a duration
* equal to the 'speed' parameter.
*
* @param speed The transition duration (in ms).
*/
@Method()
async updateAutoHeight(speed?: number) {
const swiper = await this.getSwiper();
swiper.updateAutoHeight(speed);
}
/**
* Transition to the specified slide.
*
* @param index The index of the slide to transition to.
* @param speed The transition duration (in ms).
* @param runCallbacks If true, the transition will produce [Transition/SlideChange][Start/End] transition events.
*/
@Method()
async slideTo(index: number, speed?: number, runCallbacks?: boolean) {
const swiper = await this.getSwiper();
swiper.slideTo(index, speed, runCallbacks);
}
/**
* Transition to the next slide.
*
* @param speed The transition duration (in ms).
* @param runCallbacks If true, the transition will produce [Transition/SlideChange][Start/End] transition events.
*/
@Method()
async slideNext(speed?: number, runCallbacks?: boolean) {
const swiper = await this.getSwiper();
swiper.slideNext(speed!, runCallbacks!);
}
/**
* Transition to the previous slide.
*
* @param speed The transition duration (in ms).
* @param runCallbacks If true, the transition will produce the [Transition/SlideChange][Start/End] transition events.
*/
@Method()
async slidePrev(speed?: number, runCallbacks?: boolean) {
const swiper = await this.getSwiper();
swiper.slidePrev(speed, runCallbacks);
}
/**
* Get the index of the active slide.
*/
@Method()
async getActiveIndex(): Promise<number> {
const swiper = await this.getSwiper();
return swiper.activeIndex;
}
/**
* Get the index of the previous slide.
*/
@Method()
async getPreviousIndex(): Promise<number> {
const swiper = await this.getSwiper();
return swiper.previousIndex;
}
/**
* Get the total number of slides.
*/
@Method()
async length(): Promise<number> {
const swiper = await this.getSwiper();
return swiper.slides.length;
}
/**
* Get whether or not the current slide is the last slide.
*/
@Method()
async isEnd(): Promise<boolean> {
const swiper = await this.getSwiper();
return swiper.isEnd;
}
/**
* Get whether or not the current slide is the first slide.
*/
@Method()
async isBeginning(): Promise<boolean> {
const swiper = await this.getSwiper();
return swiper.isBeginning;
}
/**
* Start auto play.
*/
@Method()
async startAutoplay() {
const swiper = await this.getSwiper();
if (swiper.autoplay) {
swiper.autoplay.start();
}
}
/**
* Stop auto play.
*/
@Method()
async stopAutoplay() {
const swiper = await this.getSwiper();
if (swiper.autoplay) {
swiper.autoplay.stop();
}
}
/**
* Lock or unlock the ability to slide to the next slide.
*
* @param lock If `true`, disable swiping to the next slide.
*/
@Method()
async lockSwipeToNext(lock: boolean) {
const swiper = await this.getSwiper();
swiper.allowSlideNext = !lock;
}
/**
* Lock or unlock the ability to slide to the previous slide.
*
* @param lock If `true`, disable swiping to the previous slide.
*/
@Method()
async lockSwipeToPrev(lock: boolean) {
const swiper = await this.getSwiper();
swiper.allowSlidePrev = !lock;
}
/**
* Lock or unlock the ability to slide to the next or previous slide.
*
* @param lock If `true`, disable swiping to the next and previous slide.
*/
@Method()
async lockSwipes(lock: boolean) {
const swiper = await this.getSwiper();
swiper.allowSlideNext = !lock;
swiper.allowSlidePrev = !lock;
swiper.allowTouchMove = !lock;
}
/**
* Get the Swiper instance.
* Use this to access the full Swiper API.
* See https://idangero.us/swiper/api/ for all API options.
*/
@Method()
async getSwiper(): Promise<any> {
return this.swiper;
}
private async initSwiper() {
const finalOptions = this.normalizeOptions();
// init swiper core
// @ts-ignore
const { Swiper } = await import('./swiper/swiper.bundle.js');
await waitForSlides(this.el);
const swiper = new Swiper(this.el, finalOptions);
this.swiperReady = true;
this.syncSwiper = swiper;
this.readySwiper(swiper);
}
private normalizeOptions(): SwiperOptions {
// Base options, can be changed
// TODO Add interface SwiperOptions
const swiperOptions: SwiperOptions = {
effect: undefined,
direction: 'horizontal',
initialSlide: 0,
loop: false,
parallax: false,
slidesPerView: 1,
spaceBetween: 0,
speed: 300,
slidesPerColumn: 1,
slidesPerColumnFill: 'column',
slidesPerGroup: 1,
centeredSlides: false,
slidesOffsetBefore: 0,
slidesOffsetAfter: 0,
touchEventsTarget: 'container',
autoplay: false,
freeMode: false,
freeModeMomentum: true,
freeModeMomentumRatio: 1,
freeModeMomentumBounce: true,
freeModeMomentumBounceRatio: 1,
freeModeMomentumVelocityRatio: 1,
freeModeSticky: false,
freeModeMinimumVelocity: 0.02,
autoHeight: false,
setWrapperSize: false,
zoom: {
maxRatio: 3,
minRatio: 1,
toggle: false,
},
touchRatio: 1,
touchAngle: 45,
simulateTouch: true,
touchStartPreventDefault: false,
shortSwipes: true,
longSwipes: true,
longSwipesRatio: 0.5,
longSwipesMs: 300,
followFinger: true,
threshold: 0,
touchMoveStopPropagation: true,
touchReleaseOnEdges: false,
iOSEdgeSwipeDetection: false,
iOSEdgeSwipeThreshold: 20,
resistance: true,
resistanceRatio: 0.85,
watchSlidesProgress: false,
watchSlidesVisibility: false,
preventClicks: true,
preventClicksPropagation: true,
slideToClickedSlide: false,
loopAdditionalSlides: 0,
noSwiping: true,
runCallbacksOnInit: true,
coverflowEffect: {
rotate: 50,
stretch: 0,
depth: 100,
modifier: 1,
slideShadows: true
},
flipEffect: {
slideShadows: true,
limitRotation: true
},
cubeEffect: {
slideShadows: true,
shadow: true,
shadowOffset: 20,
shadowScale: 0.94
},
fadeEffect: {
crossFade: false
},
a11y: {
prevSlideMessage: 'Previous slide',
nextSlideMessage: 'Next slide',
firstSlideMessage: 'This is the first slide',
lastSlideMessage: 'This is the last slide'
}
};
if (this.pager) {
swiperOptions.pagination = {
el: this.paginationEl!,
type: 'bullets',
clickable: false,
hideOnClick: false,
};
}
if (this.scrollbar) {
swiperOptions.scrollbar = {
el: this.scrollbarEl!,
hide: true,
};
}
// Keep the event options separate; we don't want users
// overwriting these.
const eventOptions: SwiperOptions = {
on: {
init: () => {
setTimeout(() => {
this.ionSlidesDidLoad.emit();
}, 20);
},
slideChangeTransitionStart: this.ionSlideWillChange.emit as any,
slideChangeTransitionEnd: this.ionSlideDidChange.emit as any,
slideNextTransitionStart: this.ionSlideNextStart.emit as any,
slidePrevTransitionStart: this.ionSlidePrevStart.emit as any,
slideNextTransitionEnd: this.ionSlideNextEnd.emit as any,
slidePrevTransitionEnd: this.ionSlidePrevEnd.emit as any,
transitionStart: this.ionSlideTransitionStart.emit as any,
transitionEnd: this.ionSlideTransitionEnd.emit as any,
sliderMove: this.ionSlideDrag.emit as any,
reachBeginning: this.ionSlideReachStart.emit as any,
reachEnd: this.ionSlideReachEnd.emit as any,
touchStart: this.ionSlideTouchStart.emit as any,
touchEnd: this.ionSlideTouchEnd.emit as any,
tap: this.ionSlideTap.emit as any,
doubleTap: this.ionSlideDoubleTap.emit as any
}
};
const customEvents = (!!this.options && !!this.options.on) ? this.options.on : {};
// merge "on" event listeners, while giving our event listeners priority
const mergedEventOptions = { on: { ...customEvents, ...eventOptions.on } };
// Merge the base options, user options, and events together, then pass to Swiper
return { ...swiperOptions, ...this.options, ...mergedEventOptions };
}
render() {
const mode = getIonMode(this);
return (
<Host
class={{
[`${mode}`]: true,
// Used internally for styling
[`slides-${mode}`]: true,
'swiper-container': true
}}
>
<div class="swiper-wrapper">
<slot></slot>
</div>
{this.pager && <div class="swiper-pagination" ref={el => this.paginationEl = el}></div>}
{this.scrollbar && <div class="swiper-scrollbar" ref={el => this.scrollbarEl = el}></div>}
</Host>
);
}
}
const waitForSlides = (el: HTMLElement) => {
return Promise.all(
Array.from(el.querySelectorAll('ion-slide')).map(s => s.componentOnReady())
);
};
| Slides |
deepDistinct.ts | const computeDeepDistinct = <T>(array: T[]): T[] =>
Array.from(new Set(array.map((element: T): string => JSON.stringify(element))))
.map((element: string): T => JSON.parse(element))
export {
computeDeepDistinct, | } |
|
serializers.py | import re
from urllib.parse import urlsplit, urlunsplit
from django.http.request import QueryDict
from django.urls import reverse
from rest_framework import exceptions, serializers
import olympia.core.logger
from olympia import activity, amo
from olympia.accounts.serializers import (
BaseUserSerializer,
UserProfileBasketSyncSerializer,
)
from olympia.amo.templatetags.jinja_helpers import absolutify
from olympia.amo.utils import sorted_groupby
from olympia.api.fields import (
ESTranslationSerializerField,
GetTextTranslationSerializerField,
OutgoingTranslationField,
OutgoingURLField,
ReverseChoiceField,
SplitField,
TranslationSerializerField,
)
from olympia.api.serializers import BaseESSerializer
from olympia.api.utils import is_gate_active
from olympia.applications.models import AppVersion
from olympia.bandwagon.models import Collection
from olympia.blocklist.models import Block
from olympia.constants.applications import APPS, APPS_ALL, APP_IDS
from olympia.constants.base import ADDON_TYPE_CHOICES_API
from olympia.constants.categories import CATEGORIES, CATEGORIES_BY_ID
from olympia.constants.promoted import PROMOTED_GROUPS, RECOMMENDED
from olympia.files.models import File, FileUpload
from olympia.files.utils import parse_addon
from olympia.promoted.models import PromotedAddon
from olympia.search.filters import AddonAppVersionQueryParam
from olympia.ratings.utils import get_grouped_ratings
from olympia.users.models import UserProfile
from olympia.versions.models import (
ApplicationsVersions,
License,
Version,
VersionPreview,
)
from .models import Addon, Preview, ReplacementAddon, attach_tags
class FileSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField()
platform = serializers.SerializerMethodField()
status = ReverseChoiceField(choices=list(amo.STATUS_CHOICES_API.items()))
permissions = serializers.ListField(child=serializers.CharField())
optional_permissions = serializers.ListField(child=serializers.CharField())
is_restart_required = serializers.SerializerMethodField()
is_webextension = serializers.SerializerMethodField()
class Meta:
model = File
fields = (
'id',
'created',
'hash',
'is_restart_required',
'is_webextension',
'is_mozilla_signed_extension',
'platform',
'size',
'status',
'url',
'permissions',
'optional_permissions',
)
def get_url(self, obj):
return obj.get_absolute_url()
def to_representation(self, obj):
data = super().to_representation(obj)
request = self.context.get('request', None)
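# Drop the shimmed fields for API versions where the corresponding gate is inactive.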
if request and not is_gate_active(request, 'platform-shim'):
data.pop('platform', None)
if request and not is_gate_active(request, 'is-restart-required-shim'):
data.pop('is_restart_required', None)
if request and not is_gate_active(request, 'is-webextension-shim'):
data.pop('is_webextension', None)
return data
def get_platform(self, obj):
# platform is gone, but we need to keep the API backwards compatible so
# fake it by just returning 'all' all the time.
return 'all'
def get_is_restart_required(self, obj):
# is_restart_required is gone from the model and all add-ons are restartless now,
# so fake it for older API clients with False
return False
def get_is_webextension(self, obj):
# is_webextension is always True these days because all add-ons are webextensions,
# but fake it for older API clients anyway.
return True
class PreviewSerializer(serializers.ModelSerializer):
caption = TranslationSerializerField()
image_url = serializers.SerializerMethodField()
thumbnail_url = serializers.SerializerMethodField()
image_size = serializers.ReadOnlyField(source='image_dimensions')
thumbnail_size = serializers.ReadOnlyField(source='thumbnail_dimensions')
class Meta:
# Note: this serializer can also be used for VersionPreview.
model = Preview
fields = (
'id',
'caption',
'image_size',
'image_url',
'thumbnail_size',
'thumbnail_url',
)
def get_image_url(self, obj):
return absolutify(obj.image_url)
def get_thumbnail_url(self, obj):
return absolutify(obj.thumbnail_url)
class ESPreviewSerializer(BaseESSerializer, PreviewSerializer):
# Because we have translated fields and dates coming from ES, we can't use
# a regular PreviewSerializer to handle previews for ESAddonSerializer.
# Unfortunately we also need to get the class right (it can be either
# Preview or VersionPreview) so fake_object() implementation in this class
# does nothing; the instance has already been created by a parent
# serializer.
datetime_fields = ('modified',)
translated_fields = ('caption',)
def fake_object(self, data):
return data
class LicenseNameSerializerField(serializers.Field):
"""Field to handle license name translations.
Builtin licenses, for better or worse, don't necessarily have their name
translated in the database like custom licenses. Instead, the string is in
this repo, and translated using gettext. This field deals with that
difference, delegating the rendering to TranslationSerializerField or
GetTextTranslationSerializerField depending on what the license instance
is.
"""
builtin_translation_field_class = GetTextTranslationSerializerField
custom_translation_field_class = TranslationSerializerField
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.builtin_translation_field = self.builtin_translation_field_class()
self.custom_translation_field = self.custom_translation_field_class()
def bind(self, field_name, parent):
super().bind(field_name, parent)
self.builtin_translation_field.bind(field_name, parent)
self.custom_translation_field.bind(field_name, parent)
def get_attribute(self, obj):
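# Builtin licenses carry their gettext string on `_constant`; custom licenses
# go through the regular translation machinery.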
if obj._constant:
return self.builtin_translation_field.get_attribute(obj._constant)
else:
return self.custom_translation_field.get_attribute(obj)
def to_representation(self, obj):
# Like TranslationSerializerField, the bulk of the logic is in
# get_attribute(), we just have to return the data at this point.
return obj
class ESLicenseNameSerializerField(LicenseNameSerializerField):
"""Like LicenseNameSerializerField, but uses the data from ES to avoid
a database query for custom licenses.
BaseESSerializer automatically changes
TranslationSerializerField to ESTranslationSerializerField for all base
fields on the serializer, but License name has its own special field to
handle builtin licences so it's done separately."""
custom_translation_field_class = ESTranslationSerializerField
def attach_translations(self, obj, data, field_name):
return self.custom_translation_field.attach_translations(obj, data, field_name)
class LicenseSerializer(serializers.ModelSerializer):
is_custom = serializers.SerializerMethodField()
name = LicenseNameSerializerField()
text = TranslationSerializerField()
url = serializers.SerializerMethodField()
class Meta:
model = License
fields = ('id', 'is_custom', 'name', 'text', 'url')
def get_is_custom(self, obj):
return not bool(obj.builtin)
def get_url(self, obj):
return obj.url or self.get_version_license_url(obj)
def get_version_license_url(self, obj):
# We need the version associated with the license, because that's where
# the license_url() method lives. The problem is, normally we would not
# be able to do that, because there can be multiple versions for a
# given License. However, since we're serializing through a nested
# serializer, we cheat and use `instance.version_instance` which is
# set by SimpleVersionSerializer.to_representation() while serializing.
# Only get the version license url for non-builtin licenses.
if not obj.builtin and hasattr(obj, 'version_instance'):
return absolutify(obj.version_instance.license_url())
return None
def to_representation(self, instance):
data = super().to_representation(instance)
request = self.context.get('request', None)
if request and is_gate_active(request, 'del-version-license-is-custom'):
data.pop('is_custom', None)
return data
class CompactLicenseSerializer(LicenseSerializer):
class Meta:
model = License
fields = ('id', 'is_custom', 'name', 'url')
class MinimalVersionSerializer(serializers.ModelSerializer):
file = FileSerializer(read_only=True)
class Meta:
model = Version
fields = ('id', 'file', 'reviewed', 'version')
read_only_fields = fields
def to_representation(self, instance):
repr = super().to_representation(instance)
request = self.context.get('request', None)
if 'file' in repr and request and is_gate_active(request, 'version-files'):
# In v3/v4 files is expected to be a list but now we only have one file.
repr['files'] = [repr.pop('file')]
return repr
class VersionCompatabilityField(serializers.Field):
def to_internal_value(self, data):
"""Note: this returns unsaved and incomplete ApplicationsVersions objects that
need to have version set, and may have missing min or max AppVersion instances
for new Version instances. (As intended - we want to be able to partially
specify min or max and have the manifest or defaults be instead used).
"""
try:
if isinstance(data, list):
# if it's a list of apps, normalize into a dict first
data = {key: {} for key in data}
if isinstance(data, dict):
version = self.parent.instance
existing = version.compatible_apps if version else {}
qs = AppVersion.objects
internal = {}
for app_name, min_max in data.items():
app = amo.APPS[app_name]
apps_versions = existing.get(
app, ApplicationsVersions(application=app.id)
)
app_qs = qs.filter(application=app.id)
if 'max' in min_max:
apps_versions.max = app_qs.get(version=min_max['max'])
elif version:
apps_versions.max = app_qs.get(
version=amo.DEFAULT_WEBEXT_MAX_VERSION
)
app_qs = app_qs.exclude(version='*')
if 'min' in min_max:
apps_versions.min = app_qs.get(version=min_max['min'])
elif version:
apps_versions.min = app_qs.get(
version=amo.DEFAULT_WEBEXT_MIN_VERSIONS[app]
)
internal[app] = apps_versions
return internal
else:
# if it's neither it's not a valid input
raise exceptions.ValidationError('Invalid value')
except KeyError:
raise exceptions.ValidationError('Invalid app specified')
except AppVersion.DoesNotExist:
raise exceptions.ValidationError('Unknown app version specified')
def to_representation(self, value):
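# Serialize each app's compat range; if an app has no ApplicationsVersions
# entry, fall back to that app's D2C minimum and amo.FAKE_MAX_VERSION.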
return {
app.short: (
{
'min': compat.min.version,
'max': compat.max.version,
}
if compat
else {
'min': amo.D2C_MIN_VERSIONS.get(app.id, '1.0'),
'max': amo.FAKE_MAX_VERSION,
}
)
for app, compat in value.items()
}
class SimpleVersionSerializer(MinimalVersionSerializer):
compatibility = VersionCompatabilityField(
# default to just Desktop Firefox; most of the time developers don't develop
# their WebExtensions for Android. See https://bit.ly/2QaMicU
source='compatible_apps',
default=serializers.CreateOnlyDefault(
{
amo.APPS['firefox']: ApplicationsVersions( | )
}
),
)
edit_url = serializers.SerializerMethodField()
is_strict_compatibility_enabled = serializers.BooleanField(
source='file.strict_compatibility', read_only=True
)
license = CompactLicenseSerializer()
release_notes = TranslationSerializerField(required=False)
class Meta:
model = Version
fields = (
'id',
'compatibility',
'edit_url',
'file',
'is_strict_compatibility_enabled',
'license',
'release_notes',
'reviewed',
'version',
)
read_only_fields = fields
def to_representation(self, instance):
# Help the LicenseSerializer find the version we're currently serializing.
if 'license' in self.fields and instance.license:
instance.license.version_instance = instance
return super().to_representation(instance)
def get_edit_url(self, obj):
return absolutify(
obj.addon.get_dev_url('versions.edit', args=[obj.pk], prefix_only=True)
)
class VersionSerializer(SimpleVersionSerializer):
channel = ReverseChoiceField(
choices=list(amo.CHANNEL_CHOICES_API.items()), read_only=True
)
license = SplitField(
serializers.PrimaryKeyRelatedField(queryset=License.objects.builtins()),
LicenseSerializer(),
)
upload = serializers.SlugRelatedField(
slug_field='uuid', queryset=FileUpload.objects.all(), write_only=True
)
class Meta:
model = Version
fields = (
'id',
'channel',
'compatibility',
'edit_url',
'file',
'is_strict_compatibility_enabled',
'license',
'release_notes',
'reviewed',
'upload',
'version',
)
writeable_fields = (
'compatibility',
'license',
'release_notes',
'upload',
)
read_only_fields = tuple(set(fields) - set(writeable_fields))
def __init__(self, instance=None, data=serializers.empty, **kwargs):
self.addon = kwargs.pop('addon', None)
if instance and isinstance(data, dict):
data.pop('upload', None) # we only support upload field for create
super().__init__(instance=instance, data=data, **kwargs)
def validate_upload(self, value):
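# Only the user who created the upload may attach it, and it must have
# passed validation without timing out.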
own_upload = (request := self.context.get('request')) and (
request.user == value.user
)
if not own_upload or not value.valid or value.validation_timeout:
raise exceptions.ValidationError('Upload is not valid.')
return value
def _check_blocklist(self, guid, version_string):
# check the guid/version isn't in the addon blocklist
block_qs = Block.objects.filter(guid=guid) if guid else ()
if block_qs and block_qs.first().is_version_blocked(version_string):
msg = (
'Version {version} matches {block_link} for this add-on. '
'You can contact {amo_admins} for additional information.'
)
raise exceptions.ValidationError(
msg.format(
version=version_string,
block_link=absolutify(reverse('blocklist.block', args=[guid])),
amo_admins='[email protected]',
),
)
def validate(self, data):
if not self.instance:
# Parse the file to get and validate package data with the addon.
self.parsed_data = parse_addon(
data.get('upload'), addon=self.addon, user=self.context['request'].user
)
guid = self.addon.guid if self.addon else self.parsed_data.get('guid')
self._check_blocklist(guid, self.parsed_data.get('version'))
else:
data.pop('upload', None) # upload can only be set during create
return data
def create(self, validated_data):
upload = validated_data.get('upload')
parsed_and_validated_data = {
**self.parsed_data,
**validated_data,
'license_id': validated_data['license'].id,
}
version = Version.from_upload(
upload=upload,
addon=self.addon or validated_data.get('addon'),
channel=upload.channel,
compatibility=validated_data.get('compatible_apps'),
parsed_data=parsed_and_validated_data,
)
upload.update(addon=version.addon)
return version
def update(self, instance, validated_data):
instance = super().update(instance, validated_data)
if 'compatible_apps' in validated_data:
instance.set_compatible_apps(validated_data['compatible_apps'])
return instance
class VersionListSerializer(VersionSerializer):
# When we're listing versions, we don't want to include the full license
# text every time: we only do this for the version detail endpoint.
license = CompactLicenseSerializer()
class CurrentVersionSerializer(SimpleVersionSerializer):
def to_representation(self, obj):
# If the add-on is a langpack, and `appversion` is passed, try to
# determine the latest public compatible version and replace the obj
# with the result. Because of the perf impact, only done for langpacks
# in the detail API.
request = self.context.get('request')
view = self.context.get('view')
addon = obj.addon
if (
request
and request.GET.get('appversion')
and getattr(view, 'action', None) == 'retrieve'
and addon.type == amo.ADDON_LPAPP
):
obj = self.get_current_compatible_version(addon)
return super().to_representation(obj)
def get_current_compatible_version(self, addon):
"""
Return latest public version compatible with the app & appversion
passed through the request, or fall back to addon.current_version if
none is found.
Only use on langpacks if the appversion parameter is present.
"""
request = self.context.get('request')
try:
# AddonAppVersionQueryParam.get_values() returns (app_id, min, max)
# but we want {'min': min, 'max': max}.
value = AddonAppVersionQueryParam(request.GET).get_values()
application = value[0]
appversions = dict(zip(('min', 'max'), value[1:]))
except ValueError as exc:
raise exceptions.ParseError(str(exc))
version_qs = Version.objects.latest_public_compatible_with(
application, appversions
).filter(addon=addon)
return version_qs.first() or addon.current_version
class ESCompactLicenseSerializer(BaseESSerializer, CompactLicenseSerializer):
name = ESLicenseNameSerializerField()
translated_fields = ('name',)
def fake_object(self, data):
# We just pass the data as the fake object will have been created
# before by ESAddonSerializer.fake_version_object()
return data
class ESCurrentVersionSerializer(BaseESSerializer, CurrentVersionSerializer):
license = ESCompactLicenseSerializer()
datetime_fields = ('reviewed',)
translated_fields = ('release_notes',)
def fake_object(self, data):
# We just pass the data as the fake object will have been created
# before by ESAddonSerializer.fake_version_object()
return data
class AddonEulaPolicySerializer(serializers.ModelSerializer):
eula = TranslationSerializerField()
privacy_policy = TranslationSerializerField()
class Meta:
model = Addon
fields = (
'eula',
'privacy_policy',
)
class AddonDeveloperSerializer(BaseUserSerializer):
picture_url = serializers.SerializerMethodField()
class Meta(BaseUserSerializer.Meta):
fields = BaseUserSerializer.Meta.fields + ('picture_url',)
read_only_fields = fields
class PromotedAddonSerializer(serializers.ModelSerializer):
GROUP_CHOICES = [(group.id, group.api_name) for group in PROMOTED_GROUPS]
apps = serializers.SerializerMethodField()
category = ReverseChoiceField(choices=GROUP_CHOICES, source='group_id')
class Meta:
model = PromotedAddon
fields = (
'apps',
'category',
)
def get_apps(self, obj):
return [app.short for app in obj.approved_applications]
class CategoriesSerializerField(serializers.Field):
def to_internal_value(self, data):
try:
categories = []
for app_name, category_names in data.items():
app_cats = CATEGORIES[APPS[app_name].id]
# We don't know the addon_type at this point, so try them all and we'll
# drop anything that's wrong later in AddonSerializer.validate
all_cat_slugs = set()
for type_cats in app_cats.values():
categories.extend(
type_cats[name] for name in category_names if name in type_cats
)
all_cat_slugs.update(type_cats.keys())
# Now double-check all the category names were found
if not all_cat_slugs.issuperset(category_names):
raise exceptions.ValidationError('Invalid category name.')
return categories
except KeyError:
raise exceptions.ValidationError('Invalid app name.')
def to_representation(self, value):
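# Group the flat category list by application short name and expose slugs only.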
grouped = sorted_groupby(
sorted(value),
key=lambda x: getattr(amo.APP_IDS.get(x.application), 'short', ''),
)
return {
app_name: [cat.slug for cat in categories]
for app_name, categories in grouped
}
class ContributionSerializerField(OutgoingURLField):
def to_representation(self, value):
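# Append AMO's contribution UTM parameters to the URL's query string before
# handing off to the parent OutgoingURLField.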
if not value:
# don't add anything when it's not set.
return value
parts = urlsplit(value)
query = QueryDict(parts.query, mutable=True)
query.update(amo.CONTRIBUTE_UTM_PARAMS)
return super().to_representation(
urlunsplit(
(
parts.scheme,
parts.netloc,
parts.path,
query.urlencode(),
parts.fragment,
)
)
)
class AddonSerializer(serializers.ModelSerializer):
authors = AddonDeveloperSerializer(
many=True, source='listed_authors', read_only=True
)
categories = CategoriesSerializerField(source='all_categories')
contributions_url = ContributionSerializerField(
source='contributions', read_only=True
)
current_version = CurrentVersionSerializer(read_only=True)
description = TranslationSerializerField(required=False)
developer_comments = TranslationSerializerField(required=False)
edit_url = serializers.SerializerMethodField()
has_eula = serializers.SerializerMethodField()
has_privacy_policy = serializers.SerializerMethodField()
homepage = OutgoingTranslationField(required=False)
icon_url = serializers.SerializerMethodField()
icons = serializers.SerializerMethodField()
is_source_public = serializers.SerializerMethodField()
is_featured = serializers.SerializerMethodField()
name = TranslationSerializerField(required=False)
previews = PreviewSerializer(many=True, source='current_previews', read_only=True)
promoted = PromotedAddonSerializer(read_only=True)
ratings = serializers.SerializerMethodField()
ratings_url = serializers.SerializerMethodField()
review_url = serializers.SerializerMethodField()
status = ReverseChoiceField(
choices=list(amo.STATUS_CHOICES_API.items()), read_only=True
)
summary = TranslationSerializerField(required=False)
support_email = TranslationSerializerField(required=False)
support_url = OutgoingTranslationField(required=False)
tags = serializers.SerializerMethodField()
type = ReverseChoiceField(
choices=list(amo.ADDON_TYPE_CHOICES_API.items()), read_only=True
)
url = serializers.SerializerMethodField()
version = VersionSerializer(write_only=True)
versions_url = serializers.SerializerMethodField()
class Meta:
model = Addon
fields = (
'id',
'authors',
'average_daily_users',
'categories',
'contributions_url',
'created',
'current_version',
'default_locale',
'description',
'developer_comments',
'edit_url',
'guid',
'has_eula',
'has_privacy_policy',
'homepage',
'icon_url',
'icons',
'is_disabled',
'is_experimental',
'is_featured',
'is_source_public',
'last_updated',
'name',
'previews',
'promoted',
'ratings',
'ratings_url',
'requires_payment',
'review_url',
'slug',
'status',
'summary',
'support_email',
'support_url',
'tags',
'type',
'url',
'version',
'versions_url',
'weekly_downloads',
)
writeable_fields = (
'categories',
'description',
'developer_comments',
'homepage',
'name',
'slug',
'summary',
'support_email',
'support_url',
'version',
)
read_only_fields = tuple(set(fields) - set(writeable_fields))
def __init__(self, instance=None, data=serializers.empty, **kwargs):
if instance and isinstance(data, dict):
data.pop('version', None) # we only support version field for create
super().__init__(instance=instance, data=data, **kwargs)
def to_representation(self, obj):
data = super().to_representation(obj)
request = self.context.get('request', None)
if request and is_gate_active(request, 'del-addons-created-field'):
data.pop('created', None)
if request and not is_gate_active(request, 'is-source-public-shim'):
data.pop('is_source_public', None)
if request and not is_gate_active(request, 'is-featured-addon-shim'):
data.pop('is_featured', None)
return data
def get_has_eula(self, obj):
return bool(getattr(obj, 'has_eula', obj.eula))
def get_is_featured(self, obj):
# featured is gone, but we need to keep the API backwards compatible so
# fake it with promoted status instead.
return bool(obj.promoted and obj.promoted.group == RECOMMENDED)
def get_has_privacy_policy(self, obj):
return bool(getattr(obj, 'has_privacy_policy', obj.privacy_policy))
def get_tags(self, obj):
if not hasattr(obj, 'tag_list'):
attach_tags([obj])
# attach_tags() might not have attached anything to the addon, if it
# had no tags.
return getattr(obj, 'tag_list', [])
def get_url(self, obj):
# Use absolutify(get_detail_url()), get_absolute_url() calls
# get_url_path() which does an extra check on current_version that is
# annoying in subclasses which don't want to load that version.
return absolutify(obj.get_detail_url())
def get_edit_url(self, obj):
return absolutify(obj.get_dev_url())
def get_ratings_url(self, obj):
return absolutify(obj.ratings_url)
def get_versions_url(self, obj):
return absolutify(obj.versions_url)
def get_review_url(self, obj):
return absolutify(reverse('reviewers.review', args=[obj.pk]))
def get_icon_url(self, obj):
return absolutify(obj.get_icon_url(64))
def get_icons(self, obj):
get_icon = obj.get_icon_url
return {str(size): absolutify(get_icon(size)) for size in amo.ADDON_ICON_SIZES}
def get_ratings(self, obj):
ratings = {
'average': obj.average_rating,
'bayesian_average': obj.bayesian_rating,
'count': obj.total_ratings,
'text_count': obj.text_ratings_count,
}
if (request := self.context.get('request', None)) and (
grouped := get_grouped_ratings(request, obj)
):
ratings['grouped_counts'] = grouped
return ratings
def get_is_source_public(self, obj):
return False
def validate(self, data):
if not self.instance:
addon_type = self.fields['version'].parsed_data['type']
else:
addon_type = self.instance.type
if 'all_categories' in data:
# filter out categories for the wrong type.
# There might be dupes, e.g. "other" is a category for 2 types
slugs = {cat.slug for cat in data['all_categories']}
data['all_categories'] = [
cat for cat in data['all_categories'] if cat.type == addon_type
]
# double check we didn't lose any
if slugs != {cat.slug for cat in data['all_categories']}:
raise exceptions.ValidationError(
{'categories': 'Invalid category name.'}
)
return data
def create(self, validated_data):
upload = validated_data.get('version').get('upload')
addon = Addon.initialize_addon_from_upload(
data={**self.fields['version'].parsed_data, **validated_data},
upload=upload,
channel=upload.channel,
user=self.context['request'].user,
)
# Add categories
addon.set_categories(validated_data.get('all_categories', []))
self.fields['version'].create(
{**validated_data.get('version', {}), 'addon': addon}
)
activity.log_create(amo.LOG.CREATE_ADDON, addon)
olympia.core.logger.getLogger('z.addons').info(
f'New addon {addon!r} from {upload!r}'
)
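# An incomplete (STATUS_NULL) add-on with full metadata and a listed upload
# is nominated for review right away.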
if (
addon.status == amo.STATUS_NULL
and addon.has_complete_metadata()
and upload.channel == amo.RELEASE_CHANNEL_LISTED
):
addon.update(status=amo.STATUS_NOMINATED)
return addon
def update(self, instance, validated_data):
instance = super().update(instance, validated_data)
if 'all_categories' in validated_data:
del instance.all_categories # super.update will have set it.
instance.set_categories(validated_data['all_categories'])
return instance
class AddonSerializerWithUnlistedData(AddonSerializer):
latest_unlisted_version = SimpleVersionSerializer(read_only=True)
class Meta:
model = Addon
fields = AddonSerializer.Meta.fields + ('latest_unlisted_version',)
read_only_fields = tuple(
set(fields) - set(AddonSerializer.Meta.writeable_fields)
)
class SimpleAddonSerializer(AddonSerializer):
class Meta:
model = Addon
fields = ('id', 'slug', 'name', 'icon_url')
class ESAddonSerializer(BaseESSerializer, AddonSerializer):
# Override various fields for related objects for which we don't want to expose
# data the same way as the regular serializer does (usually because
# some of the data is not indexed in ES).
authors = BaseUserSerializer(many=True, source='listed_authors')
current_version = ESCurrentVersionSerializer()
previews = ESPreviewSerializer(many=True, source='current_previews')
_score = serializers.SerializerMethodField()
datetime_fields = ('created', 'last_updated', 'modified')
translated_fields = (
'name',
'description',
'developer_comments',
'homepage',
'summary',
'support_email',
'support_url',
)
class Meta:
model = Addon
fields = AddonSerializer.Meta.fields + ('_score',)
def fake_preview_object(self, obj, data, model_class=Preview):
# This is what ESPreviewSerializer.fake_object() would do, but we do
# it here and make that fake_object() method a no-op in order to have
# access to the right model_class to use - VersionPreview for static
# themes, Preview for the rest.
preview = model_class(id=data['id'], sizes=data.get('sizes', {}))
preview.addon = obj
preview.version = obj.current_version
preview_serializer = self.fields['previews'].child
# Attach base attributes that have the same name/format in ES and in
# the model.
preview_serializer._attach_fields(preview, data, ('modified',))
# Attach translations.
preview_serializer._attach_translations(
preview, data, preview_serializer.translated_fields
)
return preview
def fake_file_object(self, obj, data):
file_ = File(
id=data['id'],
created=self.handle_date(data['created']),
hash=data['hash'],
filename=data['filename'],
is_mozilla_signed_extension=data.get('is_mozilla_signed_extension'),
size=data['size'],
status=data['status'],
strict_compatibility=data.get('strict_compatibility', False),
version=obj,
)
file_.permissions = data.get(
'permissions', data.get('webext_permissions_list', [])
)
file_.optional_permissions = data.get('optional_permissions', [])
return file_
def fake_version_object(self, obj, data, channel):
if data:
version = Version(
addon=obj,
id=data['id'],
reviewed=self.handle_date(data['reviewed']),
version=data['version'],
channel=channel,
)
version.file = self.fake_file_object(version, data['files'][0])
# In ES we store integers for the appversion info, we need to
# convert it back to strings.
compatible_apps = {}
for app_id, compat_dict in data.get('compatible_apps', {}).items():
app_name = APPS_ALL[int(app_id)]
compatible_apps[app_name] = ApplicationsVersions(
min=AppVersion(version=compat_dict.get('min_human', '')),
max=AppVersion(version=compat_dict.get('max_human', '')),
)
version.compatible_apps = compatible_apps
version_serializer = self.fields.get('current_version') or None
if version_serializer:
version_serializer._attach_translations(
version, data, version_serializer.translated_fields
)
if 'license' in data and version_serializer:
license_serializer = version_serializer.fields['license']
version.license = License(id=data['license']['id'])
license_serializer._attach_fields(
version.license, data['license'], ('builtin', 'url')
)
license_serializer._attach_translations(
version.license, data['license'], ('name',)
)
else:
version.license = None
else:
version = None
return version
def fake_object(self, data):
"""Create a fake instance of Addon and related models from ES data."""
obj = Addon(id=data['id'], slug=data['slug'])
# Attach base attributes that have the same name/format in ES and in
# the model.
self._attach_fields(
obj,
data,
(
'average_daily_users',
'bayesian_rating',
'contributions',
'created',
'default_locale',
'guid',
'has_eula',
'has_privacy_policy',
'hotness',
'icon_hash',
'icon_type',
'is_experimental',
'last_updated',
'modified',
'requires_payment',
'slug',
'status',
'type',
'weekly_downloads',
),
)
# Attach attributes that do not have the same name/format in ES.
obj.tag_list = data.get('tags', [])
obj.all_categories = [
CATEGORIES_BY_ID[cat_id] for cat_id in data.get('category', [])
]
# Not entirely accurate, but enough in the context of the search API.
obj.disabled_by_user = data.get('is_disabled', False)
# Attach translations (they require special treatment).
self._attach_translations(obj, data, self.translated_fields)
# Attach related models (also faking them). `current_version` is a
# property we can't write to, so we use the underlying field which
# begins with an underscore.
data_version = data.get('current_version') or {}
obj._current_version = self.fake_version_object(
obj, data_version, amo.RELEASE_CHANNEL_LISTED
)
obj._current_version_id = data_version.get('id')
data_authors = data.get('listed_authors', [])
obj.listed_authors = [
UserProfile(
id=data_author['id'],
display_name=data_author['name'],
username=data_author['username'],
is_public=data_author.get('is_public', False),
)
for data_author in data_authors
]
is_static_theme = data.get('type') == amo.ADDON_STATICTHEME
preview_model_class = VersionPreview if is_static_theme else Preview
obj.current_previews = [
self.fake_preview_object(obj, preview_data, model_class=preview_model_class)
for preview_data in data.get('previews', [])
]
promoted = data.get('promoted', None)
if promoted:
# set .approved_for_groups cached_property because it's used in
# .approved_applications.
approved_for_apps = promoted.get('approved_for_apps')
obj.promoted = PromotedAddon(
addon=obj,
approved_application_ids=approved_for_apps,
group_id=promoted['group_id'],
)
# we can safely regenerate these tuples because
# .approved_applications only cares about the current group
obj._current_version.approved_for_groups = (
(obj.promoted.group, APP_IDS.get(app_id))
for app_id in approved_for_apps
)
else:
obj.promoted = None
ratings = data.get('ratings', {})
obj.average_rating = ratings.get('average')
obj.total_ratings = ratings.get('count')
obj.text_ratings_count = ratings.get('text_count')
return obj
def get__score(self, obj):
# es_meta is added by BaseESSerializer.to_representation() before DRF's
# to_representation() is called, so it's present on all objects.
return obj._es_meta['score']
def get_ratings(self, obj):
return {
'average': obj.average_rating,
'bayesian_average': obj.bayesian_rating,
'count': obj.total_ratings,
'text_count': obj.text_ratings_count,
}
def to_representation(self, obj):
data = super().to_representation(obj)
request = self.context.get('request')
if (
request
and '_score' in data
and not is_gate_active(request, 'addons-search-_score-field')
):
data.pop('_score')
return data
class ESAddonAutoCompleteSerializer(ESAddonSerializer):
class Meta(ESAddonSerializer.Meta):
fields = ('id', 'icon_url', 'name', 'promoted', 'type', 'url')
model = Addon
def get_url(self, obj):
# Addon.get_absolute_url() calls get_url_path(), which wants
# _current_version_id to exist, but that's just a safeguard. We don't
# care and don't want to fetch the current version field to improve
# perf, so give it a fake one.
obj._current_version_id = 1
return obj.get_absolute_url()
class StaticCategorySerializer(serializers.Serializer):
"""Serializes a `StaticCategory` as found in constants.categories"""
id = serializers.IntegerField()
name = serializers.CharField()
slug = serializers.CharField()
application = serializers.SerializerMethodField()
misc = serializers.BooleanField()
type = serializers.SerializerMethodField()
weight = serializers.IntegerField()
description = serializers.CharField()
def get_application(self, obj):
return APPS_ALL[obj.application].short
def get_type(self, obj):
return ADDON_TYPE_CHOICES_API[obj.type]
class LanguageToolsSerializer(AddonSerializer):
target_locale = serializers.CharField()
current_compatible_version = serializers.SerializerMethodField()
class Meta:
model = Addon
fields = (
'id',
'current_compatible_version',
'default_locale',
'guid',
'name',
'slug',
'target_locale',
'type',
'url',
)
def get_current_compatible_version(self, obj):
compatible_versions = getattr(obj, 'compatible_versions', None)
if compatible_versions is not None:
data = MinimalVersionSerializer(
compatible_versions, context=self.context, many=True
).data
try:
# 99% of the cases there will only be one result, since most
# language packs are automatically uploaded for a given app
# version. If there are more, pick the most recent one.
return data[0]
except IndexError:
# This should not happen, because the queryset in the view is
# supposed to filter results to only return add-ons that do
# have at least one compatible version, but let's not fail
# too loudly if the unthinkable happens...
pass
return None
def to_representation(self, obj):
data = super().to_representation(obj)
request = self.context['request']
if (
AddonAppVersionQueryParam.query_param not in request.GET
and 'current_compatible_version' in data
):
data.pop('current_compatible_version')
if request and is_gate_active(request, 'addons-locale_disambiguation-shim'):
data['locale_disambiguation'] = None
return data
class VersionBasketSerializer(SimpleVersionSerializer):
class Meta:
model = Version
fields = ('id', 'compatibility', 'is_strict_compatibility_enabled', 'version')
class AddonBasketSyncSerializer(AddonSerializerWithUnlistedData):
# We want to send all authors to basket, not just listed ones, and have
# the full basket-specific serialization.
authors = UserProfileBasketSyncSerializer(many=True)
current_version = VersionBasketSerializer()
is_recommended = serializers.SerializerMethodField()
latest_unlisted_version = VersionBasketSerializer()
name = serializers.SerializerMethodField()
class Meta:
model = Addon
fields = (
'authors',
'average_daily_users',
'categories',
'current_version',
'default_locale',
'guid',
'id',
'is_disabled',
'is_recommended',
'last_updated',
'latest_unlisted_version',
'name',
'ratings',
'slug',
'status',
'type',
)
read_only_fields = fields
def get_name(self, obj):
# Basket doesn't want translations; we run the serialization task under
# the add-on's default locale, so we can just return the name as a string.
return str(obj.name)
def get_is_recommended(self, obj):
# Borrow the logic from is_featured so we don't have to define it twice
return self.get_is_featured(obj)
class ReplacementAddonSerializer(serializers.ModelSerializer):
replacement = serializers.SerializerMethodField()
ADDON_PATH_REGEX = r"""/addon/(?P<addon_id>[^/<>"']+)/$"""
COLLECTION_PATH_REGEX = (
r"""/collections/(?P<user_id>[^/<>"']+)/(?P<coll_slug>[^/]+)/$"""
)
class Meta:
model = ReplacementAddon
fields = ('guid', 'replacement')
def _get_addon_guid(self, addon_id):
try:
addon = Addon.objects.public().id_or_slug(addon_id).get()
except Addon.DoesNotExist:
return []
return [addon.guid]
def _get_collection_guids(self, user_id, collection_slug):
try:
get_args = {'slug': collection_slug, 'listed': True}
if isinstance(user_id, str) and not user_id.isdigit():
get_args.update(**{'author__username': user_id})
else:
get_args.update(**{'author': user_id})
collection = Collection.objects.get(**get_args)
except Collection.DoesNotExist:
return []
valid_q = Addon.objects.get_queryset().valid_q([amo.STATUS_APPROVED])
return list(collection.addons.filter(valid_q).values_list('guid', flat=True))
def get_replacement(self, obj):
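# Resolve the stored replacement path into a list of GUIDs: either a single
# add-on detail URL or every valid add-on in a collection URL.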
if obj.has_external_url():
# It's an external url so no guids.
return []
addon_match = re.search(self.ADDON_PATH_REGEX, obj.path)
if addon_match:
return self._get_addon_guid(addon_match.group('addon_id'))
coll_match = re.search(self.COLLECTION_PATH_REGEX, obj.path)
if coll_match:
return self._get_collection_guids(
coll_match.group('user_id'), coll_match.group('coll_slug')
)
return [] | application=amo.APPS['firefox'].id |
samlsp.go | // Package samlsp provides helpers that can be used to protect web
// services using SAML.
package samlsp
import (
"crypto/rsa"
"crypto/x509"
"encoding/xml"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"time"
"github.com/peterdeka/saml"
"github.com/peterdeka/saml/logger"
)
const defaultTokenMaxAge = time.Hour
// Options represents the parameters for creating a new middleware
type Options struct {
URL url.URL
Key *rsa.PrivateKey
Logger logger.Interface
Certificate *x509.Certificate
AllowIDPInitiated bool
IDPMetadata *saml.EntityDescriptor
IDPMetadataURL *url.URL
HTTPClient *http.Client
CookieMaxAge time.Duration
CookieSecure bool
ForceAuthn bool
AuthnRequestsSigned bool
}
// New creates a new Middleware
func New(opts Options) (*Middleware, error) {
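// Derive the SP metadata and assertion consumer service (ACS) endpoints
// from the root URL the service is hosted at.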
metadataURL := opts.URL
metadataURL.Path = metadataURL.Path + "/saml/metadata"
acsURL := opts.URL
acsURL.Path = acsURL.Path + "/saml/acs"
logr := opts.Logger
if logr == nil {
logr = logger.DefaultLogger
}
tokenMaxAge := opts.CookieMaxAge
if opts.CookieMaxAge == 0 {
tokenMaxAge = defaultTokenMaxAge
}
m := &Middleware{
ServiceProvider: saml.ServiceProvider{
Key: opts.Key,
Logger: logr,
Certificate: opts.Certificate,
MetadataURL: metadataURL,
AcsURL: acsURL,
IDPMetadata: opts.IDPMetadata,
ForceAuthn: &opts.ForceAuthn,
AuthnRequestsSigned: opts.AuthnRequestsSigned,
},
AllowIDPInitiated: opts.AllowIDPInitiated,
TokenMaxAge: tokenMaxAge,
}
cookieStore := ClientCookies{
ServiceProvider: &m.ServiceProvider,
Name: defaultCookieName,
Domain: opts.URL.Host,
Secure: opts.CookieSecure,
}
m.ClientState = &cookieStore
m.ClientToken = &cookieStore
// fetch the IDP metadata if needed.
if opts.IDPMetadataURL == nil {
return m, nil
}
c := opts.HTTPClient
if c == nil {
c = http.DefaultClient
}
req, err := http.NewRequest("GET", opts.IDPMetadataURL.String(), nil)
if err != nil {
return nil, err
}
// Some providers (like OneLogin) do not work properly unless the User-Agent header is specified.
// Setting the user agent prevents the 403 Forbidden errors.
req.Header.Set("User-Agent", "Golang; github.com/peterdeka/saml")
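// Keep retrying the metadata fetch, pausing 5 seconds between attempts,
// and give up after more than 10 failed attempts.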
for i := 0; true; i++ {
resp, err := c.Do(req)
if err == nil && resp.StatusCode != http.StatusOK {
err = fmt.Errorf("%d %s", resp.StatusCode, resp.Status)
}
var data []byte
if err == nil {
data, err = ioutil.ReadAll(resp.Body)
resp.Body.Close()
}
if err != nil {
if i > 10 {
return nil, err
}
logr.Printf("ERROR: %s: %s (will retry)", opts.IDPMetadataURL, err)
time.Sleep(5 * time.Second)
continue
}
entity := &saml.EntityDescriptor{} | err = xml.Unmarshal(data, entity)
// this comparison is ugly, but it is how the error is generated in encoding/xml
if err != nil && err.Error() == "expected element type <EntityDescriptor> but have <EntitiesDescriptor>" {
entities := &saml.EntitiesDescriptor{}
if err := xml.Unmarshal(data, entities); err != nil {
return nil, err
}
err = fmt.Errorf("no entity found with IDPSSODescriptor")
for i, e := range entities.EntityDescriptors {
if len(e.IDPSSODescriptors) > 0 {
entity = &entities.EntityDescriptors[i]
err = nil
}
}
}
if err != nil {
return nil, err
}
m.ServiceProvider.IDPMetadata = entity
return m, nil
}
panic("unreachable")
} | |
Footer.ts | import { h, S1Node } from 'stage1';
export type FooterComponent = S1Node & HTMLDivElement;
const view = h(`
<footer class=microdoc-footer>
Powered by <a href=https://microdoc.js.org class=microdoc-footer-link target=_blank rel=noopener>microdoc</a>
</footer>
`);
export function Footer(): FooterComponent {
const root = view as FooterComponent;
return root; | } |
|
comment.rs | use once_cell::sync::Lazy;
use percent_encoding::{NON_ALPHANUMERIC, utf8_percent_encode};
use regex::Regex;
use crate::{StrExt, string_ext::Indent, StringExt};
pub fn strip_comment_markers(comment: &str) -> String {
const MULTILINE_PREFIX: &str = "/*";
const MULTILINE_CONT: &str = "*";
const MULTILINE_SUFFIX: &str = "*/";
const SINGLELINE: &str = "//";
const DETAIL: &str = "!";
const SINGLELINE_DETAIL: &str = "/";
const SINGLELINE_SIDE: &str = "<";
let mut singleline_delimited = true;
let mut asterisk_indented = false;
let mut lines = Vec::with_capacity(128);
// first pass:
// 1. checks whether the comment is single-line or multi-line delimited
// 2. checks whether multiline comment is asterisk prefixed
// 3. strips comment delimiters for multiline and single line comments
// 4. collects resulting stripped lines into `lines` Vec
for (i, mut line) in comment.lines_with_nl().enumerate() {
let mut line_clean = line.trim_start();
if i == 0 {
if line_clean.starts_with(MULTILINE_PREFIX) {
singleline_delimited = false;
line = &line_clean[MULTILINE_PREFIX.len()..];
if let Some(new_line) = line.strip_str_prefix(MULTILINE_CONT) {
line = new_line;
asterisk_indented = true;
} else {
asterisk_indented = false;
}
line_clean = line.trim_start();
} else {
singleline_delimited = true;
asterisk_indented = false;
}
} else if let Some(line_clean) = line_clean.strip_str_prefix(MULTILINE_PREFIX) {
line = line_clean
.trim_start_matches(DETAIL)
.trim_start_matches(MULTILINE_CONT)
.trim_start();
}
if singleline_delimited && line_clean.starts_with(SINGLELINE) {
line = &line_clean[SINGLELINE.len()..];
if let Some(new_line) = line.strip_str_prefix(SINGLELINE_DETAIL) {
line = new_line;
} else if let Some(new_line) = line.strip_str_prefix(DETAIL) {
line = new_line;
}
if let Some(new_line) = line.strip_str_prefix(SINGLELINE_SIDE) {
line = new_line;
}
} else if asterisk_indented && i == 1 && !line_clean.starts_with(MULTILINE_CONT) {
asterisk_indented = false;
}
lines.push(line);
}
let trim_last_empty_lines = |lines: &mut Vec<&str>| {
while let Some(last_line) = lines.last() {
if last_line.is_empty() {
lines.pop();
} else {
break;
}
}
};
trim_last_empty_lines(&mut lines);
// trim ending multiline delimiter
if let Some(last_line) = lines.last_mut() {
if !singleline_delimited {
*last_line = last_line.trim_end();
if last_line.ends_with(MULTILINE_SUFFIX) {
*last_line = last_line[..last_line.len() - MULTILINE_SUFFIX.len()]
.trim_end();
}
}
}
trim_last_empty_lines(&mut lines);
// second pass:
// 1. calculates common indent
// 2. for multiline asterisk prefixed comments, strips this prefix modifying `lines`
let mut first_line_indent = None;
let mut common_indent: Option<Indent> = None;
for line in &mut lines {
if !singleline_delimited && asterisk_indented {
let line_trimmed = line.trim_start();
if let Some(line_trimmed) = line_trimmed.strip_str_prefix(MULTILINE_CONT) {
*line = line_trimmed;
} else {
let trim_start = line.trim_start_idx().min(2);
*line = &line[trim_start..];
}
}
if first_line_indent.is_none() {
first_line_indent = Some(line.detect_indent());
} else {
let detected_indent = line.detect_indent();
if !line[detected_indent.len..].trim_start().is_empty() {
if let Some(common_indent) = common_indent.as_mut() {
*common_indent = (*common_indent).min(detected_indent);
} else {
common_indent = Some(line.detect_indent());
}
}
}
}
let mut out = String::with_capacity(comment.len());
for (i, mut line) in lines.into_iter().enumerate() {
if i == 0 {
line = &line[first_line_indent.unwrap_or_default().len..];
if line.trim().is_empty() {
continue;
}
} else {
let indent_len = common_indent.unwrap_or_default().len;
if line.len() > indent_len {
line = &line[indent_len..];
} else {
line = "\n";
}
}
let line_clean_end = line.trim_end();
if line_clean_end.ends_with(MULTILINE_SUFFIX) {
out += line_clean_end[..line_clean_end.len() - MULTILINE_SUFFIX.len()].trim_end();
out.push('\n');
} else {
out += line;
}
}
let out_trim_end = out.trim_end_idx();
out.drain(out_trim_end..);
out
}
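/// Rewrites OpenCV-specific LaTeX helper macros (`\matTT`, `\fork…`, `\vecthree…`, `\mathbbm`,
/// `\bordermatrix`, …) into plain LaTeX so the formula can be rendered externally; for instance
/// `\mathbbm{R}` becomes `\mathbb{R}` according to the replacement table below.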
fn | (formula: &str) -> String {
const ARG_REGEX: &str = r"\s*\{([^}]*?)\}";
static MACROS: Lazy<Vec<(Regex, &str)>> = Lazy::new(|| vec![
(
Regex::new(&format!("\\\\matTT{}", ARG_REGEX.repeat(9))).unwrap(),
"\\[ \\left|\\begin{array}{ccc} $1 & $2 & $3\\\\ $4 & $5 & $6\\\\ $7 & $8 & $9 \\end{array}\\right| \\]",
),
(
Regex::new(&format!("\\\\fork{}", ARG_REGEX.repeat(4))).unwrap(),
"\\left\\{ \\begin{array}{l l} $1 & \\mbox{$2}\\\\ $3 & \\mbox{$4}\\\\ \\end{array} \\right.",
),
(
Regex::new(&format!("\\\\forkthree{}", ARG_REGEX.repeat(6))).unwrap(),
"\\left\\{ \\begin{array}{l l} $1 & \\mbox{$2}\\\\ $3 & \\mbox{$4}\\\\ $5 & \\mbox{$6}\\\\ \\end{array} \\right.",
),
(
Regex::new(&format!("\\\\forkfour{}", ARG_REGEX.repeat(8))).unwrap(),
"\\left\\{ \\begin{array}{l l} $1 & \\mbox{$2}\\\\ $3 & \\mbox{$4}\\\\ $5 & \\mbox{$6}\\\\ $7 & \\mbox{$8}\\\\ \\end{array} \\right.",
),
(
Regex::new(&format!("\\\\vecthree{}", ARG_REGEX.repeat(3))).unwrap(),
"\\begin{bmatrix} $1\\\\ $2\\\\ $3 \\end{bmatrix}",
),
(
Regex::new(&format!("\\\\vecthreethree{}", ARG_REGEX.repeat(9))).unwrap(),
"\\begin{bmatrix} $1 & $2 & $3\\\\ $4 & $5 & $6\\\\ $7 & $8 & $9 \\end{bmatrix}",
),
(
Regex::new(&format!("\\\\hdotsfor{}", ARG_REGEX.repeat(1))).unwrap(),
"\\dots",
),
(
Regex::new(&format!("\\\\mathbbm{}", ARG_REGEX.repeat(1))).unwrap(),
"\\mathbb{$1}",
),
(
Regex::new(&format!("\\\\bordermatrix{}", ARG_REGEX.repeat(9))).unwrap(),
"\\matrix{$1}",
),
]);
let mut out = formula.to_string();
for (re, repl) in &*MACROS {
out.replace_in_place_regex(re, *repl);
}
out
}
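/// Renders a Doxygen comment as Rust-flavored Markdown, prefixing every output line with
/// `prefix` (e.g. `///`) and resolving `@cite`/`@snippet` references against the given OpenCV
/// version. A thin wrapper over `render_doc_comment_with_processor` with a no-op post-processor.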
pub fn render_doc_comment(doc_comment: &str, prefix: &str, opencv_version: &str) -> String {
render_doc_comment_with_processor(doc_comment, prefix, opencv_version, |_| {})
}
pub fn render_doc_comment_with_processor(doc_comment: &str, prefix: &str, opencv_version: &str, mut post_processor: impl FnMut(&mut String)) -> String {
let mut out = strip_comment_markers(doc_comment);
out.replace_in_place("\r\n", "\n");
// module titles
static MODULE_TITLE_1: Lazy<Regex> = Lazy::new(|| Regex::new(r#"(?m)\s*@[}{].*$"#).unwrap());
static MODULE_TITLE_2: Lazy<Regex> = Lazy::new(|| Regex::new(r#"@defgroup [^ ]+ (.*)"#).unwrap());
static MODULE_TITLE_3: Lazy<Regex> = Lazy::new(|| Regex::new(r#"(?m)^.*?@addtogroup\s+.+"#).unwrap());
out.replace_in_place_regex(&MODULE_TITLE_1, "");
out.replace_in_place_regex(&MODULE_TITLE_2, r#"# $1"#);
out.replace_in_place_regex(&MODULE_TITLE_3, "");
let trimmed = out.trim();
if trimmed.len() != out.len() {
out = trimmed.to_string()
}
// comment body markers
out.replace_in_place("@brief ", "");
out.replace_in_place("@note", "\nNote:");
// code blocks, don't run them during tests
static CODE: Lazy<Regex> = Lazy::new(|| Regex::new(r#"@code(?: ?\{.+?})?"#).unwrap());
out.replace_in_place_regex(&CODE, "```ignore");
out.replace_in_place("@endcode", "```\n");
// snippets
static SNIPPET: Lazy<Regex> = Lazy::new(|| Regex::new(r#"@snippet\s+([\w/.]+)\s+([\w-]+)"#).unwrap());
out.replace_in_place_regex_cb(&SNIPPET, |s, caps| {
let (path_start, path_end) = caps.get(1).expect("Impossible");
let path = &s[path_start..path_end];
let (name_start, name_end) = caps.get(2).expect("Impossible");
if path.starts_with("samples/") { // fixme: hack to detect hdf snippets
Some(format!(
"[{name}](https://github.com/opencv/opencv_contrib/blob/{version}/modules/hdf/{path}#L1)",
name=&s[name_start..name_end],
version=opencv_version,
path=path,
).into())
} else {
Some(format!(
"[{name}](https://github.com/opencv/opencv/blob/{version}/samples/cpp/tutorial_code/{path}#L1)",
name=&s[name_start..name_end],
version=opencv_version,
path=path,
).into())
}
});
// some special casing for docs.rs build failures
out.replace_in_place("'fps'", r#""fps""#);
out.replace_in_place("'cv::Exception'", r#""cv::Exception""#);
// see also block
static SEE_ALSO_BLOCK: Lazy<Regex> = Lazy::new(|| Regex::new(r#"(?m)^\s*@(sa|see)\s+"#).unwrap());
static SEE_ALSO_INLINE: Lazy<Regex> = Lazy::new(|| Regex::new(r#"@(sa|see)\s+"#).unwrap());
if out.replacen_in_place_regex(&SEE_ALSO_BLOCK, 1, "## See also\n") {
out.replace_in_place_regex(&SEE_ALSO_INLINE, "");
} else {
out.replace_in_place_regex(&SEE_ALSO_INLINE, "see also: ");
}
// citation links
static CITE: Lazy<Regex> = Lazy::new(|| Regex::new(r#"@cite\s+([\w:]+)"#).unwrap());
out.replace_in_place_regex(&CITE, &format!("[$1](https://docs.opencv.org/{}/d0/de3/citelist.html#CITEREF_$1)", opencv_version));
// images
static IMAGE: Lazy<Regex> = Lazy::new(|| Regex::new(r#"!\[(.*?)]\((?:.*/)?(.+)?\)"#).unwrap());
out.replace_in_place_regex(&IMAGE, &format!("", opencv_version));
// returns
static RETURNS: Lazy<Regex> = Lazy::new(|| Regex::new(r#".*?@returns?\s*"#).unwrap());
out.replace_in_place_regex(&RETURNS, "## Returns\n");
// parameter list
static PARAM_HEADER: Lazy<Regex> = Lazy::new(|| Regex::new(r#"(?m)^(.*?@param)"#).unwrap());
static PARAM: Lazy<Regex> = Lazy::new(|| Regex::new(r#".*?@param\s*(?:\[in]|(\[out]))?\s+(\w+) *(.*)"#).unwrap());
out.replacen_in_place_regex(&PARAM_HEADER, 1, "## Parameters\n$1");
out.replace_in_place_regex(&PARAM, "* $2:$1 $3");
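// e.g. "@param src Input image." becomes "* src: Input image." (with "## Parameters"
// inserted once before the first parameter)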
// deprecated
static DEPRECATED: Lazy<Regex> = Lazy::new(|| Regex::new(r#".*?@deprecated\s+(.+)"#).unwrap());
let mut deprecated = None;
out.replace_in_place_regex_cb(&DEPRECATED, |out, caps| {
let (cap_start, cap_end) = caps.get(1).expect("Impossible");
let deprecated_msg = out[cap_start..cap_end].to_string();
let out = format!("\n**Deprecated**: {}", deprecated_msg);
deprecated = Some(deprecated_msg);
Some(out.into())
});
// leading dashes
static LEADING_DASH: Lazy<Regex> = Lazy::new(|| Regex::new(r#"(?m)^(\s*)-(\s{2,})"#).unwrap());
out.replace_in_place_regex(&LEADING_DASH, "$1*$2");
// math expressions
static BLOCK_FORMULA: Lazy<Regex> = Lazy::new(|| Regex::new(r#"(?s)\\f\[(.*?)\\f]"#).unwrap());
static INLINE_FORMULA: Lazy<Regex> = Lazy::new(|| Regex::new(r#"(?s)\\f\$(.*?)\\f\$"#).unwrap());
out.replace_in_place_regex_cb(&BLOCK_FORMULA, |out, caps| {
let (cap_start, cap_end) = caps.get(1).expect("Impossible");
let formula = preprocess_formula(&out[cap_start..cap_end]);
let encoded = utf8_percent_encode(&formula, NON_ALPHANUMERIC);
Some(format!("", encoded).into())
});
out.replace_in_place_regex_cb(&INLINE_FORMULA, |out, caps| {
let (cap_start, cap_end) = caps.get(1).expect("Impossible");
let formula = preprocess_formula(&out[cap_start..cap_end]);
let encoded = utf8_percent_encode(&formula, NON_ALPHANUMERIC);
Some(format!("", encoded).into())
});
// escapes
static ESCAPE: Lazy<Regex> = Lazy::new(|| Regex::new(r#"(?m)\\n$"#).unwrap());
out.replace_in_place_regex(&ESCAPE, "\n");
// catch sequences of 4 indents and reduce them to avoid cargo test running them as code
static INDENTS: Lazy<Regex> = Lazy::new(|| Regex::new(r#"(?m)^(\s{3}|\s{7}|\s{11}|\s{15}|\s{19})\s(\S)"#).unwrap());
out.replace_in_place_regex(&INDENTS, "$1$2");
post_processor(&mut out);
let mut out = if out.is_empty() || prefix.is_empty() {
out
} else {
out.lines_with_nl()
.fold(
String::with_capacity(out.len() + (prefix.len() + 1) * 128),
|mut out_prefixed, line| {
out_prefixed.push_str(prefix);
out_prefixed.push(' ');
out_prefixed + line
},
)
};
if let Some(deprecated) = deprecated {
out += &format!("\n#[deprecated = \"{}\"]", deprecated);
}
out
}
| preprocess_formula |
gcp.go | /*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package fuzz
import (
"context"
"errors"
"fmt"
"net/http"
"strings"
computealpha "google.golang.org/api/compute/v0.alpha"
computebeta "google.golang.org/api/compute/v0.beta"
compute "google.golang.org/api/compute/v1"
"google.golang.org/api/googleapi"
"k8s.io/klog"
"github.com/GoogleCloudPlatform/k8s-cloud-provider/pkg/cloud"
"github.com/GoogleCloudPlatform/k8s-cloud-provider/pkg/cloud/filter"
"github.com/GoogleCloudPlatform/k8s-cloud-provider/pkg/cloud/meta"
"k8s.io/ingress-gce/pkg/utils"
)
const (
NegResourceType = "networkEndpointGroup"
IgResourceType = "instanceGroup"
HttpProtocol = Protocol("HTTP")
HttpsProtocol = Protocol("HTTPS")
targetHTTPProxyResource = "targetHttpProxies"
targetHTTPSProxyResource = "targetHttpsProxies"
kubeSystemNS = "kube-system"
defaultHTTPBackend = "default-http-backend"
)
// Protocol specifies the GCE load balancer protocol.
type Protocol string
// ForwardingRule is a union of the API version types.
type ForwardingRule struct {
GA *compute.ForwardingRule
Alpha *computealpha.ForwardingRule
Beta *computebeta.ForwardingRule
}
// TargetHTTPProxy is a union of the API version types.
type TargetHTTPProxy struct {
GA *compute.TargetHttpProxy
Alpha *computealpha.TargetHttpProxy
Beta *computebeta.TargetHttpProxy
}
// TargetHTTPSProxy is a union of the API version types.
type TargetHTTPSProxy struct {
GA *compute.TargetHttpsProxy
Alpha *computealpha.TargetHttpsProxy
Beta *computebeta.TargetHttpsProxy
}
// URLMap is a union of the API version types.
type URLMap struct {
GA *compute.UrlMap
Alpha *computealpha.UrlMap
Beta *computebeta.UrlMap
}
// BackendService is a union of the API version types.
type BackendService struct {
GA *compute.BackendService
Alpha *computealpha.BackendService
Beta *computebeta.BackendService
}
// HealthCheck is a union of the API version types.
type HealthCheck struct {
GA *compute.HealthCheck
}
// NetworkEndpointGroup is a union of the API version types.
type NetworkEndpointGroup struct {
GA *compute.NetworkEndpointGroup
Alpha *computealpha.NetworkEndpointGroup
Beta *computebeta.NetworkEndpointGroup
}
// InstanceGroup is a union of the API version types.
type InstanceGroup struct {
GA *compute.InstanceGroup
}
// NetworkEndpoints contains the NEG definition and the network endpoints in the NEG
type NetworkEndpoints struct {
NEG *compute.NetworkEndpointGroup
Endpoints []*compute.NetworkEndpointWithHealthStatus
}
// GCLB contains the resources for a load balancer.
type GCLB struct {
VIP string
ForwardingRule map[meta.Key]*ForwardingRule
TargetHTTPProxy map[meta.Key]*TargetHTTPProxy
TargetHTTPSProxy map[meta.Key]*TargetHTTPSProxy
URLMap map[meta.Key]*URLMap
BackendService map[meta.Key]*BackendService
NetworkEndpointGroup map[meta.Key]*NetworkEndpointGroup
InstanceGroup map[meta.Key]*InstanceGroup
HealthCheck map[meta.Key]*HealthCheck
}
// NewGCLB returns an empty GCLB.
func NewGCLB(vip string) *GCLB {
return &GCLB{
VIP: vip,
ForwardingRule: map[meta.Key]*ForwardingRule{},
TargetHTTPProxy: map[meta.Key]*TargetHTTPProxy{},
TargetHTTPSProxy: map[meta.Key]*TargetHTTPSProxy{},
URLMap: map[meta.Key]*URLMap{},
BackendService: map[meta.Key]*BackendService{},
NetworkEndpointGroup: map[meta.Key]*NetworkEndpointGroup{},
InstanceGroup: map[meta.Key]*InstanceGroup{},
HealthCheck: map[meta.Key]*HealthCheck{},
}
}
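// A typical flow (illustrative): gclb := NewGCLB("203.0.113.10") prepares the empty maps, and
// GCLBForVIP then fills them while walking from forwarding rules down to backends and health checks.
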
// GCLBDeleteOptions may be provided when cleaning up GCLB resource.
type GCLBDeleteOptions struct {
// SkipDefaultBackend indicates whether to skip checking for the
// system default backend.
SkipDefaultBackend bool
// SkipBackends indicates whether to skip checking for the backends.
// This is enabled only when we know that backends are shared among multiple ingresses
// in which case shared backends are not cleaned up on ingress deletion.
SkipBackends bool
// CheckHttpFrontendResources indicates whether to check just the http
// frontend resources.
CheckHttpFrontendResources bool
// CheckHttpsFrontendResources indicates whether to check just the https
// frontend resources.
CheckHttpsFrontendResources bool
}
// CheckResourceDeletion checks the existence of the resources. Returns nil if
// all of the associated resources no longer exist.
func (g *GCLB) CheckResourceDeletion(ctx context.Context, c cloud.Cloud, options *GCLBDeleteOptions) error {
var resources []meta.Key
for k := range g.ForwardingRule {
var err error
if k.Region != "" {
_, err = c.ForwardingRules().Get(ctx, &k)
} else {
_, err = c.GlobalForwardingRules().Get(ctx, &k)
}
if err != nil {
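// A NotFound error means the resource is already gone, which is the desired state;
// any other API error is returned to the caller.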
if err.(*googleapi.Error) == nil || err.(*googleapi.Error).Code != http.StatusNotFound {
return fmt.Errorf("ForwardingRule %s is not deleted/error to get: %s", k.Name, err)
}
} else {
resources = append(resources, k)
}
}
for k := range g.TargetHTTPProxy {
var err error
if k.Region != "" {
// Use beta since GA isn't available yet
_, err = c.BetaRegionTargetHttpProxies().Get(ctx, &k)
} else {
_, err = c.TargetHttpProxies().Get(ctx, &k)
}
if err != nil {
if err.(*googleapi.Error) == nil || err.(*googleapi.Error).Code != http.StatusNotFound {
return fmt.Errorf("TargetHTTPProxy %s is not deleted/error to get: %s", k.Name, err)
}
} else {
resources = append(resources, k)
}
}
for k := range g.TargetHTTPSProxy {
var err error
if k.Region != "" {
// Use beta since GA isn't available yet
_, err = c.BetaRegionTargetHttpsProxies().Get(ctx, &k)
} else {
_, err = c.TargetHttpsProxies().Get(ctx, &k)
}
if err != nil {
if err.(*googleapi.Error) == nil || err.(*googleapi.Error).Code != http.StatusNotFound {
return fmt.Errorf("TargetHTTPSProxy %s is not deleted/error to get: %s", k.Name, err)
}
} else {
resources = append(resources, k)
}
}
for k := range g.URLMap {
var err error
if k.Region != "" {
_, err = c.BetaRegionUrlMaps().Get(ctx, &k)
} else {
_, err = c.UrlMaps().Get(ctx, &k)
}
if err != nil {
if err.(*googleapi.Error) == nil || err.(*googleapi.Error).Code != http.StatusNotFound {
return fmt.Errorf("URLMap %s is not deleted/error to get: %s", k.Name, err)
}
} else {
resources = append(resources, k)
}
}
if options == nil || !options.SkipBackends {
for k := range g.BackendService {
var err error
var bs *compute.BackendService
if k.Region != "" {
bs, err = c.RegionBackendServices().Get(ctx, &k)
} else {
bs, err = c.BackendServices().Get(ctx, &k)
}
if err != nil {
if err.(*googleapi.Error) == nil || err.(*googleapi.Error).Code != http.StatusNotFound {
return fmt.Errorf("BackendService %s is not deleted/error to get: %s", k.Name, err)
}
} else {
if options != nil && options.SkipDefaultBackend {
desc := utils.DescriptionFromString(bs.Description)
if desc.ServiceName == fmt.Sprintf("%s/%s", kubeSystemNS, defaultHTTPBackend) {
continue
}
}
resources = append(resources, k)
}
}
for k := range g.NetworkEndpointGroup {
ns, err := c.BetaNetworkEndpointGroups().Get(ctx, &k)
if err != nil {
if err.(*googleapi.Error) == nil || err.(*googleapi.Error).Code != http.StatusNotFound {
return fmt.Errorf("NetworkEndpointGroup %s is not deleted/error to get: %s", k.Name, err)
}
} else {
// TODO(smatti): Add NEG description to make this less error prone.
// This is to ensure that ILB tests that use NEGs are not blocked on default NEG deletion.
// Also, the default NEG may not get recognized here if default http backend name is changed
// to cause truncation.
if options != nil && options.SkipDefaultBackend &&
strings.Contains(ns.Name, fmt.Sprintf("%s-%s", kubeSystemNS, defaultHTTPBackend)) {
continue
}
resources = append(resources, k)
}
}
}
if len(resources) != 0 {
var s []string
for _, r := range resources {
s = append(s, r.String())
}
return fmt.Errorf("resources still exist (%s)", strings.Join(s, ", "))
}
return nil
}
// CheckResourceDeletionByProtocol checks the existence of the resources for given protocol.
// Returns nil if all of the associated frontend resources no longer exist.
func (g *GCLB) CheckResourceDeletionByProtocol(ctx context.Context, c cloud.Cloud, options *GCLBDeleteOptions, protocol Protocol) error {
var resources []meta.Key
for k, gfr := range g.ForwardingRule {
// Check if forwarding rule matches given protocol.
if gfrProtocol, err := getForwardingRuleProtocol(gfr.GA); err != nil {
return err
} else if gfrProtocol != protocol {
continue
}
var err error
if k.Region != "" {
_, err = c.ForwardingRules().Get(ctx, &k)
} else {
_, err = c.GlobalForwardingRules().Get(ctx, &k)
}
if err != nil {
if err.(*googleapi.Error) == nil || err.(*googleapi.Error).Code != http.StatusNotFound {
return fmt.Errorf("ForwardingRule %s is not deleted/error to get: %s", k.Name, err)
}
} else {
resources = append(resources, k)
}
}
switch protocol {
case HttpProtocol:
for k := range g.TargetHTTPProxy {
_, err := c.TargetHttpProxies().Get(ctx, &k)
if err != nil {
if err.(*googleapi.Error) == nil || err.(*googleapi.Error).Code != http.StatusNotFound {
return fmt.Errorf("TargetHTTPProxy %s is not deleted/error to get: %s", k.Name, err)
}
} else {
resources = append(resources, k)
}
}
case HttpsProtocol:
for k := range g.TargetHTTPSProxy {
_, err := c.TargetHttpsProxies().Get(ctx, &k)
if err != nil {
if err.(*googleapi.Error) == nil || err.(*googleapi.Error).Code != http.StatusNotFound {
return fmt.Errorf("TargetHTTPSProxy %s is not deleted/error to get: %s", k.Name, err)
}
} else {
resources = append(resources, k)
}
}
default:
return fmt.Errorf("invalid protocol %q", protocol)
}
if len(resources) != 0 {
var s []string
for _, r := range resources {
s = append(s, r.String())
}
return fmt.Errorf("resources still exist (%s)", strings.Join(s, ", "))
}
return nil
}
// getForwardingRuleProtocol returns the protocol for given forwarding rule.
func getForwardingRuleProtocol(forwardingRule *compute.ForwardingRule) (Protocol, error) {
resID, err := cloud.ParseResourceURL(forwardingRule.Target)
if err != nil {
return "", fmt.Errorf("error parsing Target (%q): %v", forwardingRule.Target, err)
}
switch resID.Resource {
case targetHTTPProxyResource:
return HttpProtocol, nil
case targetHTTPSProxyResource:
return HttpsProtocol, nil
default:
return "", fmt.Errorf("unhandled resource %q", resID.Resource)
}
}
// CheckNEGDeletion checks that all NEGs associated with the GCLB have been deleted
func (g *GCLB) CheckNEGDeletion(ctx context.Context, c cloud.Cloud, options *GCLBDeleteOptions) error {
var resources []meta.Key
for k := range g.NetworkEndpointGroup {
_, err := c.BetaNetworkEndpointGroups().Get(ctx, &k)
if err != nil {
if err.(*googleapi.Error) == nil || err.(*googleapi.Error).Code != http.StatusNotFound {
return err
}
} else {
resources = append(resources, k)
}
}
if len(resources) != 0 {
var s []string
for _, r := range resources {
s = append(s, r.String())
}
return fmt.Errorf("NEGs still exist (%s)", strings.Join(s, ", "))
}
return nil
}
func hasAlphaResource(resourceType string, validators []FeatureValidator) bool {
for _, val := range validators {
if val.HasAlphaResource(resourceType) {
return true
}
}
return false
}
func hasBetaResource(resourceType string, validators []FeatureValidator) bool {
for _, val := range validators {
if val.HasBetaResource(resourceType) {
return true
}
}
return false
}
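// GCLBForVIPParams bundles the lookup parameters for GCLBForVIP: the VIP to match, the Region
// and Network used for regional load balancers (Region is empty for global ones), and the
// feature validators that decide whether alpha/beta versions of each resource are also fetched.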
type GCLBForVIPParams struct {
VIP string
Region string
Network string
Validators []FeatureValidator
}
// GCLBForVIP retrieves all of the resources associated with the GCLB for a given VIP.
func GCLBForVIP(ctx context.Context, c cloud.Cloud, params *GCLBForVIPParams) (*GCLB, error) |
// RegionalGCLBForVIP retrieves all of the resources associated with the regional GCLB for a given VIP.
func RegionalGCLBForVIP(ctx context.Context, c cloud.Cloud, gclb *GCLB, params *GCLBForVIPParams) error {
allRFRs, err := c.ForwardingRules().List(ctx, params.Region, filter.None)
if err != nil {
klog.Warningf("Error listing forwarding rules: %v", err)
return err
}
var rfrs []*compute.ForwardingRule
for _, rfr := range allRFRs {
netResID, err := cloud.ParseResourceURL(rfr.Network)
if err != nil {
klog.Warningf("Error parsing Network (%q): %v", rfr.Network, err)
return err
}
if rfr.IPAddress == params.VIP && netResID.Key.Name == params.Network {
rfrs = append(rfrs, rfr)
}
}
if len(rfrs) == 0 {
klog.Warningf("No regional forwarding rules found, can't get all GCLB resources")
return nil
}
var urlMapKey *meta.Key
for _, rfr := range rfrs {
frKey := meta.RegionalKey(rfr.Name, params.Region)
gclb.ForwardingRule[*frKey] = &ForwardingRule{GA: rfr}
if hasAlphaResource("forwardingRule", params.Validators) {
fr, err := c.AlphaForwardingRules().Get(ctx, frKey)
if err != nil {
klog.Warningf("Error getting alpha forwarding rules: %v", err)
return err
}
gclb.ForwardingRule[*frKey].Alpha = fr
}
if hasBetaResource("forwardingRule", params.Validators) {
fr, err := c.BetaForwardingRules().Get(ctx, frKey)
if err != nil {
klog.Warningf("Error getting alpha forwarding rules: %v", err)
return err
}
gclb.ForwardingRule[*frKey].Beta = fr
}
// ForwardingRule => TargetProxy
resID, err := cloud.ParseResourceURL(rfr.Target)
if err != nil {
klog.Warningf("Error parsing Target (%q): %v", rfr.Target, err)
return err
}
switch resID.Resource {
case "targetHttpProxies":
// Use beta by default since not GA yet
p, err := c.BetaRegionTargetHttpProxies().Get(ctx, resID.Key)
if err != nil {
klog.Warningf("Error getting TargetHttpProxy %s: %v", resID.Key, err)
return err
}
gclb.TargetHTTPProxy[*resID.Key] = &TargetHTTPProxy{Beta: p}
if hasAlphaResource("targetHttpProxy", params.Validators) || hasBetaResource("targetHttpProxy", params.Validators) {
return errors.New("unsupported targetHttpProxy version")
}
urlMapResID, err := cloud.ParseResourceURL(p.UrlMap)
if err != nil {
klog.Warningf("Error parsing urlmap URL (%q): %v", p.UrlMap, err)
return err
}
if urlMapKey == nil {
urlMapKey = urlMapResID.Key
}
if *urlMapKey != *urlMapResID.Key {
klog.Warningf("Error targetHttpProxy references are not the same (%s != %s)", *urlMapKey, *urlMapResID.Key)
return fmt.Errorf("targetHttpProxy references are not the same: %+v != %+v", *urlMapKey, *urlMapResID.Key)
}
case "targetHttpsProxies":
// Use Beta by default since not GA yet
p, err := c.BetaRegionTargetHttpsProxies().Get(ctx, resID.Key)
if err != nil {
klog.Warningf("Error getting targetHttpsProxy (%s): %v", resID.Key, err)
return err
}
gclb.TargetHTTPSProxy[*resID.Key] = &TargetHTTPSProxy{Beta: p}
if hasAlphaResource("targetHttpsProxy", params.Validators) || hasBetaResource("targetHttpsProxy", params.Validators) {
return errors.New("unsupported targetHttpsProxy version")
}
urlMapResID, err := cloud.ParseResourceURL(p.UrlMap)
if err != nil {
klog.Warningf("Error parsing urlmap URL (%q): %v", p.UrlMap, err)
return err
}
if urlMapKey == nil {
urlMapKey = urlMapResID.Key
}
if *urlMapKey != *urlMapResID.Key {
klog.Warningf("Error targetHttpsProxy references are not the same (%s != %s)", *urlMapKey, *urlMapResID.Key)
return fmt.Errorf("targetHttpsProxy references are not the same: %+v != %+v", *urlMapKey, *urlMapResID.Key)
}
default:
klog.Errorf("Unhandled resource: %q, grf = %+v", resID.Resource, rfr)
return fmt.Errorf("unhandled resource %q", resID.Resource)
}
}
// TargetProxy => URLMap
// Use beta since regional URL maps are not GA yet
urlMap, err := c.BetaRegionUrlMaps().Get(ctx, urlMapKey)
if err != nil {
return err
}
gclb.URLMap[*urlMapKey] = &URLMap{Beta: urlMap}
if hasAlphaResource("urlMap", params.Validators) || hasBetaResource("urlMap", params.Validators) {
return errors.New("unsupported urlMap version")
}
// URLMap => BackendService(s)
var bsKeys []*meta.Key
resID, err := cloud.ParseResourceURL(urlMap.DefaultService)
if err != nil {
return err
}
bsKeys = append(bsKeys, resID.Key)
for _, pm := range urlMap.PathMatchers {
resID, err := cloud.ParseResourceURL(pm.DefaultService)
if err != nil {
return err
}
bsKeys = append(bsKeys, resID.Key)
for _, pr := range pm.PathRules {
resID, err := cloud.ParseResourceURL(pr.Service)
if err != nil {
return err
}
bsKeys = append(bsKeys, resID.Key)
}
}
for _, bsKey := range bsKeys {
bs, err := c.RegionBackendServices().Get(ctx, bsKey)
if err != nil {
return err
}
gclb.BackendService[*bsKey] = &BackendService{GA: bs}
if hasAlphaResource("backendService", params.Validators) {
bs, err := c.AlphaRegionBackendServices().Get(ctx, bsKey)
if err != nil {
return err
}
gclb.BackendService[*bsKey].Alpha = bs
}
if hasBetaResource("backendService", params.Validators) {
bs, err := c.BetaRegionBackendServices().Get(ctx, bsKey)
if err != nil {
return err
}
gclb.BackendService[*bsKey].Beta = bs
}
for _, hcURL := range bs.HealthChecks {
rID, err := cloud.ParseResourceURL(hcURL)
if err != nil {
return err
}
hc, err := c.RegionHealthChecks().Get(ctx, rID.Key)
if err != nil {
return err
}
gclb.HealthCheck[*rID.Key] = &HealthCheck{
GA: hc,
}
}
}
var negKeys []*meta.Key
var igKeys []*meta.Key
// Fetch NEG Backends
for _, bsKey := range bsKeys {
var beGroups []string
if hasAlphaResource("backendService", params.Validators) {
bs, err := c.AlphaRegionBackendServices().Get(ctx, bsKey)
if err != nil {
return err
}
for _, be := range bs.Backends {
beGroups = append(beGroups, be.Group)
}
} else {
bs, err := c.BetaRegionBackendServices().Get(ctx, bsKey)
if err != nil {
return err
}
for _, be := range bs.Backends {
beGroups = append(beGroups, be.Group)
}
}
for _, group := range beGroups {
if strings.Contains(group, NegResourceType) {
resourceId, err := cloud.ParseResourceURL(group)
if err != nil {
return err
}
negKeys = append(negKeys, resourceId.Key)
}
if strings.Contains(group, IgResourceType) {
resourceId, err := cloud.ParseResourceURL(group)
if err != nil {
return err
}
igKeys = append(igKeys, resourceId.Key)
}
}
}
for _, negKey := range negKeys {
neg, err := c.NetworkEndpointGroups().Get(ctx, negKey)
if err != nil {
return err
}
gclb.NetworkEndpointGroup[*negKey] = &NetworkEndpointGroup{GA: neg}
if hasAlphaResource(NegResourceType, params.Validators) {
neg, err := c.AlphaNetworkEndpointGroups().Get(ctx, negKey)
if err != nil {
return err
}
gclb.NetworkEndpointGroup[*negKey].Alpha = neg
}
if hasBetaResource(NegResourceType, params.Validators) {
neg, err := c.BetaNetworkEndpointGroups().Get(ctx, negKey)
if err != nil {
return err
}
gclb.NetworkEndpointGroup[*negKey].Beta = neg
}
}
for _, igKey := range igKeys {
ig, err := c.InstanceGroups().Get(ctx, igKey)
if err != nil {
return err
}
gclb.InstanceGroup[*igKey] = &InstanceGroup{GA: ig}
}
return err
}
// NetworkEndpointsInNegs retrieves the network endpoints from the NEGs with the given name across multiple zones
func NetworkEndpointsInNegs(ctx context.Context, c cloud.Cloud, name string, zones []string) (map[meta.Key]*NetworkEndpoints, error) {
ret := map[meta.Key]*NetworkEndpoints{}
for _, zone := range zones {
key := meta.ZonalKey(name, zone)
neg, err := c.NetworkEndpointGroups().Get(ctx, key)
if err != nil {
return nil, err
}
networkEndpoints := &NetworkEndpoints{
NEG: neg,
}
nes, err := c.NetworkEndpointGroups().ListNetworkEndpoints(ctx, key, &compute.NetworkEndpointGroupsListEndpointsRequest{HealthStatus: "SHOW"}, nil)
if err != nil {
return nil, err
}
networkEndpoints.Endpoints = nes
ret[*key] = networkEndpoints
}
return ret, nil
}
| {
gclb := NewGCLB(params.VIP)
if params.Region != "" {
err := RegionalGCLBForVIP(ctx, c, gclb, params)
return gclb, err
}
allGFRs, err := c.GlobalForwardingRules().List(ctx, filter.None)
if err != nil {
klog.Warningf("Error listing forwarding rules: %v", err)
return nil, err
}
var gfrs []*compute.ForwardingRule
for _, gfr := range allGFRs {
if gfr.IPAddress == params.VIP {
gfrs = append(gfrs, gfr)
}
}
// Return immediately if no forwarding rules exist.
if len(gfrs) == 0 {
klog.Warningf("No global forwarding rules found, can't get all GCLB resources")
return gclb, nil
}
var urlMapKey *meta.Key
for _, gfr := range gfrs {
frKey := meta.GlobalKey(gfr.Name)
gclb.ForwardingRule[*frKey] = &ForwardingRule{GA: gfr}
if hasAlphaResource("forwardingRule", params.Validators) {
fr, err := c.AlphaForwardingRules().Get(ctx, frKey)
if err != nil {
klog.Warningf("Error getting alpha forwarding rules: %v", err)
return nil, err
}
gclb.ForwardingRule[*frKey].Alpha = fr
}
if hasBetaResource("forwardingRule", params.Validators) {
return nil, errors.New("unsupported forwardingRule version")
}
// ForwardingRule => TargetProxy
resID, err := cloud.ParseResourceURL(gfr.Target)
if err != nil {
klog.Warningf("Error parsing Target (%q): %v", gfr.Target, err)
return nil, err
}
switch resID.Resource {
case targetHTTPProxyResource:
p, err := c.TargetHttpProxies().Get(ctx, resID.Key)
if err != nil {
klog.Warningf("Error getting TargetHttpProxy %s: %v", resID.Key, err)
return nil, err
}
gclb.TargetHTTPProxy[*resID.Key] = &TargetHTTPProxy{GA: p}
if hasAlphaResource("targetHttpProxy", params.Validators) || hasBetaResource("targetHttpProxy", params.Validators) {
return nil, errors.New("unsupported targetHttpProxy version")
}
urlMapResID, err := cloud.ParseResourceURL(p.UrlMap)
if err != nil {
klog.Warningf("Error parsing urlmap URL (%q): %v", p.UrlMap, err)
return nil, err
}
if urlMapKey == nil {
urlMapKey = urlMapResID.Key
}
if *urlMapKey != *urlMapResID.Key {
klog.Warningf("Error targetHttpProxy references are not the same (%s != %s)", *urlMapKey, *urlMapResID.Key)
return nil, fmt.Errorf("targetHttpProxy references are not the same: %+v != %+v", *urlMapKey, *urlMapResID.Key)
}
case targetHTTPSProxyResource:
p, err := c.TargetHttpsProxies().Get(ctx, resID.Key)
if err != nil {
klog.Warningf("Error getting targetHttpsProxy (%s): %v", resID.Key, err)
return nil, err
}
gclb.TargetHTTPSProxy[*resID.Key] = &TargetHTTPSProxy{GA: p}
if hasAlphaResource("targetHttpsProxy", params.Validators) || hasBetaResource("targetHttpsProxy", params.Validators) {
return nil, errors.New("unsupported targetHttpsProxy version")
}
urlMapResID, err := cloud.ParseResourceURL(p.UrlMap)
if err != nil {
klog.Warningf("Error parsing urlmap URL (%q): %v", p.UrlMap, err)
return nil, err
}
if urlMapKey == nil {
urlMapKey = urlMapResID.Key
}
if *urlMapKey != *urlMapResID.Key {
klog.Warningf("Error targetHttpsProxy references are not the same (%s != %s)", *urlMapKey, *urlMapResID.Key)
return nil, fmt.Errorf("targetHttpsProxy references are not the same: %+v != %+v", *urlMapKey, *urlMapResID.Key)
}
default:
klog.Errorf("Unhandled resource: %q, grf = %+v", resID.Resource, gfr)
return nil, fmt.Errorf("unhandled resource %q", resID.Resource)
}
}
// TargetProxy => URLMap
urlMap, err := c.UrlMaps().Get(ctx, urlMapKey)
if err != nil {
return nil, err
}
gclb.URLMap[*urlMapKey] = &URLMap{GA: urlMap}
if hasAlphaResource("urlMap", params.Validators) || hasBetaResource("urlMap", params.Validators) {
return nil, errors.New("unsupported urlMap version")
}
// URLMap => BackendService(s)
var bsKeys []*meta.Key
resID, err := cloud.ParseResourceURL(urlMap.DefaultService)
if err != nil {
return nil, err
}
bsKeys = append(bsKeys, resID.Key)
for _, pm := range urlMap.PathMatchers {
resID, err := cloud.ParseResourceURL(pm.DefaultService)
if err != nil {
return nil, err
}
bsKeys = append(bsKeys, resID.Key)
for _, pr := range pm.PathRules {
resID, err := cloud.ParseResourceURL(pr.Service)
if err != nil {
return nil, err
}
bsKeys = append(bsKeys, resID.Key)
}
}
for _, bsKey := range bsKeys {
bs, err := c.BackendServices().Get(ctx, bsKey)
if err != nil {
return nil, err
}
gclb.BackendService[*bsKey] = &BackendService{GA: bs}
if hasAlphaResource("backendService", params.Validators) {
bs, err := c.AlphaBackendServices().Get(ctx, bsKey)
if err != nil {
return nil, err
}
gclb.BackendService[*bsKey].Alpha = bs
}
if hasBetaResource("backendService", params.Validators) {
bs, err := c.BetaBackendServices().Get(ctx, bsKey)
if err != nil {
return nil, err
}
gclb.BackendService[*bsKey].Beta = bs
}
for _, hcURL := range bs.HealthChecks {
rID, err := cloud.ParseResourceURL(hcURL)
if err != nil {
return nil, err
}
hc, err := c.HealthChecks().Get(ctx, rID.Key)
if err != nil {
return nil, err
}
gclb.HealthCheck[*rID.Key] = &HealthCheck{
GA: hc,
}
}
}
var negKeys []*meta.Key
var igKeys []*meta.Key
// Fetch NEG Backends
for _, bsKey := range bsKeys {
var beGroups []string
if hasAlphaResource("backendService", params.Validators) {
bs, err := c.AlphaBackendServices().Get(ctx, bsKey)
if err != nil {
return nil, err
}
for _, be := range bs.Backends {
beGroups = append(beGroups, be.Group)
}
} else {
bs, err := c.BetaBackendServices().Get(ctx, bsKey)
if err != nil {
return nil, err
}
for _, be := range bs.Backends {
beGroups = append(beGroups, be.Group)
}
}
for _, group := range beGroups {
if strings.Contains(group, NegResourceType) {
resourceId, err := cloud.ParseResourceURL(group)
if err != nil {
return nil, err
}
negKeys = append(negKeys, resourceId.Key)
}
if strings.Contains(group, IgResourceType) {
resourceId, err := cloud.ParseResourceURL(group)
if err != nil {
return nil, err
}
igKeys = append(igKeys, resourceId.Key)
}
}
}
for _, negKey := range negKeys {
neg, err := c.NetworkEndpointGroups().Get(ctx, negKey)
if err != nil {
return nil, err
}
gclb.NetworkEndpointGroup[*negKey] = &NetworkEndpointGroup{GA: neg}
if hasAlphaResource(NegResourceType, params.Validators) {
neg, err := c.AlphaNetworkEndpointGroups().Get(ctx, negKey)
if err != nil {
return nil, err
}
gclb.NetworkEndpointGroup[*negKey].Alpha = neg
}
if hasBetaResource(NegResourceType, params.Validators) {
neg, err := c.BetaNetworkEndpointGroups().Get(ctx, negKey)
if err != nil {
return nil, err
}
gclb.NetworkEndpointGroup[*negKey].Beta = neg
}
}
for _, igKey := range igKeys {
ig, err := c.InstanceGroups().Get(ctx, igKey)
if err != nil {
return nil, err
}
gclb.InstanceGroup[*igKey] = &InstanceGroup{GA: ig}
}
return gclb, err
} |
backend.go | package backend
import (
"context"
"fmt"
"os"
"runtime/debug"
"sync"
"time"
"github.com/coreos/etcd/clientv3"
"github.com/coreos/etcd/pkg/transport"
"github.com/google/uuid"
corev2 "github.com/sensu/sensu-go/api/core/v2"
"github.com/sensu/sensu-go/asset"
"github.com/sensu/sensu-go/backend/agentd"
"github.com/sensu/sensu-go/backend/api"
"github.com/sensu/sensu-go/backend/apid"
"github.com/sensu/sensu-go/backend/apid/actions"
"github.com/sensu/sensu-go/backend/apid/graphql"
"github.com/sensu/sensu-go/backend/authentication"
"github.com/sensu/sensu-go/backend/authentication/jwt"
"github.com/sensu/sensu-go/backend/authentication/providers/basic"
"github.com/sensu/sensu-go/backend/authorization/rbac"
"github.com/sensu/sensu-go/backend/daemon"
"github.com/sensu/sensu-go/backend/dashboardd"
"github.com/sensu/sensu-go/backend/etcd"
"github.com/sensu/sensu-go/backend/eventd"
"github.com/sensu/sensu-go/backend/keepalived"
"github.com/sensu/sensu-go/backend/liveness"
"github.com/sensu/sensu-go/backend/messaging"
"github.com/sensu/sensu-go/backend/pipelined"
"github.com/sensu/sensu-go/backend/queue"
"github.com/sensu/sensu-go/backend/ringv2"
"github.com/sensu/sensu-go/backend/schedulerd"
"github.com/sensu/sensu-go/backend/store"
etcdstore "github.com/sensu/sensu-go/backend/store/etcd"
"github.com/sensu/sensu-go/backend/tessend"
"github.com/sensu/sensu-go/rpc"
"github.com/sensu/sensu-go/system"
"github.com/sensu/sensu-go/types"
"github.com/spf13/viper"
)
// Backend represents the backend server, which is used to hold the datastore
// and coordinate the daemons
type Backend struct {
Client *clientv3.Client
Daemons []daemon.Daemon
Etcd *etcd.Etcd
Store store.Store
EventStore EventStoreUpdater
GraphQLService *graphql.Service
done chan struct{}
ctx context.Context
cancel context.CancelFunc
}
// EventStoreUpdater offers a way to update an event store to a different
// implementation in-place.
type EventStoreUpdater interface {
UpdateEventStore(to store.EventStore)
}
func newClient(config *Config, backend *Backend) (*clientv3.Client, error) {
if config.NoEmbedEtcd {
tlsInfo := (transport.TLSInfo)(config.EtcdClientTLSInfo)
tlsConfig, err := tlsInfo.ClientConfig()
if err != nil {
return nil, err
}
clientURLs := config.EtcdClientURLs
if len(clientURLs) == 0 {
clientURLs = config.EtcdAdvertiseClientURLs
}
// Don't start up an embedded etcd, return a client that connects to an
// external etcd instead.
return clientv3.New(clientv3.Config{
Endpoints: clientURLs,
DialTimeout: 5 * time.Second,
TLS: tlsConfig,
})
}
// Initialize and start etcd, because we'll need to provide an etcd client to
// the Wizard bus, which requires etcd to be started.
cfg := etcd.NewConfig()
cfg.DataDir = config.StateDir
cfg.ListenClientURLs = config.EtcdListenClientURLs
cfg.ListenPeerURLs = config.EtcdListenPeerURLs
cfg.InitialCluster = config.EtcdInitialCluster
cfg.InitialClusterState = config.EtcdInitialClusterState
cfg.InitialAdvertisePeerURLs = config.EtcdInitialAdvertisePeerURLs
cfg.AdvertiseClientURLs = config.EtcdAdvertiseClientURLs
cfg.Discovery = config.EtcdDiscovery
cfg.DiscoverySrv = config.EtcdDiscoverySrv
cfg.Name = config.EtcdName
// Heartbeat interval
if config.EtcdHeartbeatInterval > 0 {
cfg.TickMs = config.EtcdHeartbeatInterval
}
// Election timeout
if config.EtcdElectionTimeout > 0 {
cfg.ElectionMs = config.EtcdElectionTimeout
}
// Etcd TLS config
cfg.ClientTLSInfo = config.EtcdClientTLSInfo
cfg.PeerTLSInfo = config.EtcdPeerTLSInfo
cfg.CipherSuites = config.EtcdCipherSuites
if config.EtcdQuotaBackendBytes != 0 {
cfg.QuotaBackendBytes = config.EtcdQuotaBackendBytes
}
if config.EtcdMaxRequestBytes != 0 {
cfg.MaxRequestBytes = config.EtcdMaxRequestBytes
}
// Start etcd
e, err := etcd.NewEtcd(cfg)
if err != nil {
return nil, fmt.Errorf("error starting etcd: %s", err)
}
backend.Etcd = e
// Create an etcd client
return e.NewClient()
}
// Initialize instantiates a Backend struct with the provided config, by
// configuring etcd and establishing a list of daemons, which constitute our
// backend. The daemons will later be started according to their position in the
// b.Daemons list, and stopped in reverse order
func Initialize(config *Config) (*Backend, error) {
var err error
// Initialize a Backend struct
b := &Backend{}
b.done = make(chan struct{})
b.ctx, b.cancel = context.WithCancel(context.Background())
b.Client, err = newClient(config, b)
if err != nil {
return nil, err
}
// Create the store, which lives on top of etcd
stor := etcdstore.NewStore(b.Client, config.EtcdName)
b.Store = stor
if _, err := stor.GetClusterID(b.ctx); err != nil {
switch err := err.(type) {
case *store.ErrNotFound:
if storeErr := stor.CreateClusterID(b.ctx, uuid.New().String()); storeErr != nil {
return nil, fmt.Errorf("error assigning a sensu cluster id: %s", err)
}
default:
return nil, fmt.Errorf("error retrieving sensu cluster id: %s", err)
}
}
eventStoreProxy := store.NewEventStoreProxy(stor)
b.EventStore = eventStoreProxy
logger.Debug("Registering backend...")
backendID := etcd.NewBackendIDGetter(b.ctx, b.Client)
logger.Debug("Done registering backend.")
// Initialize an etcd getter
queueGetter := queue.EtcdGetter{Client: b.Client, BackendIDGetter: backendID}
// Initialize the bus
bus, err := messaging.NewWizardBus(messaging.WizardBusConfig{})
if err != nil {
return nil, fmt.Errorf("error initializing %s: %s", bus.Name(), err)
}
b.Daemons = append(b.Daemons, bus)
// Initialize asset manager
backendEntity := b.getBackendEntity(config)
logger.WithField("entity", backendEntity).Info("backend entity information")
assetManager := asset.NewManager(config.CacheDir, backendEntity, &sync.WaitGroup{})
assetGetter, err := assetManager.StartAssetManager(b.ctx)
if err != nil {
return nil, fmt.Errorf("error initializing asset manager: %s", err)
}
// Initialize pipelined
pipeline, err := pipelined.New(pipelined.Config{
Store: stor,
Bus: bus,
ExtensionExecutorGetter: rpc.NewGRPCExtensionExecutor,
AssetGetter: assetGetter,
BufferSize: viper.GetInt(FlagPipelinedBufferSize),
WorkerCount: viper.GetInt(FlagPipelinedWorkers),
})
if err != nil {
return nil, fmt.Errorf("error initializing %s: %s", pipeline.Name(), err)
}
b.Daemons = append(b.Daemons, pipeline)
// Initialize eventd
event, err := eventd.New(
b.ctx,
eventd.Config{
Store: stor,
EventStore: eventStoreProxy,
Bus: bus,
LivenessFactory: liveness.EtcdFactory(b.ctx, b.Client),
Client: b.Client,
BufferSize: viper.GetInt(FlagEventdBufferSize),
WorkerCount: viper.GetInt(FlagEventdWorkers),
},
)
if err != nil {
return nil, fmt.Errorf("error initializing %s: %s", event.Name(), err)
}
b.Daemons = append(b.Daemons, event)
ringPool := ringv2.NewPool(b.Client)
// Initialize schedulerd
scheduler, err := schedulerd.New(
b.ctx,
schedulerd.Config{
Store: stor,
Bus: bus,
QueueGetter: queueGetter,
RingPool: ringPool,
Client: b.Client,
})
if err != nil {
return nil, fmt.Errorf("error initializing %s: %s", scheduler.Name(), err)
}
b.Daemons = append(b.Daemons, scheduler)
// Use the common TLS flags for agentd if it wasn't explicitly configured with
// its own TLS configuration
if config.TLS != nil && config.AgentTLSOptions == nil {
config.AgentTLSOptions = config.TLS
}
// Initialize agentd
agent, err := agentd.New(agentd.Config{
Host: config.AgentHost,
Port: config.AgentPort,
Bus: bus,
Store: stor,
TLS: config.AgentTLSOptions,
RingPool: ringPool,
WriteTimeout: config.AgentWriteTimeout,
})
if err != nil {
return nil, fmt.Errorf("error initializing %s: %s", agent.Name(), err)
}
b.Daemons = append(b.Daemons, agent)
// Initialize keepalived
keepalive, err := keepalived.New(keepalived.Config{
DeregistrationHandler: config.DeregistrationHandler,
Bus: bus,
Store: stor,
EventStore: stor,
LivenessFactory: liveness.EtcdFactory(b.ctx, b.Client),
RingPool: ringPool,
BufferSize: viper.GetInt(FlagKeepalivedBufferSize),
WorkerCount: viper.GetInt(FlagKeepalivedWorkers),
})
if err != nil {
return nil, fmt.Errorf("error initializing %s: %s", keepalive.Name(), err)
}
b.Daemons = append(b.Daemons, keepalive)
// Prepare the etcd client TLS config
etcdClientTLSInfo := (transport.TLSInfo)(config.EtcdClientTLSInfo)
etcdClientTLSConfig, err := etcdClientTLSInfo.ClientConfig()
if err != nil {
return nil, err
}
// Prepare the authentication providers
authenticator := &authentication.Authenticator{}
basic := &basic.Provider{
ObjectMeta: corev2.ObjectMeta{Name: basic.Type},
Store: stor,
}
authenticator.AddProvider(basic)
var clusterVersion string
// only retrieve the cluster version if etcd is embedded
if !config.NoEmbedEtcd {
clusterVersion = b.Etcd.GetClusterVersion()
}
// Load the JWT key pair
if err := jwt.LoadKeyPair(viper.GetString(FlagJWTPrivateKeyFile), viper.GetString(FlagJWTPublicKeyFile)); err != nil {
logger.WithError(err).Error("could not load the key pair for the JWT signature")
}
// Initialize GraphQL service
auth := &rbac.Authorizer{Store: stor}
b.GraphQLService, err = graphql.NewService(graphql.ServiceConfig{
AssetClient: api.NewAssetClient(stor, auth),
CheckClient: api.NewCheckClient(stor, actions.NewCheckController(stor, queueGetter), auth),
EntityClient: api.NewEntityClient(stor, eventStoreProxy, auth),
EventClient: api.NewEventClient(eventStoreProxy, auth, bus),
EventFilterClient: api.NewEventFilterClient(stor, auth),
HandlerClient: api.NewHandlerClient(stor, auth),
HealthController: actions.NewHealthController(stor, b.Client.Cluster, etcdClientTLSConfig),
MutatorClient: api.NewMutatorClient(stor, auth),
SilencedClient: api.NewSilencedClient(stor, auth),
NamespaceClient: api.NewNamespaceClient(stor, auth),
HookClient: api.NewHookConfigClient(stor, auth),
UserClient: api.NewUserClient(stor, auth),
RBACClient: api.NewRBACClient(stor, auth),
VersionController: actions.NewVersionController(clusterVersion),
GenericClient: &api.GenericClient{Store: stor, Auth: auth},
})
if err != nil {
return nil, fmt.Errorf("error initializing graphql.Service: %s", err)
}
// Initialize apid
apidConfig := apid.Config{
ListenAddress: config.APIListenAddress,
URL: config.APIURL,
Bus: bus,
Store: stor,
EventStore: eventStoreProxy,
QueueGetter: queueGetter,
TLS: config.TLS,
Cluster: b.Client.Cluster,
EtcdClientTLSConfig: etcdClientTLSConfig,
Authenticator: authenticator,
ClusterVersion: clusterVersion,
GraphQLService: b.GraphQLService,
}
api, err := apid.New(apidConfig)
if err != nil {
return nil, fmt.Errorf("error initializing %s: %s", api.Name(), err)
}
b.Daemons = append(b.Daemons, api)
// Initialize tessend
tessen, err := tessend.New(
b.ctx,
tessend.Config{
Store: stor,
RingPool: ringPool,
Client: b.Client,
Bus: bus,
})
if err != nil {
return nil, fmt.Errorf("error initializing %s: %s", tessen.Name(), err)
}
b.Daemons = append(b.Daemons, tessen)
// Initialize dashboardd TLS config
var dashboardTLSConfig *types.TLSOptions
// Always use dashboard tls options when they are specified
if config.DashboardTLSCertFile != "" && config.DashboardTLSKeyFile != "" {
dashboardTLSConfig = &types.TLSOptions{
CertFile: config.DashboardTLSCertFile,
KeyFile: config.DashboardTLSKeyFile,
}
} else if config.TLS != nil {
// use apid tls config if no dashboard tls options are specified
dashboardTLSConfig = &types.TLSOptions{
CertFile: config.TLS.GetCertFile(),
KeyFile: config.TLS.GetKeyFile(),
}
}
dashboard, err := dashboardd.New(dashboardd.Config{
APIDConfig: apidConfig,
Host: config.DashboardHost,
Port: config.DashboardPort,
TLS: dashboardTLSConfig,
})
if err != nil {
return nil, fmt.Errorf("error initializing %s: %s", dashboard.Name(), err)
}
b.Daemons = append(b.Daemons, dashboard)
return b, nil
}
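// Illustrative startup sequence for the pieces assembled above (error handling elided):
//
//	b, err := backend.Initialize(config)
//	if err != nil { /* handle */ }
//	err = b.Run() // blocks until Stop() is called or a daemon fails
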
// Run starts all of the Backend server's daemons
func (b *Backend) Run() error {
eg := errGroup{
out: make(chan error),
}
sg := stopGroup{}
// Loop across the daemons in order to start them, then add them to our groups
for _, d := range b.Daemons {
if err := d.Start(); err != nil {
return fmt.Errorf("error starting %s: %s", d.Name(), err)
}
// Add the daemon to our errGroup
eg.errors = append(eg.errors, d)
// Add the daemon to our stopGroup
sg = append(sg, daemonStopper{
Name: d.Name(),
stopper: d,
})
}
// Reverse the order of our stopGroup so daemons are stopped in the proper
// order (last one started is first one stopped)
for i := len(sg)/2 - 1; i >= 0; i-- {
opp := len(sg) - 1 - i
sg[i], sg[opp] = sg[opp], sg[i]
}
if b.Etcd != nil {
// Add etcd to our errGroup, since it's not included in the daemon list
eg.errors = append(eg.errors, b.Etcd)
}
eg.Go()
select {
case err := <-eg.Err():
logger.WithError(err).Error("error in error group")
case <-b.ctx.Done():
logger.Info("backend shutting down")
}
var derr error
if err := sg.Stop(); err != nil {
if derr == nil {
derr = err
}
}
if b.Etcd != nil {
logger.Info("shutting down etcd")
defer func() {
if err := recover(); err != nil {
trace := string(debug.Stack())
logger.WithField("panic", trace).WithError(err.(error)).
Error("recovering from panic due to error, shutting down etcd")
}
err := b.Etcd.Shutdown()
if derr == nil {
derr = err
}
}()
}
// we allow the error group's channel to leak to avoid panics from other
// goroutines writing errors to it after shutdown has been initiated.
close(b.done)
return derr
}
type stopper interface {
Stop() error
}
type daemonStopper struct {
stopper
Name string
}
type stopGroup []daemonStopper
func (s stopGroup) Stop() (err error) {
for _, stopper := range s {
logger.Info("shutting down ", stopper.Name)
e := stopper.Stop()
if err == nil {
err = e
}
}
return err
}
type errorer interface {
Err() <-chan error
}
type errGroup struct {
out chan error
errors []errorer
}
func (e errGroup) Go() {
for _, err := range e.errors {
err := err
go func() {
e.out <- <-err.Err()
}()
}
}
func (e errGroup) Err() <-chan error {
return e.out
}
// Stop the Backend cleanly.
func (b *Backend) Stop() {
b.cancel()
<-b.done
}
func (b *Backend) getBackendEntity(config *Config) *corev2.Entity {
entity := &corev2.Entity{
EntityClass: corev2.EntityBackendClass,
System: getSystemInfo(),
ObjectMeta: corev2.NewObjectMeta(getDefaultBackendID(), ""),
}
if config.DeregistrationHandler != "" {
entity.Deregistration = corev2.Deregistration{
Handler: config.DeregistrationHandler,
}
}
return entity
}
// getDefaultBackendID returns the default backend ID
func | () string {
defaultBackendID, err := os.Hostname()
if err != nil {
logger.WithError(err).Error("error getting hostname")
defaultBackendID = "unidentified-sensu-backend"
}
return defaultBackendID
}
// getSystemInfo returns the system info of the backend
func getSystemInfo() corev2.System {
info, err := system.Info()
if err != nil {
logger.WithError(err).Error("error getting system info")
}
return info
}
| getDefaultBackendID |
chunk-1b59.25ec408c.js | (window.webpackJsonp=window.webpackJsonp||[]).push([["chunk-1b59"],{"/Ark":function(e,t,a){"use strict";var i=a("6H6b");a.n(i).a},"6H6b":function(e,t,a){},Ew9n:function(e,t,a){"use strict";var i=a("7Qib"),n={name:"slide-view",components:{},computed:{},watch:{},data:function(){return{zIndex:Object(i.g)()}},props:{bodyStyle:{type:Object,default:{padding:0}},listenerIDs:{type:Array,default:function(){return[]}},noListenerIDs:{type:Array,default:function(){return[]}},noListenerClass:{type:Array,default:function(){return[]}},appendToBody:{type:Boolean,default:!1}},mounted:function(){var e=this;this.appendToBody&&document.body.appendChild(this.$el),this.listenerIDs.forEach(function(t){document.getElementById(t)&&document.getElementById(t).addEventListener("click",e.handleDocumentClick,!1)})},methods:{handleDocumentClick:function(e){var t=!0;this.noListenerIDs.forEach(function(a){document.getElementById(a)&&document.getElementById(a).contains(e.target)&&(t=!1)}),this.noListenerClass.forEach(function(a){var i=document.getElementsByClassName(a);if(i&&t)for(var n=0;n<i.length;n++){if(i[n].contains(e.target)){t=!1;break}}}),document.getElementById("slide")&&document.getElementById("slide").contains(e.target)&&(t=!1),t&&this.$emit("side-close")},afterEnter:function(){this.$emit("afterEnter")}},beforeDestroy:function(){this.appendToBody&&this.$el&&this.$el.parentNode&&this.$el.parentNode.removeChild(this.$el)}},s=(a("nhiu"),a("KHd+")),o=Object(s.a)(n,function(){var e=this.$createElement,t=this._self._c||e;return t("transition",{attrs:{name:"slide-fade"},on:{"after-enter":this.afterEnter}},[t("el-card",{ref:"slide",staticClass:"slide-detail-card-container",style:{"z-index":this.zIndex},attrs:{id:"slide","body-style":this.bodyStyle}},[this._t("default")],2)],1)},[],!1,null,"20831c94",null);o.options.__file="SlideView.vue";t.a=o.exports},HJld:function(e,t,a){"use strict";a.d(t,"a",function(){return n}),a.d(t,"d",function(){return s}),a.d(t,"e",function(){return o}),a.d(t,"b",function(){return r}),a.d(t,"c",function(){return l});var i=a("t3Un");function n(e){return Object(i.a)({url:"oa/examine/category",method:"post",data:e})}function s(e){return Object(i.a)({url:"oa/examine/categorySave",method:"post",data:e})}function | (e){return Object(i.a)({url:"oa/examine/categoryUpdate",method:"post",data:e})}function r(e){return Object(i.a)({url:"oa/examine/categoryDelete",method:"post",data:e})}function l(e){return Object(i.a)({url:"oa/examine/categoryEnables",method:"post",data:e})}},IPtX:function(e,t,a){"use strict";var i=a("OHqg");a.n(i).a},NJhl:function(e,t,a){"use strict";var i=a("yVnO");a.n(i).a},OHqg:function(e,t,a){},SfBd:function(e,t,a){"use strict";var i=a("jOSx");a.n(i).a},axw9:function(e,t,a){"use strict";var i=a("qIAI");a.n(i).a},ejzQ:function(e,t,a){},iNuc:function(e,t,a){"use strict";a.r(t);var i=a("HJld"),n=a("ViDN"),s=a("8GhS"),o=a("Ktth"),r=a.n(o),l={name:"create-examine-category",components:{CreateView:n.a,XhInput:s.g,XhTextarea:s.o,XhSelect:s.l,XhUserCell:s.p,XhStrucUserCell:s.m},computed:{title:function(){return"save"===this.handle.action?"新建审批类型":"update"===this.handle.action?"编辑审批类型":void 
0}},data:function(){return{loading:!1,crmRules:{},crmForm:{crmFields:[]},currentPage:1,examineType:1,examineList:[{type:1,value:[],show:!1,options:[{name:"负责人主管",value:1},{name:"指定用户(任意一人)",value:2},{name:"指定用户(多人会签)",value:3}]}]}},filters:{typeToComponentName:function(e){return"text"==e?"XhInput":"textarea"==e?"XhTextarea":"select"==e?"XhSelect":"structure"==e?"XhStrucUserCell":void 0},numberToZh:function(e){return r.a.encodeS(e)}},props:{handle:{type:Object,default:function(){return{type:"examineflow",action:"save",id:"",data:null}}}},mounted:function(){if(document.body.appendChild(this.$el),this.getField(),this.handle.data)if(this.handle.data.config&&1===this.handle.data.config){this.examineList=[];for(var e=0;e<this.handle.data.stepList.length;e++){var t=this.handle.data.stepList[e],a={};a.type=t.status,2===t.status||3===t.status?(a.show=!0,a.value=t.user_id_info):(a.show=!1,a.value=[]),a.options=0===e?[{name:"负责人主管",value:1},{name:"指定用户(任意一人)",value:2},{name:"指定用户(多人会签)",value:3}]:[{name:"负责人主管",value:1},{name:"指定用户(任意一人)",value:2},{name:"指定用户(多人会签)",value:3},{name:"上一级审批人主管",value:4}],this.examineList.push(a)}}else this.examineType=0},methods:{fieldValueChange:function(e){this.crmForm.crmFields[e.index].value=e.value},getField:function(){var e=[];e.push({field:"title",form_type:"text",is_null:1,name:"审批类型名称",setting:[],input_tips:"",value:this.handle.data?this.handle.data.title:""}),e.push({field:"structure",form_type:"structure",is_null:0,name:"可视范围",setting:[],input_tips:"默认全公司",value:{users:this.handle.data?this.handle.data.user_ids_info:[],strucs:this.handle.data?this.handle.data.structure_ids_info:[]}}),e.push({field:"remark",form_type:"textarea",is_null:0,name:"审批类型说明",setting:[],input_tips:"",value:this.handle.data?this.handle.data.remark:""}),this.getcrmRulesAndModel(e)},getcrmRulesAndModel:function(e){for(var t=0;t<e.length;t++){var a=e[t],i=[];1==a.is_null&&i.push({required:!0,message:a.name+"不能为空",trigger:["blur","change"]}),this.crmRules[a.field]=i;var n={};n.value=a.value,n.key=a.field,n.data=a,"textarea"==a.form_type&&(n.showblock=!0),this.crmForm.crmFields.push(n)}},saveField:function(){var e=this;this.$refs.crmForm.validate(function(t){if(!t)return e.$message.error("请完善必填信息"),!1;e.validStepsInfo()&&e.submiteParams(e.crmForm.crmFields)})},validStepsInfo:function(){for(var e=0;e<this.examineList.length;e++){var t=this.examineList[e];if((2===t.type||3===t.type)&&0===t.value.length)return this.$message.error("请添加员工"),!1}return!0},submiteParams:function(e){var t=this;this.loading=!0;var a=this.getSubmiteParams(e);"update"==this.handle.action&&(a.id=this.handle.id),this.getRequest()(a).then(function(e){t.loading=!1,t.$emit("save"),"save"==t.handle.action?t.$confirm("您将继续完成审批表单的创建","创建成功",{showCancelButton:!1,confirmButtonText:"确定",type:"warning"}).then(function(){t.hidenView(),t.$router.push({name:"handlefield",params:{type:"oa_examine",id:e.data.category_id}})}).catch(function(){}):(t.$message.success(e.data),t.hidenView())}).catch(function(){t.loading=!1})},getRequest:function(){return"update"===this.handle.action?i.e:i.d},getSubmiteParams:function(e){for(var t={},a=0;a<e.length;a++){var i=e[a];"structure"===i.key?(t.user_ids=i.value.users.map(function(e){return e.id}),t.structure_ids=i.value.strucs.map(function(e){return e.id})):t[i.key]=i.value}for(var n=[],s=0;s<this.examineList.length;s++){var o=this.examineList[s];n.push({status:o.type,user_id:o.value.map(function(e){return e.id})})}return t.config=this.examineType,t.step=n,t},nextPage:function(){var 
e=this;this.$refs.crmForm.validate(function(t){t&&(e.currentPage=2)})},selectOptionsChange:function(e){2==e.type||3==e.type?e.show=!0:e.show=!1},selectOptionsFocus:function(e,t){if(this.examineList.length>1){var a=t>0?this.examineList[t-1]:null,i=null;if(t<this.examineList.length-1)i=this.examineList[t+1];var n=!1,s=!1,o=!1;!a||2!==a.type&&3!==a.type||(o=!0),i&&4===i.type&&(n=!0,s=!0);var r=[{name:"负责人主管",value:1}];n||r.push({name:"指定用户(任意一人)",value:2}),s||r.push({name:"指定用户(多人会签)",value:3}),o||r.push({name:"上一级审批人主管",value:4}),e.options=r}},examineItemsAdd:function(){this.examineList.push({type:1,value:[],show:!1,options:[{name:"负责人主管",value:1},{name:"指定用户(任意一人)",value:2},{name:"指定用户(多人会签)",value:3},{name:"上一级审批人主管",value:4}]})},flowUserSelect:function(e){this.examineList[e.index].value=e.value},deleteExamineItems:function(e){this.examineList.splice(e,1)},hidenView:function(){this.$emit("hiden-view")},getPaddingLeft:function(e,t){return e.showblock&&1==e.showblock?"0":t%2==0?"0":"25px"},getPaddingRight:function(e,t){return e.showblock&&1==e.showblock?"0":t%2==0?"25px":"0"}},destroyed:function(){this.$el&&this.$el.parentNode&&this.$el.parentNode.removeChild(this.$el)}},c=(a("IPtX"),a("KHd+")),d=Object(c.a)(l,function(){var e=this,t=e.$createElement,i=e._self._c||t;return i("create-view",{attrs:{loading:e.loading,"body-style":{height:"100%"}}},[i("flexbox",{staticClass:"crm-create-container",attrs:{direction:"column",align:"stretch"}},[i("flexbox",{staticClass:"crm-create-header"},[i("div",{staticStyle:{flex:"1","font-size":"17px",color:"#333"}},[e._v(e._s(e.title))]),e._v(" "),i("img",{staticClass:"close",attrs:{src:a("cjwK")},on:{click:e.hidenView}})]),e._v(" "),i("flexbox",{staticClass:"crm-create-flex",attrs:{direction:"column",align:"stretch"}},[i("div",{directives:[{name:"show",rawName:"v-show",value:1==e.currentPage,expression:"currentPage == 1"}],staticClass:"crm-create-body"},[i("div",{staticClass:"create-name"},[e._v("基本信息")]),e._v(" "),i("el-form",{ref:"crmForm",staticClass:"crm-create-box",attrs:{model:e.crmForm,"label-position":"top"}},e._l(this.crmForm.crmFields,function(t,a){return i("el-form-item",{key:t.key,class:{"crm-create-block-item":t.showblock,"crm-create-item":!t.showblock},style:{"padding-left":e.getPaddingLeft(t,a),"padding-right":e.getPaddingRight(t,a)},attrs:{prop:"crmFields."+a+".value",rules:e.crmRules[t.key]}},[i("div",{staticStyle:{display:"inline-block"},attrs:{slot:"label"},slot:"label"},[i("div",{staticStyle:{margin:"5px 0","font-size":"12px","word-wrap":"break-word","word-break":"break-all"}},[e._v("\n "+e._s(t.data.name)+"\n "),i("span",{staticStyle:{color:"#999"}},[e._v("\n "+e._s(t.data.input_tips?"("+t.data.input_tips+")":"")+"\n ")])])]),e._v(" "),i(e._f("typeToComponentName")(t.data.form_type),{tag:"component",attrs:{value:t.value,index:a,item:t,radio:!1},on:{"value-change":e.fieldValueChange}})],1)}))],1),e._v(" "),i("div",{directives:[{name:"show",rawName:"v-show",value:2==e.currentPage,expression:"currentPage == 2"}],staticClass:"crm-create-body"},[i("div",{staticStyle:{padding:"0 20px","font-size":"12px"}},[i("el-radio",{attrs:{label:1},model:{value:e.examineType,callback:function(t){e.examineType=t},expression:"examineType"}},[e._v("固定审批流")]),e._v(" "),i("div",{staticClass:"examine-items"},e._l(e.examineList,function(t,a){return i("flexbox",{key:a,staticClass:"examine-item"},[i("div",{staticClass:"examine-item-name"},[e._v("第"+e._s(e._f("numberToZh")(a+1))+"级")]),e._v(" 
"),i("el-select",{staticClass:"examine-item-select",attrs:{placeholder:"请选择"},on:{focus:function(i){e.selectOptionsFocus(t,a)},change:function(a){e.selectOptionsChange(t)}},model:{value:t.type,callback:function(a){e.$set(t,"type",a)},expression:"item.type"}},e._l(t.options,function(e){return i("el-option",{key:e.value,attrs:{label:e.name,value:e.value}})})),e._v(" "),t.show?i("xh-user-cell",{staticClass:"examine-item-user",attrs:{radio:!1,index:a,value:t.value},on:{"value-change":e.flowUserSelect}}):e._e(),e._v(" "),i("i",{staticClass:"el-icon-remove examine-item-delete",on:{click:function(t){e.deleteExamineItems(a)}}})],1)})),e._v(" "),i("div",{staticClass:"examine-items-add"},[i("span",{on:{click:e.examineItemsAdd}},[e._v("+ 添加审批层级")])]),e._v(" "),i("div",{staticClass:"examine-add-des"},[i("p",[i("span",{staticClass:"examine-add-required"},[e._v("*")]),e._v("当选择“负责人主管”审批时。系统仅会通知负责人主管,但负责人所有上级(主管、主管的主管)均可审批。")]),e._v(" "),i("p",[i("span",{staticClass:"examine-add-required"},[e._v("*")]),e._v("当选择多个“指定用户”审批时。如果指定用户没有权限查看对应的合同,系统不会通知其审批。 ")]),e._v(" "),i("p",[i("span",{staticClass:"examine-add-required"},[e._v("*")]),e._v("当选择“指定用户(任意一人)”表示指定用户中任意一人审批即可。当选择“指定用户(多人会签)”表示 指定用户中所有人都要审批。")])]),e._v(" "),i("el-radio",{attrs:{label:0},model:{value:e.examineType,callback:function(t){e.examineType=t},expression:"examineType"}},[e._v("授权审批人")])],1)])]),e._v(" "),1==e.currentPage?i("div",{staticClass:"handle-bar"},[i("el-button",{staticClass:"handle-button",nativeOn:{click:function(t){return e.hidenView(t)}}},[e._v("取消")]),e._v(" "),i("el-button",{staticClass:"handle-button",attrs:{type:"primary"},nativeOn:{click:function(t){return e.nextPage(t)}}},[e._v("下一页")])],1):e._e(),e._v(" "),2==e.currentPage?i("div",{staticClass:"handle-bar"},[i("el-button",{staticClass:"handle-button",nativeOn:{click:function(t){return e.hidenView(t)}}},[e._v("取消")]),e._v(" "),i("el-button",{staticClass:"handle-button",attrs:{type:"primary"},nativeOn:{click:function(t){return e.saveField(t)}}},[e._v("保存")]),e._v(" "),i("el-button",{staticClass:"handle-button",attrs:{type:"primary"},nativeOn:{click:function(t){e.currentPage=1}}},[e._v("上一页")])],1):e._e()],1)],1)},[],!1,null,"c16e3d3e",null);d.options.__file="CreateExamineCategory.vue";var u=d.exports,h=a("P2sY"),m=a.n(h),f=a("Ew9n"),p=a("EP+0"),v=a("oP74"),g={name:"examine-category-detail",components:{SlideView:f.a,CreateSections:p.a,PreviewFieldView:v.a},props:{data:Object},filters:{formatedScopeInfo:function(e){for(var t="",a=e.structure_ids_info?e.structure_ids_info:[],i=0;i<a.length;i++){t=t+a[i].name+"、"}for(var n=e.user_ids_info?e.user_ids_info:[],s=0;s<n.length;s++){t=t+n[s].realname+(s===n.length-1?"":"、")}return t||"全公司"},toRowName:function(e){return"第"+r.a.encodeS(e)+"级"},toRowNameDes:function(e){return 1==e.status?"负责人主管":2==e.status?e.user_id_info.length+"人或签":3==e.status?e.user_id_info.length+"人会签":4==e.status?"上一级审批人主管":""}},watch:{data:function(e){this.getShowData()}},data:function(){return{showData:{},examineStatus:!1,tablePreviewData:{types:"",types_id:""},showTablePreview:!1}},computed:{},mounted:function(){this.getShowData()},methods:{handlePreview:function(){this.tablePreviewData.types="oa_examine",this.tablePreviewData.types_id=this.data.category_id,this.showTablePreview=!0},deleteClick:function(){var 
e=this;this.$confirm("您确定要删除该审批流?","提示",{confirmButtonText:"确定",cancelButtonText:"取消",type:"warning"}).then(function(){Object(i.b)({id:e.data.category_id}).then(function(t){e.$emit("refresh"),e.hideView(),e.$message({type:"success",message:t.data})}).catch(function(){})}).catch(function(){e.$message({type:"info",message:"已取消删除"})})},getShowData:function(){this.showData=m()({},this.data),this.examineStatus=0!=this.showData.status},examineStatusChange:function(){var e=this;this.showData.status=this.examineStatus?1:0,this.$confirm("您确定要"+(0===this.examineStatus?"启用":"停用")+"该审批流?","提示",{confirmButtonText:"确定",cancelButtonText:"取消",type:"warning"}).then(function(){Object(i.c)({id:e.data.category_id,status:0===e.examineStatus?1:0}).then(function(t){e.$emit("refresh"),e.$message({type:"success",message:t.data})}).catch(function(){e.cancelStatusChange()})}).catch(function(){e.cancelStatusChange(),e.$message({type:"info",message:"已取消删除"})})},cancelStatusChange:function(){this.examineStatus=!this.examineStatus,this.showData.status=this.examineStatus?1:0},hideView:function(){this.$emit("hide-view")}}},_=(a("SfBd"),Object(c.a)(g,function(){var e=this,t=e.$createElement,i=e._self._c||t;return i("slide-view",{staticClass:"d-view",attrs:{listenerIDs:["manager-main-container"],noListenerIDs:["examine-table"],appendToBody:!0,"body-style":{padding:"10px 30px",height:"100%"}},on:{"side-close":e.hideView}},[i("flexbox",{staticStyle:{height:"100%"},attrs:{orient:"vertical"}},[i("div",{staticClass:"header"},[i("flexbox",{staticClass:"detail-header"},[i("div",{staticClass:"header-name"},[e._v(e._s(e.showData.title)),1!=e.showData.is_sys?i("i",{staticClass:"el-icon-delete delete",on:{click:e.deleteClick}}):e._e()]),e._v(" "),i("img",{staticClass:"header-close",attrs:{src:a("cjwK")},on:{click:e.hideView}})]),e._v(" "),i("div",{staticClass:"detail-header-des"},[i("div",{staticClass:"status"},[e._v("状态:"+e._s(0==e.showData.status?"停用":"启用"))]),e._v(" "),i("div",{staticClass:"status-handle"},[e._v("\n 启用\n "),i("el-switch",{on:{change:e.examineStatusChange},model:{value:e.examineStatus,callback:function(t){e.examineStatus=t},expression:"examineStatus"}})],1)])],1),e._v(" "),i("div",{staticClass:"detail-body"},[i("create-sections",{staticClass:"create-sections",attrs:{title:"基本信息"}},[i("div",{staticClass:"create-sections-content"},[i("div",{staticClass:"dialog-content"},[i("flexbox",{staticClass:"content-items"},[i("div",{staticClass:"content-items-name"},[e._v("审批名称")]),e._v(" "),i("div",{staticClass:"content-items-value"},[e._v(e._s(e.showData.title))])]),e._v(" "),i("flexbox",{staticClass:"content-items"},[i("div",{staticClass:"content-items-name"},[e._v("适用范围")]),e._v(" "),i("div",{staticClass:"content-items-value"},[e._v(e._s(e._f("formatedScopeInfo")(e.showData)))])]),e._v(" "),i("flexbox",{staticClass:"content-items"},[i("div",{staticClass:"content-items-name"},[e._v("审批说明")]),e._v(" "),i("div",{staticClass:"content-items-value"},[e._v(e._s(e.showData.remark))])])],1)])]),e._v(" "),i("create-sections",{staticClass:"create-sections",attrs:{title:"表单"}},[i("div",{staticClass:"preview-section",attrs:{slot:"header"},slot:"header"},[i("el-button",{attrs:{type:"text"},on:{click:e.handlePreview}},[e._v("预览")])],1)]),e._v(" "),i("create-sections",{staticClass:"create-sections",attrs:{title:"流程"}},[1==e.showData.config?i("div",{staticClass:"create-sections-content"},e._l(e.showData.stepList,function(t,a){return 
i("flexbox",{key:a,staticClass:"examine-flow",attrs:{align:"stretch"}},[i("div",{staticClass:"examine-flow-header"},[i("div",{staticClass:"mark-circle"}),e._v(" "),0!=a?i("div",{staticClass:"mark-top-line"}):e._e(),e._v(" "),a<e.showData.stepList.length-1?i("div",{staticClass:"mark-bottom-line"}):e._e()]),e._v(" "),i("div",{staticClass:"examine-flow-body"},[i("div",{staticClass:"body-header"},[i("span",{staticClass:"body-header-name"},[e._v(e._s(e._f("toRowName")(a+1)))]),i("span",{staticClass:"body-header-des"},[e._v("("+e._s(e._f("toRowNameDes")(t))+")")])]),e._v(" "),t.user_id_info.length>0?i("flexbox",{staticClass:"examine-users"},e._l(t.user_id_info,function(t,a){return i("div",{key:a,staticClass:"examine-users-item"},[i("div",{directives:[{name:"photo",rawName:"v-photo",value:t,expression:"userItem"},{name:"lazy",rawName:"v-lazy:background-image",value:e.$options.filters.filterUserLazyImg(t.thumb_img),expression:"$options.filters.filterUserLazyImg(userItem.thumb_img)",arg:"background-image"}],key:t.thumb_img,staticClass:"div-photo"}),e._v(" "),i("div",{staticClass:"name"},[e._v(e._s(t.realname))])])})):e._e()],1)])})):i("div",{staticClass:"create-sections-content"},[e._v("授权审批人")])])],1)]),e._v(" "),e.showTablePreview?i("preview-field-view",{attrs:{types:e.tablePreviewData.types,types_id:e.tablePreviewData.types_id},on:{"hiden-view":function(t){e.showTablePreview=!1}}}):e._e()],1)},[],!1,null,"5d010a13",null));_.options.__file="examineCategoryDetail.vue";var y=_.exports,x=(a("7Qib"),{name:"examine-manager",components:{CreateExamineCategory:u,ExamineCategoryDetail:y},data:function(){return{loading:!1,tableHeight:document.documentElement.clientHeight-240,list:[],fieldList:[{prop:"title",label:"审批名称",width:150},{prop:"config",label:"流程类型",width:150},{prop:"user_ids",label:"可见范围",width:150},{prop:"remark",label:"审批说明",width:150},{prop:"update_time",label:"最后修改时间",width:150},{prop:"status",label:"状态",width:150}],currentPage:1,pageSize:10,pageSizes:[10,20,30,40],total:0,showHandleView:!1,createHandleInfo:{action:"save",type:"examine",id:""},showDetail:!1,detailData:{}}},watch:{},mounted:function(){var e=this;window.onresize=function(){e.tableHeight=document.documentElement.clientHeight-240},this.getList()},methods:{saveSuccess:function(){this.currentPage=1,this.getList()},getList:function(){var e=this;this.loading=!0,Object(i.a)({page:this.currentPage,limit:this.pageSize}).then(function(t){e.list=t.data.list,e.total=t.data.dataCount,e.loading=!1}).catch(function(){e.loading=!1})},fieldFormatter:function(e,t){if("config"===t.property)return 1===e[t.property]?"固定审批流":0===e[t.property]?"授权审批人":"";if("user_ids"===t.property){var a=(e.structure_ids_info||[]).map(function(e){return e.name}).join("、");a&&(a+="、");var i=a+(e.user_ids_info||[]).map(function(e){return e.realname}).join("、");return i||"全公司"}return"status"===t.property?0===e[t.property]?"停用":"启用":e[t.property]},addExamine:function(){this.createHandleInfo={action:"save",type:"examine",id:""},this.showHandleView=!0},handleRowClick:function(e,t,a){t.property&&(this.detailData=e,this.showDetail=!0)},handleSizeChange:function(e){this.pageSize=e,this.getList()},handleCurrentChange:function(e){this.currentPage=e,this.getList()},handleClick:function(e,t){var 
a=this;"edit-table"===e?this.$router.push({name:"handlefield",params:{type:"oa_examine",id:t.row.category_id}}):"edit"===e?(this.createHandleInfo.action="update",this.createHandleInfo.id=t.row.category_id,this.createHandleInfo.data=t.row,this.showHandleView=!0):"delete"===e?this.$confirm("您确定要删除该审批流?","提示",{confirmButtonText:"确定",cancelButtonText:"取消",type:"warning"}).then(function(){a.loading=!0,Object(i.b)({id:t.row.category_id}).then(function(e){a.list.splice(t.$index,1),a.getList(),a.$message({type:"success",message:e.data}),a.loading=!1}).catch(function(){a.loading=!1})}).catch(function(){a.$message({type:"info",message:"已取消删除"})}):"change"===e&&this.$confirm("您确定要"+(0===t.row.status?"启用":"停用")+"该审批流?","提示",{confirmButtonText:"确定",cancelButtonText:"取消",type:"warning"}).then(function(){Object(i.c)({id:t.row.category_id,status:0===t.row.status?1:0}).then(function(e){t.row.status=0===t.row.status?1:0,a.$message({type:"success",message:e.data})}).catch(function(){})}).catch(function(){a.$message({type:"info",message:"已取消删除"})})}}}),b=(a("NJhl"),Object(c.a)(x,function(){var e=this,t=e.$createElement,a=e._self._c||t;return a("div",[a("div",{staticClass:"se-table-header"},[a("el-button",{staticClass:"se-table-header-button",attrs:{type:"primary"},on:{click:e.addExamine}},[e._v("添加审批流程")])],1),e._v(" "),a("el-table",{directives:[{name:"loading",rawName:"v-loading",value:e.loading,expression:"loading"}],staticClass:"main-table",staticStyle:{width:"100%"},attrs:{id:"examine-table",data:e.list,height:e.tableHeight,stripe:"","highlight-current-row":""},on:{"row-click":e.handleRowClick}},[e._l(e.fieldList,function(t,i){return a("el-table-column",{key:i,attrs:{"show-overflow-tooltip":"",formatter:e.fieldFormatter,prop:t.prop,width:t.width,label:t.label},scopedSlots:e._u([{key:"header",fn:function(t){return[a("div",{staticClass:"table-head-name"},[e._v(e._s(t.column.label))])]}}])})}),e._v(" "),a("el-table-column"),e._v(" "),a("el-table-column",{attrs:{fixed:"right",label:"操作",width:"250"},scopedSlots:e._u([{key:"default",fn:function(t){return[a("el-button",{attrs:{type:"text",size:"small"},on:{click:function(a){e.handleClick("edit-table",t)}}},[e._v("编辑表单")]),e._v(" "),a("el-button",{attrs:{type:"text",size:"small"},on:{click:function(a){e.handleClick("edit",t)}}},[e._v("编辑")]),e._v(" "),a("el-button",{attrs:{disabled:1===t.row.is_sys,type:"text",size:"small"},on:{click:function(a){e.handleClick("delete",t)}}},[e._v("删除")]),e._v(" "),a("el-button",{attrs:{type:"text",size:"small"},on:{click:function(a){e.handleClick("change",t)}}},[e._v(e._s(0===t.row.status?"启用":"停用"))])]}}])})],2),e._v(" "),a("div",{staticClass:"p-contianer"},[a("el-pagination",{staticClass:"p-bar",attrs:{"current-page":e.currentPage,"page-sizes":e.pageSizes,"page-size":e.pageSize,layout:"total, sizes, prev, pager, next, jumper",total:e.total},on:{"size-change":e.handleSizeChange,"current-change":e.handleCurrentChange,"update:pageSize":function(t){e.pageSize=t}}})],1),e._v(" "),e.showHandleView?a("create-examine-category",{attrs:{handle:e.createHandleInfo},on:{save:e.saveSuccess,"hiden-view":function(t){e.showHandleView=!1}}}):e._e(),e._v(" "),e.showDetail?a("examine-category-detail",{attrs:{data:e.detailData},on:{refresh:e.getList,"hide-view":function(t){e.showDetail=!1}}}):e._e()],1)},[],!1,null,"23ef54bc",null));b.options.__file="ExamineManager.vue";var 
w={components:{ExamineManager:b.exports},data:function(){return{leftType:"ExamineManager",leftSides:[{name:"审批类型管理",type:"ExamineManager"}]}},methods:{},mounted:function(){}},C=(a("/Ark"),Object(c.a)(w,function(){var e=this,t=e.$createElement,a=e._self._c||t;return a("flexbox",{staticStyle:{height:"100%"},attrs:{direction:"column",align:"stretch"}},[a("div",{staticClass:"sw-header"},[e._v("\n 工作台\n ")]),e._v(" "),a("div",{staticClass:"sw-body"},[a("flexbox",{staticClass:"sw-box",attrs:{align:"stretch"}},[a("div",{staticClass:"sw-body-side"},e._l(e.leftSides,function(t,i){return a("div",{key:i,staticClass:"side-item",class:e.leftType==t.type?"side-item-select":"side-item-default",on:{click:function(a){e.sideClick(t)}}},[e._v("\n "+e._s(t.name)+"\n ")])})),e._v(" "),a("div",{staticClass:"sw-body-content"},[a(e.leftType,{tag:"component"})],1)])],1)])},[],!1,null,"39efac0c",null));C.options.__file="index.vue";t.default=C.exports},jOSx:function(e,t,a){},nhiu:function(e,t,a){"use strict";var i=a("ejzQ");a.n(i).a},oP74:function(e,t,a){"use strict";var i=a("ViDN"),n=a("EP+0"),s=a("conU"),o=a("8GhS"),r={name:"preview-field-view",components:{CreateView:i.a,CreateSections:n.a,XhInput:o.g,XhTextarea:o.o,XhSelect:o.l,XhMultipleSelect:o.h,XhDate:o.d,XhDateTime:o.e,XhUserCell:o.p,XhStructureCell:o.n,XhFiles:o.f,CrmRelativeCell:o.a,XhProuctCate:o.j,XhProduct:o.i,XhBusinessStatus:o.b,XhCustomerAddress:o.c,XhReceivablesPlan:o.k},computed:{},watch:{types:function(e){this.crmForm={crmFields:[]},this.getField()}},data:function(){return{title:"预览",loading:!1,crmForm:{crmFields:[]}}},filters:{typeToComponentName:function(e){return"text"==e||"number"==e||"floatnumber"==e||"mobile"==e||"email"==e?"XhInput":"textarea"==e?"XhTextarea":"select"==e||"business_status"==e?"XhSelect":"checkbox"==e?"XhMultipleSelect":"date"==e?"XhDate":"datetime"==e?"XhDateTime":"user"==e?"XhUserCell":"structure"==e?"XhStructureCell":"file"==e?"XhFiles":"contacts"==e||"customer"==e||"contract"==e||"business"==e?"CrmRelativeCell":"category"==e?"XhProuctCate":"business_type"==e?"XhBusinessStatus":"product"==e?"XhProduct":"map_address"==e?"XhCustomerAddress":"receivables_plan"==e?"XhReceivablesPlan":void 0}},props:{types:{type:String,default:""},types_id:{type:[String,Number],default:""}},mounted:function(){document.body.appendChild(this.$el),this.getField()},methods:{getField:function(){var e=this;this.loading=!0;var t={};t.types=this.types,"oa_examine"==this.types&&(t.types_id=this.types_id),Object(s.t)(t).then(function(t){e.getcrmRulesAndModel(t.data),e.loading=!1}).catch(function(){e.loading=!1})},getcrmRulesAndModel:function(e){for(var t=0;t<e.length;t++){var a=e[t],i={};i.value=a.value,i.key=a.field,i.data=a,i.disabled=!0,this.crmForm.crmFields.push(i)}},hidenView:function(){this.$emit("hiden-view")},getPaddingLeft:function(e,t){return e.showblock&&1==e.showblock?"0":t%2==0?"0":"25px"},getPaddingRight:function(e,t){return e.showblock&&1==e.showblock?"0":t%2==0?"25px":"0"}},destroyed:function(){this.$el&&this.$el.parentNode&&this.$el.parentNode.removeChild(this.$el)}},l=(a("axw9"),a("KHd+")),c=Object(l.a)(r,function(){var e=this,t=e.$createElement,i=e._self._c||t;return i("create-view",{attrs:{loading:e.loading,"body-style":{height:"100%"}}},[i("flexbox",{staticClass:"crm-create-container",attrs:{direction:"column",align:"stretch"}},[i("flexbox",{staticClass:"crm-create-header"},[i("div",{staticStyle:{flex:"1","font-size":"17px",color:"#333"}},[e._v(e._s(e.title))]),e._v(" 
"),i("img",{staticClass:"close",attrs:{src:a("cjwK")},on:{click:e.hidenView}})]),e._v(" "),i("div",{staticClass:"crm-create-flex"},[i("create-sections",{attrs:{title:"基本信息"}},[i("flexbox",{attrs:{direction:"column",align:"stretch"}},[i("div",{staticClass:"crm-create-body"},[i("el-form",{ref:"crmForm",staticClass:"crm-create-box",attrs:{model:e.crmForm,"label-position":"top"}},e._l(this.crmForm.crmFields,function(t,a){return i("el-form-item",{key:t.key,class:{"crm-create-block-item":t.showblock,"crm-create-item":!t.showblock},style:{"padding-left":e.getPaddingLeft(t,a),"padding-right":e.getPaddingRight(t,a)},attrs:{prop:"crmFields."+a+".value"}},[i("div",{staticStyle:{display:"inline-block"},attrs:{slot:"label"},slot:"label"},[i("div",{staticStyle:{margin:"5px 0","font-size":"12px","word-wrap":"break-word","word-break":"break-all"}},[e._v("\n "+e._s(t.data.name)+"\n "),i("span",{staticStyle:{color:"#999"}},[e._v("\n "+e._s(t.data.input_tips?"("+t.data.input_tips+")":"")+"\n ")])])]),e._v(" "),i(e._f("typeToComponentName")(t.data.form_type),{tag:"component",attrs:{radio:!1,disabled:t.disabled}})],1)}))],1)])],1)],1)],1)],1)},[],!1,null,"233dde4a",null);c.options.__file="previewFieldView.vue";t.a=c.exports},qIAI:function(e,t,a){},yVnO:function(e,t,a){}}]); | o |
alter.go | package mysql
import (
schemasv1alpha4 "github.com/schemahero/schemahero/pkg/apis/schemas/v1alpha4"
"github.com/schemahero/schemahero/pkg/database/types"
)
func AlterColumnStatements(tableName string, primaryKeys []string, desiredColumns []*schemasv1alpha4.SQLTableColumn, existingColumn *types.Column) ([]string, error) {
// this could be an alter or a drop column command
for _, desiredColumn := range desiredColumns {
if desiredColumn.Name == existingColumn.Name {
column, err := schemaColumnToColumn(desiredColumn)
if err != nil {
return nil, err
}
isPrimaryKey := false
for _, primaryKey := range primaryKeys {
if column.Name == primaryKey {
isPrimaryKey = true
}
}
// primary keys are always not null
if isPrimaryKey {
ensureColumnConstraintsNotNullTrue(column)
}
if columnsMatch(existingColumn, column) {
return []string{}, nil
}
return AlterModifyColumnStatement{
TableName: tableName,
ExistingColumn: *existingColumn, | }
}
// wasn't found as a desired column, so drop
return AlterDropColumnStatement{
TableName: tableName,
Column: types.Column{Name: existingColumn.Name},
}.DDL(), nil
}
func columnsMatch(col1 *types.Column, col2 *types.Column) bool {
if col1.DataType != col2.DataType {
return false
}
if col1.ColumnDefault != nil && col2.ColumnDefault == nil {
return false
} else if col1.ColumnDefault == nil && col2.ColumnDefault != nil {
return false
} else if col1.ColumnDefault != nil && col2.ColumnDefault != nil && *col1.ColumnDefault != *col2.ColumnDefault {
return false
}
col1Constraints, col2Constraints := col1.Constraints, col2.Constraints
if col1Constraints == nil {
col1Constraints = &types.ColumnConstraints{}
}
if col2Constraints == nil {
col2Constraints = &types.ColumnConstraints{}
}
if !types.BoolsEqual(col1Constraints.NotNull, col2Constraints.NotNull) {
return false
}
col1Attributes, col2Attributes := col1.Attributes, col2.Attributes
if col1Attributes == nil {
col1Attributes = &types.ColumnAttributes{}
}
if col2Attributes == nil {
col2Attributes = &types.ColumnAttributes{}
}
if !types.BoolsEqual(col1Attributes.AutoIncrement, col2Attributes.AutoIncrement) {
return false
}
return true
}
func ensureColumnConstraintsNotNullTrue(column *types.Column) {
if column.Constraints == nil {
column.Constraints = &types.ColumnConstraints{}
}
column.Constraints.NotNull = &trueValue
} | Column: *column,
}.DDL(), nil |
test_tags.py |
import pytest
from mock import MagicMock, patch, PropertyMock
from pontoon.tags.models import Tag
from pontoon.tags.utils import (
TagsLatestTranslationsTool, TagsResourcesTool,
TagsStatsTool, TagsTool, TagTool)
from pontoon.tags.utils.base import Clonable
def test_util_tags_tool():
# test tags tool instantiation
tags_tool = TagsTool()
assert tags_tool.tag_class is TagTool
assert tags_tool.resources_class is TagsResourcesTool
assert tags_tool.translations_class is TagsLatestTranslationsTool
assert tags_tool.stats_class is TagsStatsTool
assert tags_tool.locales is None
assert tags_tool.projects is None
assert tags_tool.priority is None
assert tags_tool.slug is None
assert tags_tool.path is None
assert tags_tool.tag_manager == Tag.objects
@patch('pontoon.tags.utils.TagsTool.stats_class')
def test_util_tags_tool_stats(stats_mock, tag_init_kwargs):
# tests instantiation of tag.stats_tool with different args
tags_tool = TagsTool(**tag_init_kwargs)
stats_mock.return_value = 23
assert tags_tool.stat_tool == 23
assert stats_mock.call_args[1] == tag_init_kwargs
@pytest.mark.parametrize(
"kwargs",
[dict(
slug=None,
locales=None,
projects=None,
path=None),
dict(
slug=1,
locales=2,
projects=3,
path=4)])
@patch('pontoon.tags.utils.TagsTool.resources_class')
def test_util_tags_tool_resources(resources_mock, kwargs):
# tests instantiation of tag.resources_tool with different args
tags_tool = TagsTool(**kwargs)
resources_mock.return_value = 23
assert tags_tool.resource_tool == 23
assert resources_mock.call_args[1] == kwargs
@pytest.mark.parametrize(
"kwargs",
[dict(
slug=None,
locales=None,
projects=None),
dict(
slug=1,
locales=2,
projects=3)])
@patch('pontoon.tags.utils.TagsTool.translations_class')
def test_util_tags_tool_translations(trans_mock, kwargs):
# tests instantiation of tag.translations_tool with different args
tags_tool = TagsTool(**kwargs)
trans_mock.return_value = 23
assert tags_tool.translation_tool == 23
assert trans_mock.call_args[1] == kwargs
@patch('pontoon.tags.utils.TagsTool.tag_class')
@patch('pontoon.tags.utils.TagsTool.get_tags')
@patch('pontoon.tags.utils.TagsTool.__len__')
@patch('pontoon.tags.utils.TagsTool.__iter__')
def test_util_tags_tool_get(iter_mock, len_mock, tags_mock, class_mock):
# tests getting a TagTool from TagsTool
tags_tool = TagsTool()
class_mock.return_value = 23
len_mock.return_value = 7
iter_mock.return_value = iter([3, 17, 73])
# with no slug returns first result from iter(self)
assert tags_tool.get() == 3
assert not class_mock.called
assert not tags_mock.called
assert len_mock.called
assert iter_mock.called
len_mock.reset_mock()
iter_mock.reset_mock()
# calling with slug creates a TagTool instance
# and doesn't call iter(self) at all
assert tags_tool.get(113) == 23
assert not len_mock.called
assert not iter_mock.called
assert (
list(class_mock.call_args)
== [(tags_tool, ), {}])
assert (
list(tags_mock.call_args)
== [(), {'slug': 113}])
def test_util_tags_tool_call_and_clone():
# tests cloning a TagsTool
|
@patch('pontoon.tags.utils.TagsTool.__call__')
def test_util_tags_tool_getitem(call_mock):
# test that calling __getitem__ calls __call__ with slug
tags_tool = TagsTool()
slugs = ["foo", "bar"]
for slug in slugs:
tags_tool[slug]
assert call_mock.call_args_list[0][1] == dict(slug=slugs[0])
assert call_mock.call_args_list[1][1] == dict(slug=slugs[1])
@patch('pontoon.tags.utils.TagsTool.iter_tags')
@patch('pontoon.tags.utils.TagsTool.stat_tool',
new_callable=PropertyMock)
def test_util_tags_tool_iter(stats_mock, iter_mock):
# tests that when you iter it calls iter_tags with
# stats data
tags_tool = TagsTool()
stats_mock.configure_mock(
**{'return_value.data': [7, 23]})
iter_mock.return_value = iter([])
assert list(tags_tool) == []
assert stats_mock.called
assert (
list(iter_mock.call_args)
== [([7, 23],), {}])
@patch('pontoon.tags.utils.TagsTool.stat_tool',
new_callable=PropertyMock)
def test_util_tags_tool_len(stats_mock):
# tests that when you len() you get the len
# of the stats data
m_len = MagicMock()
m_len.__len__.return_value = 23
stats_mock.configure_mock(
**{'return_value.data': m_len})
tags_tool = TagsTool()
assert len(tags_tool) == 23
assert m_len.__len__.called
@patch('pontoon.tags.utils.TagsTool.translation_tool',
new_callable=PropertyMock)
@patch('pontoon.tags.utils.TagsTool.tag_class')
def test_util_tags_tool_iter_tags(tag_mock, trans_mock):
# tests that iter_tags calls instantiates a TagTool with
# stat data and latest_translation data
trans_mock.configure_mock(
**{'return_value.data.get.return_value': 23})
tags_tool = TagsTool()
list(
tags_tool.iter_tags(
[dict(resource__tag=1, foo="bar"),
dict(resource__tag=2, foo="bar"),
dict(resource__tag=3, foo="bar")]))
# translation_tool.data.get() was called 3 times with tag pks
assert (
[x[0][0] for x in trans_mock.return_value.data.get.call_args_list]
== [1, 2, 3])
# TagTool was called 3 times with the tags tool as arg
assert (
[x[0][0] for x in tag_mock.call_args_list]
== [tags_tool] * 3)
# and stat + translation data as kwargs
assert (
[x[1] for x in tag_mock.call_args_list]
== [{'resource__tag': 1, 'latest_translation': 23, 'foo': 'bar'},
{'resource__tag': 2, 'latest_translation': 23, 'foo': 'bar'},
{'resource__tag': 3, 'latest_translation': 23, 'foo': 'bar'}])
@patch('pontoon.tags.utils.TagsTool.tag_manager',
new_callable=PropertyMock)
@patch('pontoon.tags.utils.tags.glob_to_regex')
def test_util_tags_tool_get_tags(glob_mock, tag_mock):
glob_mock.return_value = 17
filter_mock = MagicMock(
**{'filter.return_value': 23})
tag_mock.configure_mock(
**{'return_value.values.return_value': filter_mock})
tags_tool = TagsTool()
# no slug provided, returns `values`
assert tags_tool.get_tags() is filter_mock
assert not filter_mock.called
assert not glob_mock.called
assert (
list(tag_mock.return_value.values.call_args)
== [('pk', 'name', 'slug', 'priority', 'project'), {}])
tag_mock.reset_mock()
# slug provided, `values` is filtered
assert tags_tool.get_tags('FOO') == 23
assert (
list(filter_mock.filter.call_args)
== [(), {'slug__regex': 17}])
assert list(glob_mock.call_args) == [('FOO',), {}]
assert (
list(tag_mock.return_value.values.call_args)
== [('pk', 'name', 'slug', 'priority', 'project'), {}])
| tags_tool = TagsTool()
cloned = tags_tool()
assert cloned is not tags_tool
assert isinstance(tags_tool, Clonable)
assert isinstance(cloned, Clonable) |
__main__.py | # Copyright (c) 2014, 2015, 2019 Wieland Hoffmann, MetaBrainz Foundation
# License: MIT, see LICENSE for details
import argparse
import logging
import multiprocessing
import ConfigParser
import config
from . import init_raven_client
from .amqp.extension_generation import generate_extension
from .amqp.handler import watch
from .amqp.setup import setup_rabbitmq
from .indexing import reindex
from .schema import SCHEMA
from .trigger_generation import generate_func
logger = logging.getLogger("sir")
def main():
parser = argparse.ArgumentParser(prog="sir")
parser.add_argument("-d", "--debug", action="store_true")
parser.add_argument("--sqltimings", action="store_true")
subparsers = parser.add_subparsers()
reindex_parser = subparsers.add_parser("reindex",
help="Reindexes all or a single "
"entity type")
reindex_parser.set_defaults(func=reindex)
reindex_parser.add_argument('--entity-type', action='append',
help="Which entity types to index.",
choices=SCHEMA.keys())
generate_trigger_parser = subparsers.add_parser("triggers",
help="Generate triggers")
generate_trigger_parser.set_defaults(func=generate_func)
generate_trigger_parser.add_argument('-t', '--trigger-file',
action="store",
default="sql/CreateTriggers.sql",
help="The filename to save the "
"triggers into")
generate_trigger_parser.add_argument('-f', '--function-file',
action="store",
default="sql/CreateFunctions.sql",
help="The filename to save the "
"functions into")
generate_trigger_parser.add_argument('-bid', '--broker-id',
action="store",
default="1",
help="ID of the AMQP broker row "
"in the database.")
generate_extension_parser = subparsers.add_parser("extension",
help="Generate extension")
generate_extension_parser.set_defaults(func=generate_extension)
generate_extension_parser.add_argument('-e', '--extension-file',
action="store",
default="sql/CreateExtension.sql",
help="The filename to save the "
"extension into")
amqp_setup_parser = subparsers.add_parser("amqp_setup",
help="Set up AMQP exchanges and "
"queues")
amqp_setup_parser.set_defaults(func=setup_rabbitmq)
amqp_watch_parser = subparsers.add_parser("amqp_watch",
help="Watch AMQP queues for "
"changes")
amqp_watch_parser.set_defaults(func=watch)
args = parser.parse_args()
if args.debug:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
loghandler = logging.StreamHandler()
if args.debug:
formatter = logging.Formatter(fmt="%(processName)s %(asctime)s "
"%(levelname)s: %(message)s")
else:
formatter = logging.Formatter(fmt="%(asctime)s: %(message)s")
loghandler.setFormatter(formatter)
logger.addHandler(loghandler)
mplogger = multiprocessing.get_logger()
mplogger.setLevel(logging.ERROR)
mplogger.addHandler(loghandler)
if args.sqltimings:
from sqlalchemy import event
from sqlalchemy.engine import Engine
import time
sqltimelogger = logging.getLogger("sqltimer")
sqltimelogger.setLevel(logging.DEBUG)
sqltimelogger.addHandler(loghandler)
@event.listens_for(Engine, "before_cursor_execute")
def before_cursor_execute(conn, cursor, statement,
parameters, context, executemany):
|
@event.listens_for(Engine, "after_cursor_execute")
def after_cursor_execute(conn, cursor, statement,
parameters, context, executemany):
total = time.time() - conn.info['query_start_time'].pop(-1)
sqltimelogger.debug("Query Complete!")
sqltimelogger.debug("Total Time: %f", total)
config.read_config()
try:
init_raven_client(config.CFG.get("sentry", "dsn"))
except ConfigParser.Error as e:
logger.info("Skipping Raven client initialization. Configuration issue: %s", e)
func = args.func
args = vars(args)
func(args)
if __name__ == '__main__':
main()
| conn.info.setdefault('query_start_time', []).append(time.time())
sqltimelogger.debug("Start Query: %s", statement)
sqltimelogger.debug("With Parameters: %s", parameters) |
camelsus.py | from pathlib import Path
from typing import Dict, List, Tuple, Union
import numpy as np
import pandas as pd
import xarray
from neuralhydrology.datasetzoo.basedataset import BaseDataset
from neuralhydrology.utils.config import Config
class | (BaseDataset):
"""Data set class for the CAMELS US data set by [#]_ and [#]_.
Parameters
----------
cfg : Config
The run configuration.
is_train : bool
Defines if the dataset is used for training or evaluating. If True (training), means/stds for each feature
are computed and stored to the run directory. If one-hot encoding is used, the mapping for the one-hot encoding
is created and also stored to disk. If False, a `scaler` input is expected and similarly the `id_to_int` input
if one-hot encoding is used.
period : {'train', 'validation', 'test'}
Defines the period for which the data will be loaded
basin : str, optional
If passed, the data for only this basin will be loaded. Otherwise the basin(s) are read from the appropriate
basin file, corresponding to the `period`.
additional_features : List[Dict[str, pd.DataFrame]], optional
List of dictionaries, mapping from a basin id to a pandas DataFrame. This DataFrame will be added to the data
loaded from the dataset and all columns are available as 'dynamic_inputs', 'static_inputs' and
'target_variables'
id_to_int : Dict[str, int], optional
If the config argument 'use_basin_id_encoding' is True in the config and period is either 'validation' or
'test', this input is required. It is a dictionary, mapping from basin id to an integer (the one-hot encoding).
scaler : Dict[str, Union[pd.Series, xarray.DataArray]], optional
If period is either 'validation' or 'test', this input is required. It contains the means and standard
deviations for each feature and is stored to the run directory during training (train_data/train_data_scaler.p)
References
----------
.. [#] A. J. Newman, M. P. Clark, K. Sampson, A. Wood, L. E. Hay, A. Bock, R. J. Viger, D. Blodgett,
L. Brekke, J. R. Arnold, T. Hopson, and Q. Duan: Development of a large-sample watershed-scale
hydrometeorological dataset for the contiguous USA: dataset characteristics and assessment of regional
variability in hydrologic model performance. Hydrol. Earth Syst. Sci., 19, 209-223,
doi:10.5194/hess-19-209-2015, 2015
.. [#] Addor, N., Newman, A. J., Mizukami, N. and Clark, M. P.: The CAMELS data set: catchment attributes and
meteorology for large-sample studies, Hydrol. Earth Syst. Sci., 21, 5293-5313, doi:10.5194/hess-21-5293-2017,
2017.
"""
def __init__(self,
cfg: Config,
is_train: bool,
period: str,
basin: str = None,
additional_features: List[Dict[str, pd.DataFrame]] = [],
id_to_int: Dict[str, int] = {},
scaler: Dict[str, Union[pd.Series, xarray.DataArray]] = {}):
super(CamelsUS, self).__init__(cfg=cfg,
is_train=is_train,
period=period,
basin=basin,
additional_features=additional_features,
id_to_int=id_to_int,
scaler=scaler)
def _load_basin_data(self, basin: str) -> pd.DataFrame:
"""Load input and output data from text files."""
# get forcings
dfs = []
for forcing in self.cfg.forcings:
df, area = load_camels_us_forcings(self.cfg.data_dir, basin, forcing)
# rename columns
if len(self.cfg.forcings) > 1:
df = df.rename(columns={col: f"{col}_{forcing}" for col in df.columns})
dfs.append(df)
df = pd.concat(dfs, axis=1)
# add discharge
df['QObs(mm/d)'] = load_camels_us_discharge(self.cfg.data_dir, basin, area)
# replace invalid discharge values by NaNs
qobs_cols = [col for col in df.columns if "qobs" in col.lower()]
for col in qobs_cols:
df.loc[df[col] < 0, col] = np.nan
return df
def _load_attributes(self) -> pd.DataFrame:
if self.cfg.camels_attributes:
df = load_camels_us_attributes(self.cfg.data_dir, basins=self.basins)
# remove all attributes not defined in the config
drop_cols = [c for c in df.columns if c not in self.cfg.camels_attributes]
df = df.drop(drop_cols, axis=1)
return df
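# Usage sketch (not part of the original module; `cfg` is assumed to be a fully populated
# neuralhydrology Config pointing at a CAMELS US data directory):
#
#   dataset = CamelsUS(cfg=cfg, is_train=True, period="train")
#
# During training the feature means/stds are computed and stored to the run directory;
# for 'validation' or 'test' the `scaler` (and `id_to_int`, if one-hot encoding is used)
# must be passed in, as described in the class docstring above.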
def load_camels_us_attributes(data_dir: Path, basins: List[str] = []) -> pd.DataFrame:
"""Load CAMELS US attributes from the dataset provided by [#]_
Parameters
----------
data_dir : Path
Path to the CAMELS US directory. This folder must contain a 'camels_attributes_v2.0' folder (the original
data set) containing the corresponding txt files for each attribute group.
basins : List[str], optional
If passed, return only attributes for the basins specified in this list. Otherwise, the attributes of all basins
are returned.
Returns
-------
pandas.DataFrame
Basin-indexed DataFrame, containing the attributes as columns.
References
----------
.. [#] Addor, N., Newman, A. J., Mizukami, N. and Clark, M. P.: The CAMELS data set: catchment attributes and
meteorology for large-sample studies, Hydrol. Earth Syst. Sci., 21, 5293-5313, doi:10.5194/hess-21-5293-2017,
2017.
"""
attributes_path = Path(data_dir) / 'camels_attributes_v2.0'
if not attributes_path.exists():
raise RuntimeError(f"Attribute folder not found at {attributes_path}")
txt_files = attributes_path.glob('camels_*.txt')
# Read-in attributes into one big dataframe
dfs = []
for txt_file in txt_files:
df_temp = pd.read_csv(txt_file, sep=';', header=0, dtype={'gauge_id': str})
df_temp = df_temp.set_index('gauge_id')
dfs.append(df_temp)
df = pd.concat(dfs, axis=1)
# convert huc column to double digit strings
df['huc'] = df['huc_02'].apply(lambda x: str(x).zfill(2))
df = df.drop('huc_02', axis=1)
if basins:
# drop rows of basins not contained in the passed list
drop_basins = [b for b in df.index if b not in basins]
df = df.drop(drop_basins, axis=0)
return df
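# Example call (a sketch; the data path and basin id are illustrative only):
#
#   attributes = load_camels_us_attributes(Path("/data/CAMELS_US"), basins=["01013500"])
#   print(attributes.loc["01013500", "huc"])
#
# The returned DataFrame is indexed by the 8-digit gauge id, with one column per attribute.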
def load_camels_us_forcings(data_dir: Path, basin: str, forcings: str) -> Tuple[pd.DataFrame, int]:
"""Load the forcing data for a basin of the CAMELS US data set.
Parameters
----------
data_dir : Path
Path to the CAMELS US directory. This folder must contain a 'basin_mean_forcing' folder containing one
subdirectory for each forcing. The forcing directories have to contain 18 subdirectories (for the 18 HUCS) as in
the original CAMELS data set. In each HUC folder are the forcing files (.txt), starting with the 8-digit basin
id.
basin : str
8-digit USGS identifier of the basin.
forcings : str
Can be e.g. 'daymet' or 'nldas', etc. Must match the folder names in the 'basin_mean_forcing' directory.
Returns
-------
pd.DataFrame
Time-indexed DataFrame, containing the forcing data.
int
Catchment area (m2), specified in the header of the forcing file.
"""
forcing_path = data_dir / 'basin_mean_forcing' / forcings
if not forcing_path.is_dir():
raise OSError(f"{forcing_path} does not exist")
files = list(forcing_path.glob('**/*_forcing_leap.txt'))
file_path = [f for f in files if f.name[:8] == basin]
if file_path:
file_path = file_path[0]
else:
raise FileNotFoundError(f'No file for Basin {basin} at {file_path}')
df = pd.read_csv(file_path, sep='\s+', header=3)
df["date"] = pd.to_datetime(df.Year.map(str) + "/" + df.Mnth.map(str) + "/" + df.Day.map(str), format="%Y/%m/%d")
df = df.set_index("date")
# load area from header
with open(file_path, 'r') as fp:
content = fp.readlines()
area = int(content[2])
return df, area
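# Example call chaining forcings and discharge (a sketch; the path, basin id and forcing
# product are illustrative only):
#
#   df, area = load_camels_us_forcings(Path("/data/CAMELS_US"), "01013500", "daymet")
#   df["QObs(mm/d)"] = load_camels_us_discharge(Path("/data/CAMELS_US"), "01013500", area)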
def load_camels_us_discharge(data_dir: Path, basin: str, area: int) -> pd.Series:
"""Load the discharge data for a basin of the CAMELS US data set.
Parameters
----------
data_dir : Path
Path to the CAMELS US directory. This folder must contain a 'usgs_streamflow' folder with 18
subdirectories (for the 18 HUCS) as in the original CAMELS data set. In each HUC folder are the discharge files
(.txt), starting with the 8-digit basin id.
basin : str
8-digit USGS identifier of the basin.
area : int
Catchment area (m2), used to normalize the discharge.
Returns
-------
pd.Series
Time-index pandas.Series of the discharge values (mm/day)
"""
discharge_path = data_dir / 'usgs_streamflow'
files = list(discharge_path.glob('**/*_streamflow_qc.txt'))
file_path = [f for f in files if f.name[:8] == basin]
if file_path:
file_path = file_path[0]
else:
raise FileNotFoundError(f'No file for Basin {basin} at {file_path}')
col_names = ['basin', 'Year', 'Mnth', 'Day', 'QObs', 'flag']
df = pd.read_csv(file_path, sep='\s+', header=None, names=col_names)
df["date"] = pd.to_datetime(df.Year.map(str) + "/" + df.Mnth.map(str) + "/" + df.Day.map(str), format="%Y/%m/%d")
df = df.set_index("date")
# normalize discharge from cubic feet per second to mm per day
df.QObs = 28316846.592 * df.QObs * 86400 / (area * 10**6)
return df.QObs
| CamelsUS |
client_config.rs | use core::fmt;
use hashbrown::HashMap;
use near_account_id::AccountId; |
use crate::key_stores::KeyStore;
pub struct ClientConfig {
pub key_store: Option<Box<dyn KeyStore>>,
pub signer: Option<Box<dyn Signer>>,
pub helper_url: Option<String>,
pub initial_balance: Option<String>,
pub master_account: Option<AccountId>,
pub network_id: String,
pub node_url: String,
pub wallet_url: Option<String>,
pub headers: HashMap<String, String>,
pub key_path: Option<String>,
}
impl ClientConfig {
pub fn new(network_id: &str, node_url: &str) -> Self {
Self {
network_id: String::from(network_id),
node_url: String::from(node_url),
key_store: None,
signer: None,
helper_url: None,
initial_balance: None,
master_account: None,
wallet_url: None,
headers: HashMap::new(),
key_path: None,
}
}
}
impl fmt::Display for ClientConfig {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
let signer = match self.signer.as_ref() {
Some(signer) => signer.public_key().to_string(),
_ => String::from("No signer"),
};
write!(
f,
"network_id: {}, node_url: {}, key: {:?}",
self.network_id, self.node_url, signer
)
}
} | use near_crypto::{PublicKey, Signer};
use std::borrow::Borrow; |
image.py | #
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from bigdl.util.common import JavaValue
from bigdl.util.common import callBigDlFunc
from bigdl.util.common import *
if sys.version >= '3':
long = int
unicode = str
class FeatureTransformer(JavaValue):
"""
FeatureTransformer is a transformer that transforms an ImageFeature
"""
def __init__(self, bigdl_type="float", *args):
self.value = callBigDlFunc(
bigdl_type, JavaValue.jvm_class_constructor(self), *args)
def transform(self, image_feature, bigdl_type="float"):
"""
transform ImageFeature
"""
callBigDlFunc(bigdl_type, "transformImageFeature", self.value, image_feature)
return image_feature
def __call__(self, image_frame, bigdl_type="float"):
"""
transform ImageFrame
"""
jframe = callBigDlFunc(bigdl_type,
"transformImageFrame", self.value, image_frame)
return ImageFrame(jvalue=jframe)
class Pipeline(FeatureTransformer):
"""
Pipeline of FeatureTransformer
"""
def __init__(self, transformers, bigdl_type="float"):
for transformer in transformers:
assert transformer.__class__.__bases__[0].__name__ == "FeatureTransformer", "the transformer should be " \
"a subclass of FeatureTransformer"
super(Pipeline, self).__init__(bigdl_type, transformers)
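# Usage sketch (illustrative only; the image path and SparkContext `sc` are assumptions):
#
#   transformer = Pipeline([BytesToMat(), Resize(256, 256), CenterCrop(224, 224), MatToTensor()])
#   image_frame = ImageFrame.read("hdfs://path/to/images", sc)
#   transformed = transformer(image_frame)
#
# Every element passed to Pipeline must directly subclass FeatureTransformer, as the
# assertion above enforces.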
class ImageFeature(JavaValue):
"""
Each ImageFeature keeps information about a single image;
it can include various states of an image,
e.g. the original bytes read from the image file, an OpenCV mat,
pixels in a float array, the image label, meta data and so on.
It uses a HashMap to store all of these data,
where the key is a string that identifies the corresponding value.
"""
def __init__(self, image=None, label=None, path=None, bigdl_type="float"):
image_tensor = JTensor.from_ndarray(image) if image is not None else None
label_tensor = JTensor.from_ndarray(label) if label is not None else None
self.bigdl_type = bigdl_type
self.value = callBigDlFunc(
bigdl_type, JavaValue.jvm_class_constructor(self), image_tensor, label_tensor, path)
def get_image(self, float_key="floats", to_chw=True):
"""
get image as ndarray from ImageFeature
"""
tensor = callBigDlFunc(self.bigdl_type, "imageFeatureToImageTensor", self.value,
float_key, to_chw)
return tensor.to_ndarray()
def get_label(self):
"""
get label as ndarray from ImageFeature
"""
label = callBigDlFunc(self.bigdl_type, "imageFeatureToLabelTensor", self.value)
return label.to_ndarray()
def keys(self):
"""
get key set from ImageFeature
"""
return callBigDlFunc(self.bigdl_type, "imageFeatureGetKeys", self.value)
class ImageFrame(JavaValue):
"""
ImageFrame wraps a set of ImageFeature
"""
def __init__(self, jvalue, bigdl_type="float"):
self.value = jvalue
self.bigdl_type = bigdl_type
if self.is_local():
self.image_frame = LocalImageFrame(jvalue=self.value)
else:
self.image_frame = DistributedImageFrame(jvalue=self.value)
@classmethod
def read(cls, path, sc=None, min_partitions=1, bigdl_type="float"):
|
@classmethod
def read_parquet(cls, path, sc, bigdl_type="float"):
"""
Read parquet file as DistributedImageFrame
"""
return DistributedImageFrame(jvalue=callBigDlFunc(bigdl_type, "readParquet", path, sc))
@classmethod
def write_parquet(cls, path, output, sc, partition_num = 1, bigdl_type="float"):
"""
write ImageFrame as parquet file
"""
return callBigDlFunc(bigdl_type, "writeParquet", path, output, sc, partition_num)
def is_local(self):
"""
whether this is a LocalImageFrame
"""
return callBigDlFunc(self.bigdl_type, "isLocal", self.value)
def is_distributed(self):
"""
whether this is a DistributedImageFrame
"""
return callBigDlFunc(self.bigdl_type, "isDistributed", self.value)
def transform(self, transformer, bigdl_type="float"):
"""
transformImageFrame
"""
self.value = callBigDlFunc(bigdl_type,
"transformImageFrame", transformer, self.value)
return self
def get_image(self, float_key="floats", to_chw=True):
"""
get image from ImageFrame
"""
return self.image_frame.get_image(float_key, to_chw)
def get_label(self):
"""
get label from ImageFrame
"""
return self.image_frame.get_label()
def get_predict(self, key="predict"):
"""
get prediction from ImageFrame
"""
return self.image_frame.get_predict(key)
def get_sample(self):
"""
get sample from ImageFrame
"""
return self.image_frame.get_sample()
def get_uri(self):
"""
get uri from imageframe
"""
return self.image_frame.get_uri()
def set_label(self, label, bigdl_type="float"):
"""
set label for imageframe
"""
return callBigDlFunc(bigdl_type,
"setLabel", label, self.value)
def random_split(self, weights):
"""
Random split imageframes according to weights
:param weights: weights for each ImageFrame
:return:
"""
jvalues = self.image_frame.random_split(weights)
return [ImageFrame(jvalue) for jvalue in jvalues]
class LocalImageFrame(ImageFrame):
"""
LocalImageFrame wraps a list of ImageFeature
"""
def __init__(self, image_list=None, label_list=None, jvalue=None, bigdl_type="float"):
assert jvalue or image_list, "jvalue and image_list cannot be None in the same time"
if jvalue:
self.value = jvalue
else:
# init from image ndarray list and label rdd(optional)
image_tensor_list = map(lambda image: JTensor.from_ndarray(image), image_list)
label_tensor_list = map(lambda label: JTensor.from_ndarray(label), label_list) if label_list else None
self.value = callBigDlFunc(bigdl_type, JavaValue.jvm_class_constructor(self),
image_tensor_list, label_tensor_list)
self.bigdl_type = bigdl_type
def get_image(self, float_key="floats", to_chw=True):
"""
get image list from ImageFrame
"""
tensors = callBigDlFunc(self.bigdl_type,
"localImageFrameToImageTensor", self.value, float_key, to_chw)
return map(lambda tensor: tensor.to_ndarray(), tensors)
def get_label(self):
"""
get label list from ImageFrame
"""
labels = callBigDlFunc(self.bigdl_type, "localImageFrameToLabelTensor", self.value)
return map(lambda tensor: tensor.to_ndarray(), labels)
def get_predict(self, key="predict"):
"""
get prediction list from ImageFrame
"""
predicts = callBigDlFunc(self.bigdl_type, "localImageFrameToPredict", self.value, key)
return map(lambda predict: (predict[0], predict[1].to_ndarray()) if predict[1] else (predict[0], None), predicts)
def get_sample(self, key="sample"):
return callBigDlFunc(self.bigdl_type, "localImageFrameToSample", self.value, key)
def get_uri(self, key = "uri"):
return callBigDlFunc(self.bigdl_type, "localImageFrameToUri", self.value, key)
def random_split(self, weights):
raise "random split not supported in LocalImageFrame"
class DistributedImageFrame(ImageFrame):
"""
DistributedImageFrame wraps an RDD of ImageFeature
"""
def __init__(self, image_rdd=None, label_rdd=None, jvalue=None, bigdl_type="float"):
assert jvalue or image_rdd, "jvalue and image_rdd cannot be None in the same time"
if jvalue:
self.value = jvalue
else:
# init from image ndarray rdd and label rdd(optional)
image_tensor_rdd = image_rdd.map(lambda image: JTensor.from_ndarray(image))
label_tensor_rdd = label_rdd.map(lambda label: JTensor.from_ndarray(label)) if label_rdd else None
self.value = callBigDlFunc(bigdl_type, JavaValue.jvm_class_constructor(self),
image_tensor_rdd, label_tensor_rdd)
self.bigdl_type = bigdl_type
def get_image(self, float_key="floats", to_chw=True):
"""
get image rdd from ImageFrame
"""
tensor_rdd = callBigDlFunc(self.bigdl_type,
"distributedImageFrameToImageTensorRdd", self.value, float_key, to_chw)
return tensor_rdd.map(lambda tensor: tensor.to_ndarray())
def get_label(self):
"""
get label rdd from ImageFrame
"""
tensor_rdd = callBigDlFunc(self.bigdl_type, "distributedImageFrameToLabelTensorRdd", self.value)
return tensor_rdd.map(lambda tensor: tensor.to_ndarray())
def get_predict(self, key="predict"):
"""
get prediction rdd from ImageFrame
"""
predicts = callBigDlFunc(self.bigdl_type, "distributedImageFrameToPredict", self.value, key)
return predicts.map(lambda predict: (predict[0], predict[1].to_ndarray()) if predict[1] else (predict[0], None))
def get_sample(self, key="sample"):
return callBigDlFunc(self.bigdl_type, "distributedImageFrameToSample", self.value, key)
def get_uri(self, key = "uri"):
return callBigDlFunc(self.bigdl_type, "distributedImageFrameToUri", self.value, key)
def random_split(self, weights):
return callBigDlFunc(self.bigdl_type, "distributedImageFrameRandomSplit", self.value, weights)
class HFlip(FeatureTransformer):
"""
Flip the image horizontally
"""
def __init__(self, bigdl_type="float"):
super(HFlip, self).__init__(bigdl_type)
class Resize(FeatureTransformer):
"""
Resize image
:param resize_h height after resize
:param resize_w width after resize
:param resize_mode if resizeMode = -1, random select a mode from (Imgproc.INTER_LINEAR,
Imgproc.INTER_CUBIC, Imgproc.INTER_AREA, Imgproc.INTER_NEAREST, Imgproc.INTER_LANCZOS4)
:param use_scale_factor if true, scale factors fx and fy are used, fx = fy = 0
note that the results of the following two calls are different
Imgproc.resize(mat, mat, new Size(resizeWH, resizeWH), 0, 0, Imgproc.INTER_LINEAR)
Imgproc.resize(mat, mat, new Size(resizeWH, resizeWH))
"""
def __init__(self, resize_h, resize_w, resize_mode = 1, use_scale_factor=True,
bigdl_type="float"):
super(Resize, self).__init__(bigdl_type, resize_h, resize_w, resize_mode, use_scale_factor)
class Brightness(FeatureTransformer):
"""
adjust the image brightness
:param delta_low brightness parameter: low bound
:param delta_high brightness parameter: high bound
"""
def __init__(self, delta_low, delta_high, bigdl_type="float"):
super(Brightness, self).__init__(bigdl_type, delta_low, delta_high)
class ChannelOrder(FeatureTransformer):
"""
random change the channel of an image
"""
def __init__(self, bigdl_type="float"):
super(ChannelOrder, self).__init__(bigdl_type)
class Contrast(FeatureTransformer):
"""
Adjust the image contrast
:param delta_low contrast parameter low bound
:param delta_high contrast parameter high bound
"""
def __init__(self, delta_low, delta_high, bigdl_type="float"):
super(Contrast, self).__init__(bigdl_type, delta_low, delta_high)
class Saturation(FeatureTransformer):
"""
Adjust image saturation
"""
def __init__(self, delta_low, delta_high, bigdl_type="float"):
super(Saturation, self).__init__(bigdl_type, delta_low, delta_high)
class Hue(FeatureTransformer):
"""
Adjust image hue
:param delta_low hue parameter: low bound
:param delta_high hue parameter: high bound
"""
def __init__(self, delta_low, delta_high, bigdl_type="float"):
super(Hue, self).__init__(bigdl_type, delta_low, delta_high)
class ChannelNormalize(FeatureTransformer):
"""
image channel normalize
:param mean_r mean value in R channel
:param mean_g mean value in G channel
:param mean_b mean value in B channel
:param std_r std value in R channel
:param std_g std value in G channel
:param std_b std value in B channel
"""
def __init__(self, mean_r, mean_g, mean_b, std_r=1.0, std_g=1.0, std_b=1.0, bigdl_type="float"):
super(ChannelNormalize, self).__init__(bigdl_type, mean_r, mean_g, mean_b, std_r, std_g, std_b)
class PixelNormalize(FeatureTransformer):
"""
Pixel level normalizer, data(i) = data(i) - mean(i)
:param means pixel level mean, following H * W * C order
"""
def __init__(self, means, bigdl_type="float"):
super(PixelNormalize, self).__init__(bigdl_type, means)
class RandomCrop(FeatureTransformer):
"""
Random crop a `cropWidth` x `cropHeight` patch from an image.
The patch size should be less than the image size.
:param crop_width width after crop
:param crop_height height after crop
:param is_clip whether to clip the roi to image boundaries
"""
def __init__(self, crop_width, crop_height, is_clip=True, bigdl_type="float"):
super(RandomCrop, self).__init__(bigdl_type, crop_width, crop_height, is_clip)
class CenterCrop(FeatureTransformer):
"""
Crop a `cropWidth` x `cropHeight` patch from center of image.
The patch size should be less than the image size.
:param crop_width width after crop
:param crop_height height after crop
:param is_clip clip cropping box boundary
"""
def __init__(self, crop_width, crop_height, is_clip=True, bigdl_type="float"):
super(CenterCrop, self).__init__(bigdl_type, crop_width, crop_height, is_clip)
class FixedCrop(FeatureTransformer):
"""
Crop a fixed area of image
:param x1 start in width
:param y1 start in height
:param x2 end in width
:param y2 end in height
:param normalized whether args are normalized, i.e. in range [0, 1]
:param is_clip whether to clip the roi to image boundaries
"""
def __init__(self, x1, y1, x2, y2, normalized=True, is_clip=True, bigdl_type="float"):
super(FixedCrop, self).__init__(bigdl_type, x1, y1, x2, y2, normalized, is_clip)
class DetectionCrop(FeatureTransformer):
"""
Crop from object detections; each image should have a tensor detection,
which is stored in ImageFeature
:param roi_key key that maps a tensor detection
:param normalized whether the detection is normalized, i.e. in range [0, 1]
"""
def __init__(self, roi_key, normalized=True, bigdl_type="float"):
super(DetectionCrop, self).__init__(bigdl_type, roi_key, normalized)
class Expand(FeatureTransformer):
"""
expand image, fill the blank part with the meanR, meanG, meanB
:param means_r means in R channel
:param means_g means in G channel
:param means_b means in B channel
:param min_expand_ratio min expand ratio
:param max_expand_ratio max expand ratio
"""
def __init__(self, means_r=123, means_g=117, means_b=104,
min_expand_ratio=1.0,
max_expand_ratio=4.0, bigdl_type="float"):
super(Expand, self).__init__(bigdl_type, means_r, means_g, means_b,
min_expand_ratio, max_expand_ratio)
class Filler(FeatureTransformer):
"""
Fill part of image with certain pixel value
:param start_x start x ratio
:param start_y start y ratio
:param end_x end x ratio
:param end_y end y ratio
:param value filling value
"""
def __init__(self, start_x, start_y, end_x, end_y, value = 255, bigdl_type="float"):
super(Filler, self).__init__(bigdl_type, start_x,
start_y,
end_x,
end_y,
value)
class RandomTransformer(FeatureTransformer):
"""
It is a wrapper for transformers to control the transform probability
:param transformer transformer to apply randomness
:param prob max prob
"""
def __init__(self, transformer, prob, bigdl_type="float"):
super(RandomTransformer, self).__init__(bigdl_type, transformer, prob)
class ColorJitter(FeatureTransformer):
"""
Random adjust brightness, contrast, hue, saturation
:param brightness_prob probability to adjust brightness
:param brightness_delta brightness parameter
:param contrast_prob probability to adjust contrast
:param contrast_lower contrast lower parameter
:param contrast_upper contrast upper parameter
:param hue_prob probability to adjust hue
:param hue_delta hue parameter
:param saturation_prob probability to adjust saturation
:param saturation_lower saturation lower parameter
:param saturation_upper saturation upper parameter
:param random_order_prob random order for different operation
:param shuffle shuffle the transformers
"""
def __init__(self, brightness_prob = 0.5,
brightness_delta = 32.0,
contrast_prob = 0.5,
contrast_lower = 0.5,
contrast_upper = 1.5,
hue_prob = 0.5,
hue_delta = 18.0,
saturation_prob = 0.5,
saturation_lower = 0.5,
saturation_upper = 1.5,
random_order_prob = 0.0,
shuffle = False,
bigdl_type="float"):
super(ColorJitter, self).__init__(bigdl_type, brightness_prob,
brightness_delta,
contrast_prob,
contrast_lower,
contrast_upper,
hue_prob,
hue_delta,
saturation_prob,
saturation_lower,
saturation_upper,
random_order_prob,
shuffle)
class RandomSampler(FeatureTransformer):
"""
Random sample a bounding box given some constraints and crop the image
This is used in SSD training augmentation
"""
def __init__(self, bigdl_type="float"):
super(RandomSampler, self).__init__(bigdl_type)
class RoiProject(FeatureTransformer):
"""
Project gt boxes onto the coordinate system defined by image boundary
:param need_meet_center_constraint whether the center constraint needs to be met, i.e., the center of the gt box must lie within the image boundary
"""
def __init__(self, need_meet_center_constraint, bigdl_type="float"):
super(RoiProject, self).__init__(bigdl_type, need_meet_center_constraint)
class RoiHFlip(FeatureTransformer):
"""
horizontally flip the roi
:param normalized whether the roi is normalized, i.e. in range [0, 1]
"""
def __init__(self, normalized=True, bigdl_type="float"):
super(RoiHFlip, self).__init__(bigdl_type, normalized)
class RoiResize(FeatureTransformer):
"""
resize the roi according to scale
:param normalized whether the roi is normalized, i.e. in range [0, 1]
"""
def __init__(self, normalized=True, bigdl_type="float"):
super(RoiResize, self).__init__(bigdl_type, normalized)
class RoiNormalize(FeatureTransformer):
"""
Normalize Roi to [0, 1]
"""
def __init__(self, bigdl_type="float"):
super(RoiNormalize, self).__init__(bigdl_type)
class MatToFloats(FeatureTransformer):
"""
Transform OpenCVMat to float array, note that in this transformer, the mat is released
:param valid_height valid height in case the mat is invalid
:param valid_width valid width in case the mat is invalid
:param valid_channel valid channel in case the mat is invalid
:param out_key key to store float array
:param share_buffer share buffer of output
"""
def __init__(self, valid_height=300, valid_width=300, valid_channel=300,
out_key = "floats", share_buffer=True, bigdl_type="float"):
super(MatToFloats, self).__init__(bigdl_type, valid_height, valid_width, valid_channel,
out_key, share_buffer)
class MatToTensor(FeatureTransformer):
"""
transform opencv mat to tensor
    :param to_rgb whether to convert BGR to RGB (default is False, i.e. keep BGR)
:param tensor_key key to store transformed tensor
"""
def __init__(self, to_rgb=False, tensor_key="imageTensor", bigdl_type="float"):
super(MatToTensor, self).__init__(bigdl_type, to_rgb, tensor_key)
class AspectScale(FeatureTransformer):
"""
Resize the image, keep the aspect ratio. scale according to the short edge
:param min_size scale size, apply to short edge
:param scale_multiple_of make the scaled size multiple of some value
:param max_size max size after scale
:param resize_mode if resizeMode = -1, random select a mode from
(Imgproc.INTER_LINEAR, Imgproc.INTER_CUBIC, Imgproc.INTER_AREA,
Imgproc.INTER_NEAREST, Imgproc.INTER_LANCZOS4)
    :param use_scale_factor if true, the scale factors fx and fy are used (fx = fy = 0)
    :param min_scale control the minimum scale up for image
"""
def __init__(self, min_size, scale_multiple_of = 1, max_size = 1000,
resize_mode = 1, use_scale_factor=True, min_scale=-1.0,
bigdl_type="float"):
super(AspectScale, self).__init__(bigdl_type, min_size, scale_multiple_of, max_size,
resize_mode, use_scale_factor, min_scale)
class RandomAspectScale(FeatureTransformer):
"""
resize the image by randomly choosing a scale
    :param scales array of scale options to randomly choose from
    :param scale_multiple_of resize images so that width and height are multiples of this value
    :param max_size max pixel size of the longest side of a scaled input image
"""
def __init__(self, scales, scale_multiple_of = 1, max_size = 1000, bigdl_type="float"):
super(RandomAspectScale, self).__init__(bigdl_type, scales, scale_multiple_of, max_size)
class BytesToMat(FeatureTransformer):
"""
Transform byte array(original image file in byte) to OpenCVMat
:param byte_key key that maps byte array
"""
def __init__(self, byte_key = "bytes", bigdl_type="float"):
super(BytesToMat, self).__init__(bigdl_type, byte_key)
class ImageFrameToSample(FeatureTransformer):
"""
transform imageframe to samples
:param input_keys keys that maps inputs (each input should be a tensor)
:param target_keys keys that maps targets (each target should be a tensor)
:param sample_key key to store sample
"""
def __init__(self, input_keys=["imageTensor"], target_keys=None,
sample_key="sample", bigdl_type="float"):
super(ImageFrameToSample, self).__init__(bigdl_type, input_keys, target_keys, sample_key)
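# Illustrative usage sketch (assumes `frame` is an ImageFrame and that the base FeatureTransformer
# exposes a transform(image_frame) method; the variable names here are examples only):
#   to_tensor = MatToTensor(to_rgb=True)
#   to_sample = ImageFrameToSample(input_keys=["imageTensor"])
#   frame = to_tensor.transform(frame)
#   frame = to_sample.transform(frame)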
class PixelBytesToMat(FeatureTransformer):
"""
Transform byte array(pixels in byte) to OpenCVMat
:param byte_key key that maps byte array
"""
def __init__(self, byte_key = "bytes", bigdl_type="float"):
super(PixelBytesToMat, self).__init__(bigdl_type, byte_key)
class FixExpand(FeatureTransformer):
"""
    Expand image with the given expand_height and expand_width,
    put the original image at the center of the expanded image
:param expand_height height expand to
:param expand_width width expand to
"""
def __init__(self, expand_height, expand_width, bigdl_type="float"):
super(FixExpand, self).__init__(bigdl_type, expand_height, expand_width)
class ChannelScaledNormalizer(FeatureTransformer):
"""
Scaled image at channel level with offset and scale
:param mean_r : offset for R channel
:param mean_g : offset for G channel
:param mean_b: offset for B channel
:param scale: scaling factor for all channels
"""
def __init__(self, mean_r, mean_g, mean_b, scale, bigdl_type="float"):
super(ChannelScaledNormalizer, self).__init__(bigdl_type, mean_r, mean_g, mean_b, scale)
class RandomAlterAspect(FeatureTransformer):
"""
    Apply a random crop based on area ratio and resize to crop_length size
:param min_area_ratio min area ratio
:param max_area_ratio max area ratio
:param min_aspect_ratio_change factor applied to ratio area
:param interp_mode interp mode applied in resize
:param crop_length final size resized to
"""
def __init__(self, min_area_ratio,
max_area_ratio,
min_aspect_ratio_change,
interp_mode,
crop_length, bigdl_type="float"):
super(RandomAlterAspect, self).__init__(bigdl_type, min_area_ratio,
max_area_ratio,
min_aspect_ratio_change,
interp_mode,
crop_length)
class RandomCropper(FeatureTransformer):
"""
Random cropper on uniform distribution with fixed height & width
:param crop_w width cropped to
:param crop_h height cropped to
:param mirror whether mirror
:param cropper_method crop method
:param channels total channels
"""
def __init__(self, crop_w, crop_h, mirror, cropper_method, channels, bigdl_type="float"):
super(RandomCropper, self).__init__(bigdl_type, crop_w, crop_h, mirror, cropper_method, channels)
class RandomResize(FeatureTransformer):
"""
    Randomly resize between min_size and max_size and scale height and width to each other
:param min_size min size to resize to
:param max_size max size to resize to
"""
def __init__(self, min_size, max_size, bigdl_type="float"):
super(RandomResize, self).__init__(bigdl_type, min_size, max_size)
class SeqFileFolder(JavaValue):
@classmethod
def files_to_image_frame(cls,
url,
sc,
class_num,
partition_num=-1,
bigdl_type="float"):
"""
Extract hadoop sequence files from an HDFS path as ImageFrame
:param url: sequence files folder path
:param sc: spark context
:param class_num: class number of data
:param partition_num: partition number, default: Engine.nodeNumber() * Engine.coreNumber()
"""
jvalue = callBigDlFunc(bigdl_type,
"seqFilesToImageFrame",
url,
sc,
class_num,
partition_num)
return ImageFrame(jvalue=jvalue)
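    # Illustrative usage sketch (assumes a SparkContext `sc` and a folder of hadoop sequence files;
    # the HDFS url below is a placeholder):
    #   frame = SeqFileFolder.files_to_image_frame("hdfs://namenode:9000/imagenet/seq", sc, class_num=1000)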
| """
Read images as Image Frame
        if sc is defined, read images as a DistributedImageFrame from the local file system or HDFS
        if sc is null, read images as a LocalImageFrame from the local file system
        :param path path to read images
        if sc is defined, path can be local or HDFS. Wildcard characters are supported.
        if sc is null, path is a local directory/image file/image file with wildcard characters
:param sc SparkContext
:param min_partitions A suggestion value of the minimal splitting number for input data.
:return ImageFrame
"""
return ImageFrame(jvalue=callBigDlFunc(bigdl_type, "read", path, sc, min_partitions)) |
app.module.ts | import {BrowserModule} from '@angular/platform-browser';
import {NgModule} from '@angular/core';
import {AppRoutingModule} from './app-routing.module';
import {AppComponent} from './app.component';
import {TemplateHeadComponent} from './layout/app-layout/template-head/template-head.component';
import {TemplateTopnavComponent} from './layout/app-layout/template-topnav/template-topnav.component';
import {TemplateSidenavComponent} from './layout/app-layout/template-sidenav/template-sidenav.component';
import {AppLayoutComponent} from './layout/app-layout/app-layout.component';
import {FormsModule, ReactiveFormsModule} from '@angular/forms';
import {HttpClientModule, HttpClientXsrfModule} from '@angular/common/http';
import {CalendarModule, DateAdapter} from 'angular-calendar';
import {adapterFactory} from 'angular-calendar/date-adapters/date-fns';
import {NgbModalModule} from '@ng-bootstrap/ng-bootstrap';
import {OwlDateTimeModule, OwlNativeDateTimeModule} from 'ng-pick-datetime';
import {
MatAutocompleteModule,
MatBadgeModule,
MatBottomSheetModule,
MatButtonModule,
MatButtonToggleModule,
MatCardModule,
MatCheckboxModule,
MatChipsModule,
MatDatepickerModule,
MatDialogModule,
MatDividerModule,
MatExpansionModule,
MatGridListModule,
MatIconModule,
MatInputModule,
MatListModule,
MatMenuModule,
MatNativeDateModule,
MatPaginatorModule,
MatProgressBarModule,
MatProgressSpinnerModule,
MatRadioModule,
MatRippleModule,
MatSelectModule,
MatSidenavModule,
MatSliderModule,
MatSlideToggleModule,
MatSnackBarModule,
MatSortModule,
MatStepperModule,
MatTableModule,
MatTabsModule,
MatToolbarModule,
MatTooltipModule,
MatTreeModule,
} from '@angular/material';
import {Ng2SearchPipeModule} from 'ng2-search-filter';
import {NgxPaginationModule} from 'ngx-pagination';
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
import {StockListComponent} from './componant/stock-list/stock-list.component';
import {StockService} from './service/stock.service';
@NgModule({
imports: [
BrowserModule,
AppRoutingModule,
HttpClientModule,
FormsModule,
ReactiveFormsModule,
BrowserAnimationsModule,
OwlDateTimeModule,
MatAutocompleteModule,
MatBadgeModule,
MatBottomSheetModule,
MatButtonModule,
MatButtonToggleModule,
MatCardModule,
MatCheckboxModule,
MatChipsModule,
NgxPaginationModule,
MatDatepickerModule,
MatDialogModule,
MatDividerModule,
Ng2SearchPipeModule,
MatExpansionModule,
MatGridListModule,
MatIconModule,
MatInputModule,
MatListModule,
MatMenuModule,
MatNativeDateModule,
MatPaginatorModule,
MatProgressBarModule,
MatProgressSpinnerModule,
MatRadioModule,
MatRippleModule,
MatSelectModule,
MatSidenavModule,
MatSliderModule,
MatSlideToggleModule,
MatSnackBarModule,
MatSortModule,
MatStepperModule,
MatTableModule,
MatTabsModule,
MatToolbarModule,
MatTooltipModule,
MatTreeModule,
OwlNativeDateTimeModule,
HttpClientXsrfModule.withOptions({
cookieName: 'XSRF-TOKEN',
headerName: 'X-XSRF-TOKEN'
}),
CalendarModule.forRoot({
provide: DateAdapter,
useFactory: adapterFactory
}),
NgbModalModule
],
declarations: [
AppComponent,
TemplateHeadComponent,
TemplateTopnavComponent,
TemplateSidenavComponent,
AppLayoutComponent,
StockListComponent,
],
providers: [
StockService
],
bootstrap: [
AppComponent
]
})
export class | {
}
| AppModule |
CommonServerPython.py | """Common functions script
This script will be appended to each server script before being executed.
Please notice that to add custom common code, add it to the CommonServerUserPython script.
Note that adding code to CommonServerUserPython can override functions in CommonServerPython
"""
from __future__ import print_function
import base64
import json
import logging
import os
import re
import socket
import sys
import time
import traceback
from random import randint
import xml.etree.cElementTree as ET
from collections import OrderedDict
from datetime import datetime, timedelta
from abc import abstractmethod
from distutils.version import LooseVersion
from threading import Lock
import demistomock as demisto
import warnings
class WarningsHandler(object):
# Wrapper to handle warnings. We use a class to cleanup after execution
@staticmethod
def handle_warning(message, category, filename, lineno, file=None, line=None):
try:
msg = warnings.formatwarning(message, category, filename, lineno, line)
demisto.info("python warning: " + msg)
except Exception:
# ignore the warning if it can't be handled for some reason
pass
def __init__(self):
self.org_handler = warnings.showwarning
warnings.showwarning = WarningsHandler.handle_warning
def __del__(self):
warnings.showwarning = self.org_handler
_warnings_handler = WarningsHandler()
# ignore warnings from logging as a result of not being setup
logging.raiseExceptions = False
# imports something that can be missed from docker image
try:
import requests
from requests.adapters import HTTPAdapter
from urllib3.util import Retry
from typing import Optional, Dict, List, Any, Union, Set
import dateparser
from datetime import timezone # type: ignore
except Exception:
if sys.version_info[0] < 3:
# in python 2 an exception in the imports might still be raised even though it is caught.
# for more info see https://cosmicpercolator.com/2016/01/13/exception-leaks-in-python-2-and-3/
sys.exc_clear()
CONTENT_RELEASE_VERSION = '0.0.0'
CONTENT_BRANCH_NAME = 'master'
IS_PY3 = sys.version_info[0] == 3
STIX_PREFIX = "STIX "
# pylint: disable=undefined-variable
ZERO = timedelta(0)
HOUR = timedelta(hours=1)
if IS_PY3:
STRING_TYPES = (str, bytes) # type: ignore
STRING_OBJ_TYPES = (str,)
else:
STRING_TYPES = (str, unicode) # type: ignore # noqa: F821
STRING_OBJ_TYPES = STRING_TYPES # type: ignore
# pylint: enable=undefined-variable
# DEPRECATED - use EntryType enum instead
entryTypes = {
'note': 1,
'downloadAgent': 2,
'file': 3,
'error': 4,
'pinned': 5,
'userManagement': 6,
'image': 7,
'playgroundError': 8,
'entryInfoFile': 9,
'warning': 11,
'map': 15,
'widget': 17
}
ENDPOINT_STATUS_OPTIONS = [
'Online',
'Offline'
]
ENDPOINT_ISISOLATED_OPTIONS = [
'Yes',
'No',
'Pending isolation',
'Pending unisolation'
]
class EntryType(object):
"""
Enum: contains all the entry types (e.g. NOTE, ERROR, WARNING, FILE, etc.)
:return: None
:rtype: ``None``
"""
NOTE = 1
DOWNLOAD_AGENT = 2
FILE = 3
ERROR = 4
PINNED = 5
USER_MANAGEMENT = 6
IMAGE = 7
PLAYGROUND_ERROR = 8
ENTRY_INFO_FILE = 9
WARNING = 11
MAP_ENTRY_TYPE = 15
WIDGET = 17
class IncidentStatus(object):
"""
Enum: contains all the incidents status types (e.g. pending, active, done, archive)
:return: None
:rtype: ``None``
"""
PENDING = 0
ACTIVE = 1
DONE = 2
ARCHIVE = 3
class IncidentSeverity(object):
"""
Enum: contains all the incident severity types
:return: None
:rtype: ``None``
"""
UNKNOWN = 0
INFO = 0.5
LOW = 1
MEDIUM = 2
HIGH = 3
CRITICAL = 4
# DEPRECATED - use EntryFormat enum instead
formats = {
'html': 'html',
'table': 'table',
'json': 'json',
'text': 'text',
'dbotResponse': 'dbotCommandResponse',
'markdown': 'markdown'
}
class EntryFormat(object):
"""
Enum: contains all the entry formats (e.g. HTML, TABLE, JSON, etc.)
"""
HTML = 'html'
TABLE = 'table'
JSON = 'json'
TEXT = 'text'
DBOT_RESPONSE = 'dbotCommandResponse'
MARKDOWN = 'markdown'
@classmethod
def is_valid_type(cls, _type):
# type: (str) -> bool
return _type in (
EntryFormat.HTML,
EntryFormat.TABLE,
EntryFormat.JSON,
EntryFormat.TEXT,
EntryFormat.MARKDOWN,
EntryFormat.DBOT_RESPONSE
)
brands = {
'xfe': 'xfe',
'vt': 'virustotal',
'wf': 'WildFire',
'cy': 'cylance',
'cs': 'crowdstrike-intel'
}
providers = {
'xfe': 'IBM X-Force Exchange',
'vt': 'VirusTotal',
'wf': 'WildFire',
'cy': 'Cylance',
'cs': 'CrowdStrike'
}
thresholds = {
'xfeScore': 4,
'vtPositives': 10,
'vtPositiveUrlsForIP': 30
}
class DBotScoreType(object):
"""
Enum: contains all the indicator types
DBotScoreType.IP
DBotScoreType.FILE
DBotScoreType.DOMAIN
DBotScoreType.URL
DBotScoreType.CVE
DBotScoreType.ACCOUNT
DBotScoreType.CRYPTOCURRENCY
DBotScoreType.EMAIL
DBotScoreType.ATTACKPATTERN
DBotScoreType.CUSTOM
:return: None
:rtype: ``None``
"""
IP = 'ip'
FILE = 'file'
DOMAIN = 'domain'
URL = 'url'
CVE = 'cve'
ACCOUNT = 'account'
    CIDR = 'cidr'
DOMAINGLOB = 'domainglob'
CERTIFICATE = 'certificate'
CRYPTOCURRENCY = 'cryptocurrency'
EMAIL = 'email'
ATTACKPATTERN = 'attackpattern'
CUSTOM = 'custom'
def __init__(self):
# required to create __init__ for create_server_docs.py purpose
pass
@classmethod
def is_valid_type(cls, _type):
# type: (str) -> bool
return _type in (
DBotScoreType.IP,
DBotScoreType.FILE,
DBotScoreType.DOMAIN,
DBotScoreType.URL,
DBotScoreType.CVE,
DBotScoreType.ACCOUNT,
DBotScoreType.CIDR,
DBotScoreType.DOMAINGLOB,
DBotScoreType.CERTIFICATE,
DBotScoreType.CRYPTOCURRENCY,
DBotScoreType.EMAIL,
DBotScoreType.ATTACKPATTERN,
DBotScoreType.CUSTOM,
)
class DBotScoreReliability(object):
"""
Enum: Source reliability levels
Values are case sensitive
:return: None
:rtype: ``None``
"""
A_PLUS = 'A+ - 3rd party enrichment'
A = 'A - Completely reliable'
B = 'B - Usually reliable'
C = 'C - Fairly reliable'
D = 'D - Not usually reliable'
E = 'E - Unreliable'
F = 'F - Reliability cannot be judged'
def __init__(self):
# required to create __init__ for create_server_docs.py purpose
pass
@staticmethod
def is_valid_type(_type):
# type: (str) -> bool
return _type in (
DBotScoreReliability.A_PLUS,
DBotScoreReliability.A,
DBotScoreReliability.B,
DBotScoreReliability.C,
DBotScoreReliability.D,
DBotScoreReliability.E,
DBotScoreReliability.F,
)
@staticmethod
def get_dbot_score_reliability_from_str(reliability_str):
if reliability_str == DBotScoreReliability.A_PLUS:
return DBotScoreReliability.A_PLUS
elif reliability_str == DBotScoreReliability.A:
return DBotScoreReliability.A
elif reliability_str == DBotScoreReliability.B:
return DBotScoreReliability.B
elif reliability_str == DBotScoreReliability.C:
return DBotScoreReliability.C
elif reliability_str == DBotScoreReliability.D:
return DBotScoreReliability.D
elif reliability_str == DBotScoreReliability.E:
return DBotScoreReliability.E
elif reliability_str == DBotScoreReliability.F:
return DBotScoreReliability.F
raise Exception("Please use supported reliability only.")
INDICATOR_TYPE_TO_CONTEXT_KEY = {
'ip': 'Address',
'email': 'Address',
'url': 'Data',
'domain': 'Name',
'cve': 'ID',
'md5': 'file',
'sha1': 'file',
'sha256': 'file',
'crc32': 'file',
'sha512': 'file',
'ctph': 'file',
'ssdeep': 'file'
}
class FeedIndicatorType(object):
"""Type of Indicator (Reputations), used in TIP integrations"""
Account = "Account"
CVE = "CVE"
Domain = "Domain"
DomainGlob = "DomainGlob"
Email = "Email"
File = "File"
FQDN = "Domain"
Host = "Host"
IP = "IP"
CIDR = "CIDR"
IPv6 = "IPv6"
IPv6CIDR = "IPv6CIDR"
Registry = "Registry Key"
SSDeep = "ssdeep"
URL = "URL"
@staticmethod
def is_valid_type(_type):
return _type in (
FeedIndicatorType.Account,
FeedIndicatorType.CVE,
FeedIndicatorType.Domain,
FeedIndicatorType.DomainGlob,
FeedIndicatorType.Email,
FeedIndicatorType.File,
FeedIndicatorType.Host,
FeedIndicatorType.IP,
FeedIndicatorType.CIDR,
FeedIndicatorType.IPv6,
FeedIndicatorType.IPv6CIDR,
FeedIndicatorType.Registry,
FeedIndicatorType.SSDeep,
FeedIndicatorType.URL
)
@staticmethod
def list_all_supported_indicators():
indicator_types = []
for key, val in vars(FeedIndicatorType).items():
if not key.startswith('__') and type(val) == str:
indicator_types.append(val)
return indicator_types
@staticmethod
def ip_to_indicator_type(ip):
"""Returns the indicator type of the input IP.
:type ip: ``str``
        :param ip: IP address to get its indicator type.
        :return: Indicator type from FeedIndicatorType, or None if the IP address is invalid.
:rtype: ``str``
"""
if re.match(ipv4cidrRegex, ip):
return FeedIndicatorType.CIDR
elif re.match(ipv4Regex, ip):
return FeedIndicatorType.IP
elif re.match(ipv6cidrRegex, ip):
return FeedIndicatorType.IPv6CIDR
elif re.match(ipv6Regex, ip):
return FeedIndicatorType.IPv6
else:
return None
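    # Illustrative usage (the ipv4/ipv6 regex patterns referenced above are defined elsewhere in this file):
    #   FeedIndicatorType.ip_to_indicator_type('8.8.8.8')      # -> 'IP'
    #   FeedIndicatorType.ip_to_indicator_type('10.0.0.0/8')   # -> 'CIDR'
    #   FeedIndicatorType.ip_to_indicator_type('not-an-ip')    # -> None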
@staticmethod
def indicator_type_by_server_version(indicator_type):
"""Returns the indicator type of the input by the server version.
        If the server version is 6.2 or greater, remove the STIX prefix of the type
        :type indicator_type: ``str``
        :param indicator_type: Type of an indicator.
        :return: The indicator type.
:rtype: ``str``
"""
if is_demisto_version_ge("6.2.0") and indicator_type.startswith(STIX_PREFIX):
return indicator_type[len(STIX_PREFIX):]
return indicator_type
# -------------------------------- Threat Intel Objects ----------------------------------- #
class ThreatIntel:
"""
XSOAR Threat Intel Objects
:return: None
:rtype: ``None``
"""
class ObjectsNames(object):
"""
Enum: Threat Intel Objects names.
:return: None
:rtype: ``None``
"""
CAMPAIGN = 'Campaign'
ATTACK_PATTERN = 'Attack Pattern'
REPORT = 'Report'
MALWARE = 'Malware'
COURSE_OF_ACTION = 'Course of Action'
INTRUSION_SET = 'Intrusion Set'
TOOL = 'Tool'
THREAT_ACTOR = 'Threat Actor'
INFRASTRUCTURE = 'Infrastructure'
class ObjectsScore(object):
"""
Enum: Threat Intel Objects Score.
:return: None
:rtype: ``None``
"""
CAMPAIGN = 3
ATTACK_PATTERN = 2
REPORT = 3
MALWARE = 3
COURSE_OF_ACTION = 0
INTRUSION_SET = 3
TOOL = 2
THREAT_ACTOR = 3
INFRASTRUCTURE = 2
class KillChainPhases(object):
"""
Enum: Kill Chain Phases names.
:return: None
:rtype: ``None``
"""
BUILD_CAPABILITIES = "Build Capabilities"
PRIVILEGE_ESCALATION = "Privilege Escalation"
ADVERSARY_OPSEC = "Adversary Opsec"
CREDENTIAL_ACCESS = "Credential Access"
EXFILTRATION = "Exfiltration"
LATERAL_MOVEMENT = "Lateral Movement"
DEFENSE_EVASION = "Defense Evasion"
PERSISTENCE = "Persistence"
COLLECTION = "Collection"
IMPACT = "Impact"
INITIAL_ACCESS = "Initial Access"
DISCOVERY = "Discovery"
EXECUTION = "Execution"
INSTALLATION = "Installation"
DELIVERY = "Delivery"
WEAPONIZATION = "Weaponization"
ACT_ON_OBJECTIVES = "Actions on Objectives"
COMMAND_AND_CONTROL = "Command \u0026 Control"
def is_debug_mode():
"""Return if this script/command was passed debug-mode=true option
:return: true if debug-mode is enabled
:rtype: ``bool``
"""
# use `hasattr(demisto, 'is_debug')` to ensure compatibility with server version <= 4.5
return hasattr(demisto, 'is_debug') and demisto.is_debug
def get_schedule_metadata(context):
"""
Get the entry schedule metadata if available
:type context: ``dict``
:param context: Context in which the command was executed.
:return: Dict with metadata of scheduled entry
:rtype: ``dict``
"""
schedule_metadata = {}
parent_entry = context.get('ParentEntry', {})
if parent_entry:
schedule_metadata = assign_params(
is_polling=True if parent_entry.get('polling') else False,
polling_command=parent_entry.get('pollingCommand'),
polling_args=parent_entry.get('pollingArgs'),
times_ran=int(parent_entry.get('timesRan', 0)) + 1,
start_date=parent_entry.get('startDate'),
end_date=parent_entry.get('endingDate')
)
return schedule_metadata
def auto_detect_indicator_type(indicator_value):
"""
Infer the type of the indicator.
:type indicator_value: ``str``
:param indicator_value: The indicator whose type we want to check. (required)
:return: The type of the indicator.
:rtype: ``str``
"""
try:
import tldextract
except Exception:
raise Exception("Missing tldextract module, In order to use the auto detect function please use a docker"
" image with it installed such as: demisto/jmespath")
if re.match(ipv4cidrRegex, indicator_value):
return FeedIndicatorType.CIDR
if re.match(ipv6cidrRegex, indicator_value):
return FeedIndicatorType.IPv6CIDR
if re.match(ipv4Regex, indicator_value):
return FeedIndicatorType.IP
if re.match(ipv6Regex, indicator_value):
return FeedIndicatorType.IPv6
if re.match(sha256Regex, indicator_value):
return FeedIndicatorType.File
if re.match(urlRegex, indicator_value):
return FeedIndicatorType.URL
if re.match(md5Regex, indicator_value):
return FeedIndicatorType.File
if re.match(sha1Regex, indicator_value):
return FeedIndicatorType.File
if re.match(emailRegex, indicator_value):
return FeedIndicatorType.Email
if re.match(cveRegex, indicator_value):
return FeedIndicatorType.CVE
if re.match(sha512Regex, indicator_value):
return FeedIndicatorType.File
try:
tldextract_version = tldextract.__version__
if LooseVersion(tldextract_version) < '3.0.0':
no_cache_extract = tldextract.TLDExtract(cache_file=False, suffix_list_urls=None)
else:
no_cache_extract = tldextract.TLDExtract(cache_dir=False, suffix_list_urls=None)
if no_cache_extract(indicator_value).suffix:
if '*' in indicator_value:
return FeedIndicatorType.DomainGlob
return FeedIndicatorType.Domain
except Exception:
demisto.debug('tldextract failed to detect indicator type. indicator value: {}'.format(indicator_value))
demisto.debug('Failed to detect indicator type. Indicator value: {}'.format(indicator_value))
return None
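# Illustrative usage (requires a docker image with tldextract installed, as noted above;
# results depend on the regex patterns defined elsewhere in this file):
#   auto_detect_indicator_type('1.2.3.4')       # -> FeedIndicatorType.IP
#   auto_detect_indicator_type('example.com')   # -> FeedIndicatorType.Domain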
def add_http_prefix_if_missing(address=''):
"""
This function adds `http://` prefix to the proxy address in case it is missing.
:type address: ``string``
:param address: Proxy address.
:return: proxy address after the 'http://' prefix was added, if needed.
:rtype: ``string``
"""
PROXY_PREFIXES = ['http://', 'https://', 'socks5://', 'socks5h://', 'socks4://', 'socks4a://']
if not address:
return ''
for prefix in PROXY_PREFIXES:
if address.startswith(prefix):
return address
return 'http://' + address
def handle_proxy(proxy_param_name='proxy', checkbox_default_value=False, handle_insecure=True,
insecure_param_name=None):
"""
Handle logic for routing traffic through the system proxy.
Should usually be called at the beginning of the integration, depending on proxy checkbox state.
    Additionally will unset env variables REQUESTS_CA_BUNDLE and CURL_CA_BUNDLE if handle_insecure is specified (default).
    This is needed as when these variables are set and a requests.Session object is used, requests will ignore the
    Session.verify setting. See: https://github.com/psf/requests/blob/master/requests/sessions.py#L703
:type proxy_param_name: ``string``
:param proxy_param_name: name of the "use system proxy" integration parameter
:type checkbox_default_value: ``bool``
:param checkbox_default_value: Default value of the proxy param checkbox
:type handle_insecure: ``bool``
:param handle_insecure: Whether to check the insecure param and unset env variables
:type insecure_param_name: ``string``
:param insecure_param_name: Name of insecure param. If None will search insecure and unsecure
:return: proxies dict for the 'proxies' parameter of 'requests' functions
:rtype: ``dict``
"""
proxies = {} # type: dict
if demisto.params().get(proxy_param_name, checkbox_default_value):
ensure_proxy_has_http_prefix()
proxies = {
'http': os.environ.get('HTTP_PROXY') or os.environ.get('http_proxy', ''),
'https': os.environ.get('HTTPS_PROXY') or os.environ.get('https_proxy', '')
}
else:
skip_proxy()
if handle_insecure:
if insecure_param_name is None:
param_names = ('insecure', 'unsecure')
else:
param_names = (insecure_param_name,) # type: ignore[assignment]
for p in param_names:
if demisto.params().get(p, False):
skip_cert_verification()
return proxies
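# Illustrative usage at integration startup (assumes the standard "proxy"/"insecure" integration parameters):
#   proxies = handle_proxy()
#   res = requests.get(base_url, proxies=proxies, verify=not demisto.params().get('insecure', False))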
def skip_proxy():
"""
    The function deletes the proxy environment vars so that http requests skip routing through the proxy
:return: None
:rtype: ``None``
"""
for k in ('HTTP_PROXY', 'HTTPS_PROXY', 'http_proxy', 'https_proxy'):
if k in os.environ:
del os.environ[k]
def ensure_proxy_has_http_prefix():
"""
The function checks if proxy environment vars are missing http/https prefixes, and adds http if so.
:return: None
:rtype: ``None``
"""
for k in ('HTTP_PROXY', 'HTTPS_PROXY', 'http_proxy', 'https_proxy'):
if k in os.environ:
proxy_env_var = os.getenv(k)
if proxy_env_var:
os.environ[k] = add_http_prefix_if_missing(os.environ[k])
def skip_cert_verification():
"""
    The function deletes the self-signed certificate env vars so that http requests skip certificate validation.
:return: None
:rtype: ``None``
"""
for k in ('REQUESTS_CA_BUNDLE', 'CURL_CA_BUNDLE'):
if k in os.environ:
del os.environ[k]
def urljoin(url, suffix=""):
"""
Will join url and its suffix
Example:
"https://google.com/", "/" => "https://google.com/"
"https://google.com", "/" => "https://google.com/"
"https://google.com", "api" => "https://google.com/api"
"https://google.com", "/api" => "https://google.com/api"
"https://google.com/", "api" => "https://google.com/api"
"https://google.com/", "/api" => "https://google.com/api"
:type url: ``string``
:param url: URL string (required)
:type suffix: ``string``
:param suffix: the second part of the url
:return: Full joined url
:rtype: ``string``
"""
if url[-1:] != "/":
url = url + "/"
if suffix.startswith("/"):
suffix = suffix[1:]
return url + suffix
return url + suffix
def positiveUrl(entry):
"""
Checks if the given entry from a URL reputation query is positive (known bad) (deprecated)
:type entry: ``dict``
:param entry: URL entry (required)
:return: True if bad, false otherwise
:rtype: ``bool``
"""
if entry['Type'] != entryTypes['error'] and entry['ContentsFormat'] == formats['json']:
if entry['Brand'] == brands['xfe']:
return demisto.get(entry, 'Contents.url.result.score') > thresholds['xfeScore']
if entry['Brand'] == brands['vt']:
return demisto.get(entry, 'Contents.positives') > thresholds['vtPositives']
if entry['Brand'] == brands['cs'] and demisto.get(entry, 'Contents'):
c = demisto.get(entry, 'Contents')[0]
return demisto.get(c, 'indicator') and demisto.get(c, 'malicious_confidence') in ['high', 'medium']
return False
def positiveFile(entry):
"""
Checks if the given entry from a file reputation query is positive (known bad) (deprecated)
:type entry: ``dict``
:param entry: File entry (required)
:return: True if bad, false otherwise
:rtype: ``bool``
"""
if entry['Type'] != entryTypes['error'] and entry['ContentsFormat'] == formats['json']:
if entry['Brand'] == brands['xfe'] and (demisto.get(entry, 'Contents.malware.family')
or demisto.gets(entry, 'Contents.malware.origins.external.family')):
return True
if entry['Brand'] == brands['vt']:
return demisto.get(entry, 'Contents.positives') > thresholds['vtPositives']
if entry['Brand'] == brands['wf']:
return demisto.get(entry, 'Contents.wildfire.file_info.malware') == 'yes'
if entry['Brand'] == brands['cy'] and demisto.get(entry, 'Contents'):
contents = demisto.get(entry, 'Contents')
k = contents.keys()
if k and len(k) > 0:
v = contents[k[0]]
if v and demisto.get(v, 'generalscore'):
return v['generalscore'] < -0.5
if entry['Brand'] == brands['cs'] and demisto.get(entry, 'Contents'):
c = demisto.get(entry, 'Contents')[0]
return demisto.get(c, 'indicator') and demisto.get(c, 'malicious_confidence') in ['high', 'medium']
return False
def vtCountPositives(entry):
"""
Counts the number of detected URLs in the entry
:type entry: ``dict``
:param entry: Demisto entry (required)
:return: The number of detected URLs
:rtype: ``int``
"""
positives = 0
if demisto.get(entry, 'Contents.detected_urls'):
for detected in demisto.get(entry, 'Contents.detected_urls'):
if demisto.get(detected, 'positives') > thresholds['vtPositives']:
positives += 1
return positives
def positiveIp(entry):
"""
    Checks if the given entry from an IP reputation query is positive (known bad) (deprecated)
:type entry: ``dict``
:param entry: IP entry (required)
:return: True if bad, false otherwise
:rtype: ``bool``
"""
if entry['Type'] != entryTypes['error'] and entry['ContentsFormat'] == formats['json']:
if entry['Brand'] == brands['xfe']:
return demisto.get(entry, 'Contents.reputation.score') > thresholds['xfeScore']
if entry['Brand'] == brands['vt'] and demisto.get(entry, 'Contents.detected_urls'):
return vtCountPositives(entry) > thresholds['vtPositiveUrlsForIP']
if entry['Brand'] == brands['cs'] and demisto.get(entry, 'Contents'):
c = demisto.get(entry, 'Contents')[0]
return demisto.get(c, 'indicator') and demisto.get(c, 'malicious_confidence') in ['high', 'medium']
return False
def formatEpochDate(t):
"""
Convert a time expressed in seconds since the epoch to a string representing local time
:type t: ``int``
:param t: Time represented in seconds (required)
:return: A string representing local time
:rtype: ``str``
"""
if t:
return time.ctime(t)
return ''
def shortCrowdStrike(entry):
"""
Display CrowdStrike Intel results in Markdown (deprecated)
:type entry: ``dict``
:param entry: CrowdStrike result entry (required)
:return: A Demisto entry containing the shortened CrowdStrike info
:rtype: ``dict``
"""
if entry['Type'] != entryTypes['error'] and entry['ContentsFormat'] == formats['json']:
if entry['Brand'] == brands['cs'] and demisto.get(entry, 'Contents'):
c = demisto.get(entry, 'Contents')[0]
csRes = '## CrowdStrike Falcon Intelligence'
csRes += '\n\n### Indicator - ' + demisto.gets(c, 'indicator')
labels = demisto.get(c, 'labels')
if labels:
csRes += '\n### Labels'
csRes += '\nName|Created|Last Valid'
csRes += '\n----|-------|----------'
for label in labels:
csRes += '\n' + demisto.gets(label, 'name') + '|' + \
formatEpochDate(demisto.get(label, 'created_on')) + '|' + \
formatEpochDate(demisto.get(label, 'last_valid_on'))
relations = demisto.get(c, 'relations')
if relations:
csRes += '\n### Relations'
csRes += '\nIndicator|Type|Created|Last Valid'
csRes += '\n---------|----|-------|----------'
for r in relations:
csRes += '\n' + demisto.gets(r, 'indicator') + '|' + demisto.gets(r, 'type') + '|' + \
                         formatEpochDate(demisto.get(r, 'created_date')) + '|' + \
                         formatEpochDate(demisto.get(r, 'last_valid_date'))
return {'ContentsFormat': formats['markdown'], 'Type': entryTypes['note'], 'Contents': csRes}
return entry
def shortUrl(entry):
"""
Formats a URL reputation entry into a short table (deprecated)
:type entry: ``dict``
:param entry: URL result entry (required)
:return: A Demisto entry containing the shortened URL info
:rtype: ``dict``
"""
if entry['Type'] != entryTypes['error'] and entry['ContentsFormat'] == formats['json']:
c = entry['Contents']
if entry['Brand'] == brands['xfe']:
return {'ContentsFormat': formats['table'], 'Type': entryTypes['note'], 'Contents': {
'Country': c['country'], 'MalwareCount': demisto.get(c, 'malware.count'),
'A': demisto.gets(c, 'resolution.A'), 'AAAA': demisto.gets(c, 'resolution.AAAA'),
'Score': demisto.get(c, 'url.result.score'), 'Categories': demisto.gets(c, 'url.result.cats'),
'URL': demisto.get(c, 'url.result.url'), 'Provider': providers['xfe'],
'ProviderLink': 'https://exchange.xforce.ibmcloud.com/url/' + demisto.get(c, 'url.result.url')}}
if entry['Brand'] == brands['vt']:
return {'ContentsFormat': formats['table'], 'Type': entryTypes['note'], 'Contents': {
'ScanDate': c['scan_date'], 'Positives': c['positives'], 'Total': c['total'],
'URL': c['url'], 'Provider': providers['vt'], 'ProviderLink': c['permalink']}}
if entry['Brand'] == brands['cs'] and demisto.get(entry, 'Contents'):
return shortCrowdStrike(entry)
return {'ContentsFormat': 'text', 'Type': 4, 'Contents': 'Unknown provider for result: ' + entry['Brand']}
def shortFile(entry):
"""
Formats a file reputation entry into a short table (deprecated)
:type entry: ``dict``
:param entry: File result entry (required)
:return: A Demisto entry containing the shortened file info
:rtype: ``dict``
"""
if entry['Type'] != entryTypes['error'] and entry['ContentsFormat'] == formats['json']:
c = entry['Contents']
if entry['Brand'] == brands['xfe']:
cm = c['malware']
return {'ContentsFormat': formats['table'], 'Type': entryTypes['note'], 'Contents': {
'Family': cm['family'], 'MIMEType': cm['mimetype'], 'MD5': cm['md5'][2:] if 'md5' in cm else '',
'CnCServers': demisto.get(cm, 'origins.CncServers.count'),
'DownloadServers': demisto.get(cm, 'origins.downloadServers.count'),
'Emails': demisto.get(cm, 'origins.emails.count'),
'ExternalFamily': demisto.gets(cm, 'origins.external.family'),
'ExternalCoverage': demisto.get(cm, 'origins.external.detectionCoverage'),
'Provider': providers['xfe'],
'ProviderLink': 'https://exchange.xforce.ibmcloud.com/malware/' + cm['md5'].replace('0x', '')}}
if entry['Brand'] == brands['vt']:
return {'ContentsFormat': formats['table'], 'Type': entryTypes['note'], 'Contents': {
'Resource': c['resource'], 'ScanDate': c['scan_date'], 'Positives': c['positives'],
'Total': c['total'], 'SHA1': c['sha1'], 'SHA256': c['sha256'], 'Provider': providers['vt'],
'ProviderLink': c['permalink']}}
if entry['Brand'] == brands['wf']:
c = demisto.get(entry, 'Contents.wildfire.file_info')
if c:
return {'Contents': {'Type': c['filetype'], 'Malware': c['malware'], 'MD5': c['md5'],
'SHA256': c['sha256'], 'Size': c['size'], 'Provider': providers['wf']},
'ContentsFormat': formats['table'], 'Type': entryTypes['note']}
if entry['Brand'] == brands['cy'] and demisto.get(entry, 'Contents'):
contents = demisto.get(entry, 'Contents')
k = contents.keys()
if k and len(k) > 0:
v = contents[k[0]]
if v and demisto.get(v, 'generalscore'):
return {'Contents': {'Status': v['status'], 'Code': v['statuscode'], 'Score': v['generalscore'],
'Classifiers': str(v['classifiers']), 'ConfirmCode': v['confirmcode'],
'Error': v['error'], 'Provider': providers['cy']},
'ContentsFormat': formats['table'], 'Type': entryTypes['note']}
if entry['Brand'] == brands['cs'] and demisto.get(entry, 'Contents'):
return shortCrowdStrike(entry)
return {'ContentsFormat': formats['text'], 'Type': entryTypes['error'],
'Contents': 'Unknown provider for result: ' + entry['Brand']}
def shortIp(entry):
"""
Formats an ip reputation entry into a short table (deprecated)
:type entry: ``dict``
:param entry: IP result entry (required)
:return: A Demisto entry containing the shortened IP info
:rtype: ``dict``
"""
if entry['Type'] != entryTypes['error'] and entry['ContentsFormat'] == formats['json']:
c = entry['Contents']
if entry['Brand'] == brands['xfe']:
cr = c['reputation']
return {'ContentsFormat': formats['table'], 'Type': entryTypes['note'], 'Contents': {
'IP': cr['ip'], 'Score': cr['score'], 'Geo': str(cr['geo']), 'Categories': str(cr['cats']),
'Provider': providers['xfe']}}
if entry['Brand'] == brands['vt']:
return {'ContentsFormat': formats['table'], 'Type': entryTypes['note'],
'Contents': {'Positive URLs': vtCountPositives(entry), 'Provider': providers['vt']}}
if entry['Brand'] == brands['cs'] and demisto.get(entry, 'Contents'):
return shortCrowdStrike(entry)
return {'ContentsFormat': formats['text'], 'Type': entryTypes['error'],
'Contents': 'Unknown provider for result: ' + entry['Brand']}
def shortDomain(entry):
"""
Formats a domain reputation entry into a short table (deprecated)
:type entry: ``dict``
:param entry: Domain result entry (required)
:return: A Demisto entry containing the shortened domain info
:rtype: ``dict``
"""
if entry['Type'] != entryTypes['error'] and entry['ContentsFormat'] == formats['json']:
if entry['Brand'] == brands['vt']:
return {'ContentsFormat': formats['table'], 'Type': entryTypes['note'],
'Contents': {'Positive URLs': vtCountPositives(entry), 'Provider': providers['vt']}}
return {'ContentsFormat': formats['text'], 'Type': entryTypes['error'],
'Contents': 'Unknown provider for result: ' + entry['Brand']}
def get_error(execute_command_result):
"""
execute_command_result must contain error entry - check the result first with is_error function
if there is no error entry in the result then it will raise an Exception
:type execute_command_result: ``dict`` or ``list``
:param execute_command_result: result of demisto.executeCommand()
:return: Error message extracted from the demisto.executeCommand() result
:rtype: ``string``
"""
if not is_error(execute_command_result):
raise ValueError("execute_command_result has no error entry. before using get_error use is_error")
if isinstance(execute_command_result, dict):
return execute_command_result['Contents']
error_messages = []
for entry in execute_command_result:
is_error_entry = type(entry) == dict and entry['Type'] == entryTypes['error']
if is_error_entry:
error_messages.append(entry['Contents'])
return '\n'.join(error_messages)
def is_error(execute_command_result):
"""
Check if the given execute_command_result has an error entry
:type execute_command_result: ``dict`` or ``list``
:param execute_command_result: Demisto entry (required) or result of demisto.executeCommand()
:return: True if the execute_command_result has an error entry, false otherwise
:rtype: ``bool``
"""
if execute_command_result is None:
return False
if isinstance(execute_command_result, list):
if len(execute_command_result) > 0:
for entry in execute_command_result:
if type(entry) == dict and entry['Type'] == entryTypes['error']:
return True
return type(execute_command_result) == dict and execute_command_result['Type'] == entryTypes['error']
isError = is_error
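# Illustrative usage with demisto.executeCommand results:
#   res = demisto.executeCommand('ip', {'ip': '8.8.8.8'})
#   if is_error(res):
#       return_error(get_error(res))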
def FormatADTimestamp(ts):
"""
Formats an Active Directory timestamp into human readable time representation
:type ts: ``int``
:param ts: The timestamp to be formatted (required)
    :return: A string representing the time
:rtype: ``str``
"""
return (datetime(year=1601, month=1, day=1) + timedelta(seconds=int(ts) / 10 ** 7)).ctime()
def PrettifyCompactedTimestamp(x):
"""
Formats a compacted timestamp string into human readable time representation
:type x: ``str``
:param x: The timestamp to be formatted (required)
    :return: A string representing the time
:rtype: ``str``
"""
return '%s-%s-%sT%s:%s:%s' % (x[:4], x[4:6], x[6:8], x[8:10], x[10:12], x[12:])
def NormalizeRegistryPath(strRegistryPath):
"""
Normalizes a registry path string
:type strRegistryPath: ``str``
:param strRegistryPath: The registry path (required)
:return: The normalized string
:rtype: ``str``
"""
dSub = {
'HKCR': 'HKEY_CLASSES_ROOT',
'HKCU': 'HKEY_CURRENT_USER',
'HKLM': 'HKEY_LOCAL_MACHINE',
'HKU': 'HKEY_USERS',
'HKCC': 'HKEY_CURRENT_CONFIG',
'HKPD': 'HKEY_PERFORMANCE_DATA'
}
for k in dSub:
if strRegistryPath[:len(k)] == k:
return dSub[k] + strRegistryPath[len(k):]
return strRegistryPath
def scoreToReputation(score):
"""
Converts score (in number format) to human readable reputation format
:type score: ``int``
:param score: The score to be formatted (required)
:return: The formatted score
:rtype: ``str``
"""
to_str = {
4: 'Critical',
3: 'Bad',
2: 'Suspicious',
1: 'Good',
0.5: 'Informational',
0: 'Unknown'
}
return to_str.get(score, 'None')
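# Illustrative usage:
#   scoreToReputation(3)   # -> 'Bad'
#   scoreToReputation(7)   # -> 'None' (unmapped scores fall back to 'None')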
def b64_encode(text):
"""
Base64 encode a string. Wrapper function around base64.b64encode which will accept a string
In py3 will encode the string to binary using utf-8 encoding and return a string result decoded using utf-8
:param text: string to encode
:type text: str
:return: encoded string
:rtype: str
"""
if not text:
return ''
elif isinstance(text, bytes):
to_encode = text
else:
to_encode = text.encode('utf-8', 'ignore')
res = base64.b64encode(to_encode)
if IS_PY3:
res = res.decode('utf-8') # type: ignore
return res
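# Illustrative usage:
#   b64_encode('user:pass')   # -> 'dXNlcjpwYXNz'
#   b64_encode('')            # -> ''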
def encode_string_results(text):
"""
Encode string as utf-8, if any unicode character exists.
:param text: string to encode
:type text: str
:return: encoded string
:rtype: str
"""
if not isinstance(text, STRING_OBJ_TYPES):
return text
try:
return str(text)
except UnicodeEncodeError:
return text.encode("utf8", "replace")
def safe_load_json(json_object):
"""
Safely loads a JSON object from an argument. Allows the argument to accept either a JSON in string form,
or an entry ID corresponding to a JSON file.
:param json_object: Entry ID or JSON string.
:type json_object: str
:return: Dictionary object from a parsed JSON file or string.
:rtype: dict
"""
safe_json = None
if isinstance(json_object, dict) or isinstance(json_object, list):
return json_object
if (json_object.startswith('{') and json_object.endswith('}')) or (
json_object.startswith('[') and json_object.endswith(']')):
try:
safe_json = json.loads(json_object)
except ValueError as e:
return_error(
'Unable to parse JSON string. Please verify the JSON is valid. - ' + str(e))
else:
try:
path = demisto.getFilePath(json_object)
with open(path['path'], 'rb') as data:
try:
safe_json = json.load(data)
except Exception: # lgtm [py/catch-base-exception]
safe_json = json.loads(data.read())
except Exception as e:
            return_error('Unable to parse JSON file. Please verify the JSON is valid or the Entry '
                         'ID is correct. - ' + str(e))
return safe_json
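# Illustrative usage (accepts a JSON string or a War Room entry ID pointing at a JSON file):
#   safe_load_json('{"a": 1}')                      # -> {'a': 1}
#   safe_load_json(demisto.args().get('entry_id'))  # loads and parses the file behind the entry ID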
def datetime_to_string(datetime_obj):
"""
Converts a datetime object into a string. When used with `json.dumps()` for the `default` parameter,
e.g. `json.dumps(response, default=datetime_to_string)` datetime_to_string allows entire JSON objects
to be safely added to context without causing any datetime marshalling errors.
:param datetime_obj: Datetime object.
:type datetime_obj: datetime.datetime
:return: String representation of a datetime object.
:rtype: str
"""
if isinstance(datetime_obj, datetime): # type: ignore
return datetime_obj.__str__()
def remove_empty_elements(d):
"""
Recursively remove empty lists, empty dicts, or None elements from a dictionary.
:param d: Input dictionary.
:type d: dict
:return: Dictionary with all empty lists, and empty dictionaries removed.
:rtype: dict
"""
def empty(x):
return x is None or x == {} or x == []
if not isinstance(d, (dict, list)):
return d
elif isinstance(d, list):
return [v for v in (remove_empty_elements(v) for v in d) if not empty(v)]
else:
return {k: v for k, v in ((k, remove_empty_elements(v)) for k, v in d.items()) if not empty(v)}
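# Illustrative usage:
#   remove_empty_elements({'a': 1, 'b': None, 'c': [], 'd': {'e': {}}})   # -> {'a': 1}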
class SmartGetDict(dict):
"""A dict that when called with get(key, default) will return the default passed
value, even if there is a value of "None" in the place of the key. Example with built-in dict:
```
>>> d = {}
>>> d['test'] = None
>>> d.get('test', 1)
>>> print(d.get('test', 1))
None
```
Example with SmartGetDict:
```
>>> d = SmartGetDict()
>>> d['test'] = None
>>> d.get('test', 1)
>>> print(d.get('test', 1))
1
```
:return: SmartGetDict
:rtype: ``SmartGetDict``
"""
def get(self, key, default=None):
res = dict.get(self, key)
if res is not None:
return res
return default
if (not os.getenv('COMMON_SERVER_NO_AUTO_PARAMS_REMOVE_NULLS')) and hasattr(demisto, 'params') and demisto.params():
demisto.callingContext['params'] = SmartGetDict(demisto.params())
def aws_table_to_markdown(response, table_header):
"""
Converts a raw response from AWS into a markdown formatted table. This function checks to see if
there is only one nested dict in the top level of the dictionary and will use the nested data.
:param response: Raw response from AWS
:type response: dict
:param table_header: The header string to use for the table.
:type table_header: str
:return: Markdown formatted table as a string.
:rtype: str
"""
if isinstance(response, dict):
if len(response) == 1:
if isinstance(response[list(response.keys())[0]], dict) or isinstance(
response[list(response.keys())[0]], list):
if isinstance(response[list(response.keys())[0]], list):
list_response = response[list(response.keys())[0]]
if not list_response:
human_readable = tableToMarkdown(table_header, list_response)
elif isinstance(list_response[0], str):
human_readable = tableToMarkdown(
table_header, response)
else:
human_readable = tableToMarkdown(
table_header, response[list(response.keys())[0]])
else:
human_readable = tableToMarkdown(
table_header, response[list(response.keys())[0]])
else:
human_readable = tableToMarkdown(table_header, response)
else:
human_readable = tableToMarkdown(table_header, response)
else:
human_readable = tableToMarkdown(table_header, response)
return human_readable
def stringEscape(st):
"""
Escape newline chars in the given string.
:type st: ``str``
:param st: The string to be modified (required).
:return: A modified string.
:rtype: ``str``
"""
return st.replace('\r', '\\r').replace('\n', '\\n').replace('\t', '\\t')
def stringUnEscape(st):
"""
Unescape newline chars in the given string.
:type st: ``str``
:param st: The string to be modified (required).
:return: A modified string.
:rtype: ``str``
"""
return st.replace('\\r', '\r').replace('\\n', '\n').replace('\\t', '\t')
class IntegrationLogger(object):
"""
a logger for python integrations:
use LOG(<message>) to add a record to the logger (message can be any object with __str__)
use LOG.print_log(verbose=True/False) to display all records in War-Room (if verbose) and server log.
use add_replace_strs to add sensitive strings that should be replaced before going to the log.
:type message: ``str``
:param message: The message to be logged
:return: No data returned
:rtype: ``None``
"""
def __init__(self, debug_logging=False):
self.messages = [] # type: list
self.write_buf = [] # type: list
self.replace_strs = [] # type: list
self.curl = [] # type: list
self.buffering = True
self.debug_logging = debug_logging
# if for some reason you don't want to auto add credentials.password to replace strings
# set the os env COMMON_SERVER_NO_AUTO_REPLACE_STRS. Either in CommonServerUserPython, or docker env
if (not os.getenv('COMMON_SERVER_NO_AUTO_REPLACE_STRS') and hasattr(demisto, 'getParam')):
# add common params
sensitive_params = ('key', 'private', 'password', 'secret', 'token', 'credentials', 'service_account')
if demisto.params():
self._iter_sensistive_dict_obj(demisto.params(), sensitive_params)
def _iter_sensistive_dict_obj(self, dict_obj, sensitive_params):
for (k, v) in dict_obj.items():
if isinstance(v, dict): # credentials object case. recurse into the object
self._iter_sensistive_dict_obj(v, sensitive_params)
if v.get('identifier') and v.get('password'): # also add basic auth case
basic_auth = '{}:{}'.format(v.get('identifier'), v.get('password'))
self.add_replace_strs(b64_encode(basic_auth))
elif isinstance(v, STRING_OBJ_TYPES):
k_lower = k.lower()
for p in sensitive_params:
if p in k_lower:
self.add_replace_strs(v, b64_encode(v))
def encode(self, message):
try:
res = str(message)
except UnicodeEncodeError as exception:
# could not decode the message
# if message is an Exception, try encode the exception's message
if isinstance(message, Exception) and message.args and isinstance(message.args[0], STRING_OBJ_TYPES):
res = message.args[0].encode('utf-8', 'replace') # type: ignore
elif isinstance(message, STRING_OBJ_TYPES):
# try encode the message itself
res = message.encode('utf-8', 'replace') # type: ignore
else:
res = "Failed encoding message with error: {}".format(exception)
for s in self.replace_strs:
res = res.replace(s, '<XX_REPLACED>')
return res
def __call__(self, message):
text = self.encode(message)
if self.buffering:
self.messages.append(text)
if self.debug_logging:
demisto.debug(text)
else:
demisto.info(text)
return text
def add_replace_strs(self, *args):
'''
Add strings which will be replaced when logging.
Meant for avoiding passwords and so forth in the log.
'''
to_add = []
for a in args:
if a:
a = self.encode(a)
to_add.append(stringEscape(a))
to_add.append(stringUnEscape(a))
js = json.dumps(a)
if js.startswith('"'):
js = js[1:]
if js.endswith('"'):
js = js[:-1]
to_add.append(js)
self.replace_strs.extend(to_add)
def set_buffering(self, state):
"""
        set whether the logger buffers messages or writes straight to the demisto log
:param state: True/False
:type state: boolean
"""
self.buffering = state
def print_log(self, verbose=False):
if self.write_buf:
self.messages.append("".join(self.write_buf))
if self.messages:
text = 'Full Integration Log:\n' + '\n'.join(self.messages)
if verbose:
demisto.log(text)
            if not self.debug_logging:  # we don't print out in debug_logging as all messages were already printed
demisto.info(text)
self.messages = []
def build_curl(self, text):
"""
Parses the HTTP client "send" log messages and generates cURL queries out of them.
:type text: ``str``
:param text: The HTTP client log message.
:return: No data returned
:rtype: ``None``
"""
http_methods = ['GET', 'POST', 'PUT', 'DELETE', 'PATCH']
data = text.split("send: b'")[1]
if data and data[0] in {'{', '<'}:
# it is the request url query params/post body - will always come after we already have the url and headers
# `<` is for xml body
self.curl[-1] += "-d '{}".format(data)
elif any(http_method in data for http_method in http_methods):
method = ''
url = ''
headers = []
headers_to_skip = ['Content-Length', 'User-Agent', 'Accept-Encoding', 'Connection']
request_parts = repr(data).split('\\\\r\\\\n') # splitting lines on repr since data is a bytes-string
for line, part in enumerate(request_parts):
if line == 0:
method, url, _ = part[1:].split() # ignoring " at first char
elif line != len(request_parts) - 1: # ignoring the last line which is empty
if part.startswith('Host:'):
_, host = part.split('Host: ')
url = 'https://{}{}'.format(host, url)
else:
if any(header_to_skip in part for header_to_skip in headers_to_skip):
continue
headers.append(part)
curl_headers = ''
for header in headers:
if header:
curl_headers += '-H "{}" '.format(header)
curl = 'curl -X {} {} {}'.format(method, url, curl_headers)
if demisto.params().get('proxy'):
proxy_address = os.environ.get('https_proxy')
if proxy_address:
curl += '--proxy {} '.format(proxy_address)
else:
curl += '--noproxy "*" '
if demisto.params().get('insecure'):
curl += '-k '
self.curl.append(curl)
def write(self, msg):
# same as __call__ but allows IntegrationLogger to act as a File like object.
msg = self.encode(msg)
has_newline = False
if '\n' in msg:
has_newline = True
# if new line is last char we trim it out
if msg[-1] == '\n':
msg = msg[:-1]
self.write_buf.append(msg)
if has_newline:
text = "".join(self.write_buf)
if self.buffering:
self.messages.append(text)
else:
demisto.info(text)
if is_debug_mode() and text.startswith('send:'):
try:
self.build_curl(text)
except Exception as e: # should fail silently
demisto.debug('Failed generating curl - {}'.format(str(e)))
self.write_buf = []
def print_override(self, *args, **kwargs):
# print function that can be used to override print usage of internal modules
# will print to the log if the print target is stdout/stderr
try:
import __builtin__ # type: ignore
except ImportError:
# Python 3
import builtins as __builtin__ # type: ignore
file_ = kwargs.get('file')
if (not file_) or file_ == sys.stdout or file_ == sys.stderr:
kwargs['file'] = self
__builtin__.print(*args, **kwargs)
"""
a logger for python integrations:
use LOG(<message>) to add a record to the logger (message can be any object with __str__)
use LOG.print_log() to display all records in War-Room and server log.
"""
LOG = IntegrationLogger(debug_logging=is_debug_mode())
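# Illustrative usage (the page variable below is a placeholder):
#   LOG('fetching incidents, page={}'.format(page))   # buffered (or written immediately in debug mode)
#   LOG.print_log()                                    # flush buffered records to the server log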
def formatAllArgs(args, kwds):
"""
makes a nice string representation of all the arguments
:type args: ``list``
:param args: function arguments (required)
:type kwds: ``dict``
:param kwds: function keyword arguments (required)
:return: string representation of all the arguments
:rtype: ``string``
"""
formattedArgs = ','.join([repr(a) for a in args]) + ',' + str(kwds).replace(':', "=").replace(" ", "")[1:-1]
return formattedArgs
def logger(func):
"""
decorator function to log the function call using LOG
:type func: ``function``
:param func: function to call (required)
:return: returns the func return value.
:rtype: ``any``
"""
def func_wrapper(*args, **kwargs):
LOG('calling {}({})'.format(func.__name__, formatAllArgs(args, kwargs)))
ret_val = func(*args, **kwargs)
if is_debug_mode():
LOG('Return value [{}]: {}'.format(func.__name__, str(ret_val)))
return ret_val
return func_wrapper
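# Illustrative usage (get_user is a hypothetical function):
#   @logger
#   def get_user(user_id):
#       ...
#   # every call is recorded via LOG, e.g. "calling get_user('123',)"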
def formatCell(data, is_pretty=True):
"""
    Convert a given object to md while descending multiple levels
:type data: ``str`` or ``list``
:param data: The cell content (required)
:type is_pretty: ``bool``
:param is_pretty: Should cell content be prettified (default is True)
:return: The formatted cell content as a string
:rtype: ``str``
"""
if isinstance(data, STRING_TYPES):
return data
elif isinstance(data, dict):
return '\n'.join([u'{}: {}'.format(k, flattenCell(v, is_pretty)) for k, v in data.items()])
else:
return flattenCell(data, is_pretty)
def flattenCell(data, is_pretty=True):
"""
Flattens a markdown table cell content into a single string
:type data: ``str`` or ``list``
:param data: The cell content (required)
:type is_pretty: ``bool``
    :param is_pretty: Should cell content be prettified (default is True)
    :return: A string representation of the cell content
:rtype: ``str``
"""
indent = 4 if is_pretty else None
if isinstance(data, STRING_TYPES):
return data
elif isinstance(data, list):
string_list = []
for d in data:
try:
if IS_PY3 and isinstance(d, bytes):
string_list.append(d.decode('utf-8'))
else:
string_list.append(str(d))
except UnicodeEncodeError:
string_list.append(d.encode('utf-8'))
return ',\n'.join(string_list)
else:
return json.dumps(data, indent=indent, ensure_ascii=False)
def FormatIso8601(t):
"""
    Convert a datetime object to an ISO 8601 time format string
    :type t: ``datetime``
    :param t: datetime object to convert (required)
:return: An ISO 8601 time format string
:rtype: ``str``
"""
return t.strftime("%Y-%m-%dT%H:%M:%S")
def argToList(arg, separator=','):
"""
Converts a string representation of args to a python list
:type arg: ``str`` or ``list``
:param arg: Args to be converted (required)
:type separator: ``str``
:param separator: A string separator to separate the strings, the default is a comma.
:return: A python list of args
:rtype: ``list``
"""
if not arg:
return []
if isinstance(arg, list):
return arg
if isinstance(arg, STRING_TYPES):
if arg[0] == '[' and arg[-1] == ']':
return json.loads(arg)
return [s.strip() for s in arg.split(separator)]
return [arg]
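# Illustrative usage:
#   argToList('a, b ,c')       # -> ['a', 'b', 'c']
#   argToList('["a", "b"]')    # -> ['a', 'b']
#   argToList(['a', 'b'])      # -> ['a', 'b']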
def argToBoolean(value):
"""
Boolean-ish arguments that are passed through demisto.args() could be type bool or type string.
This command removes the guesswork and returns a value of type bool, regardless of the input value's type.
It will also return True for 'yes' and False for 'no'.
:param value: the value to evaluate
:type value: ``string|bool``
    :return: a boolean representation of 'value'
:rtype: ``bool``
"""
if isinstance(value, bool):
return value
if isinstance(value, STRING_OBJ_TYPES):
if value.lower() in ['true', 'yes']:
return True
elif value.lower() in ['false', 'no']:
return False
else:
raise ValueError('Argument does not contain a valid boolean-like value')
else:
raise ValueError('Argument is neither a string nor a boolean')
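# Illustrative usage:
#   argToBoolean('yes')     # -> True
#   argToBoolean('False')   # -> False
#   argToBoolean(True)      # -> True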
def appendContext(key, data, dedup=False):
"""
Append data to the investigation context
:type key: ``str``
:param key: The context path (required)
:type data: ``any``
:param data: Data to be added to the context (required)
:type dedup: ``bool``
:param dedup: True if de-duplication is required. Default is False.
:return: No data returned
:rtype: ``None``
"""
if data is None:
return
existing = demisto.get(demisto.context(), key)
if existing:
if isinstance(existing, STRING_TYPES):
if isinstance(data, STRING_TYPES):
new_val = data + ',' + existing
else:
new_val = data + existing # will raise a self explanatory TypeError
elif isinstance(existing, dict):
if isinstance(data, dict):
new_val = [existing, data] # type: ignore[assignment]
else:
new_val = data + existing # will raise a self explanatory TypeError
elif isinstance(existing, list):
if isinstance(data, list):
existing.extend(data)
else:
existing.append(data)
new_val = existing # type: ignore[assignment]
else:
new_val = [existing, data] # type: ignore[assignment]
if dedup and isinstance(new_val, list):
new_val = list(set(new_val))
demisto.setContext(key, new_val)
else:
demisto.setContext(key, data)
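# Illustrative sketch (cannot run standalone, since it calls the demisto API):
#     appendContext('Tags', ['new-tag'], dedup=True)
# If the context already holds a list under 'Tags', the new values are appended and
# duplicates are removed before demisto.setContext is called.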
def url_to_clickable_markdown(data, url_keys):
"""
Turn the given URL fields into clickable URLs, for use in the markdown table.
:type data: ``[Union[str, List[Any], Dict[str, Any]]]``
:param data: a dictionary or a list containing data with some values that are URLs
:type url_keys: ``List[str]``
:param url_keys: the keys whose values should be turned into clickable URLs
:return: the data with URL values converted to clickable markdown links
:rtype: ``[Union[str, List[Any], Dict[str, Any]]]``
"""
if isinstance(data, list):
data = [url_to_clickable_markdown(item, url_keys) for item in data]
elif isinstance(data, dict):
data = {key: create_clickable_url(value) if key in url_keys else url_to_clickable_markdown(data[key], url_keys)
for key, value in data.items()}
return data
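# Illustrative example:
#     url_to_clickable_markdown([{'name': 'example', 'link': 'https://example.com'}], url_keys=['link'])
#     -> [{'name': 'example', 'link': '[https://example.com](https://example.com)'}]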
def create_clickable_url(url):
"""
Make the given URL clickable in markdown format by wrapping it in the proper brackets
:type url: ``Union[List[str], str]``
:param url: the URL of interest or a list of URLs
:return: the URL (or list of URLs) in clickable markdown format
:rtype: ``str`` or ``list``
"""
if not url:
return None
elif isinstance(url, list):
return ['[{}]({})'.format(item, item) for item in url]
return '[{}]({})'.format(url, url)
def tableToMarkdown(name, t, headers=None, headerTransform=None, removeNull=False, metadata=None, url_keys=None):
"""
Converts a demisto table in JSON form to a Markdown table
:type name: ``str``
:param name: The name of the table (required)
:type t: ``dict`` or ``list``
:param t: The JSON table - List of dictionaries with the same keys or a single dictionary (required)
:type headers: ``list`` or ``string``
:param headers: A list of headers to be presented in the output table (by order). If a string is passed,
the table will have a single header. By default, all available headers are included.
:type headerTransform: ``function``
:param headerTransform: A function that formats the original data headers (optional)
:type removeNull: ``bool``
:param removeNull: Remove empty columns from the table. Default is False
:type metadata: ``str``
:param metadata: Metadata about the table contents
:type url_keys: ``list``
:param url_keys: a list of keys in the given JSON table whose values should be turned into clickable URLs
:return: A string representation of the markdown table
:rtype: ``str``
"""
# Turning the urls in the table to clickable
if url_keys:
t = url_to_clickable_markdown(t, url_keys)
mdResult = ''
if name:
mdResult = '### ' + name + '\n'
if metadata:
mdResult += metadata + '\n'
if not t or len(t) == 0:
mdResult += '**No entries.**\n'
return mdResult
if not headers and isinstance(t, dict) and len(t.keys()) == 1:
# in case of a single key, create a column table where each element is in a different row.
headers = list(t.keys())
t = list(t.values())[0]
if not isinstance(t, list):
t = [t]
if headers and isinstance(headers, STRING_TYPES):
headers = [headers]
if not isinstance(t[0], dict):
# the table contains only simple objects (strings, numbers)
# should be only one header
if headers and len(headers) > 0:
header = headers[0]
t = [{header: item} for item in t]
else:
raise Exception("Missing headers param for tableToMarkdown. Example: headers=['Some Header']")
# in case headers were not provided (backward compatibility)
if not headers:
headers = list(t[0].keys())
headers.sort()
if removeNull:
headers_aux = headers[:]
for header in headers:
if all(obj.get(header) in ('', None, [], {}) for obj in t):
headers_aux.remove(header)
headers = headers_aux
if t and len(headers) > 0:
newHeaders = []
if headerTransform is None: # noqa
def headerTransform(s): return stringEscapeMD(s, True, True) # noqa
for header in headers:
newHeaders.append(headerTransform(header))
mdResult += '|'
if len(newHeaders) == 1:
mdResult += newHeaders[0]
else:
mdResult += '|'.join(newHeaders)
mdResult += '|\n'
sep = '---'
mdResult += '|' + '|'.join([sep] * len(headers)) + '|\n'
for entry in t:
vals = [stringEscapeMD((formatCell(entry.get(h, ''), False) if entry.get(h) is not None else ''),
True, True) for h in headers]
# this pipe is optional
mdResult += '| '
try:
mdResult += ' | '.join(vals)
except UnicodeDecodeError:
vals = [str(v) for v in vals]
mdResult += ' | '.join(vals)
mdResult += ' |\n'
else:
mdResult += '**No entries.**\n'
return mdResult
tblToMd = tableToMarkdown
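# Illustrative example:
#     tableToMarkdown('Results', [{'a': 1, 'b': 2}])
# produces:
#     ### Results
#     |a|b|
#     |---|---|
#     | 1 | 2 |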
def createContextSingle(obj, id=None, keyTransform=None, removeNull=False):
"""Receives a dict with flattened key values, and converts them into nested dicts
:type obj: ``dict`` or ``list``
:param obj: The data to be added to the context (required)
:type id: ``str``
:param id: The ID of the context entry
:type keyTransform: ``function``
:param keyTransform: A formatting function for the context keys
:type removeNull: ``bool``
:param removeNull: True if empty columns should be removed, false otherwise
:return: The converted context list
:rtype: ``list``
"""
res = {} # type: dict
if keyTransform is None:
def keyTransform(s): return s # noqa
keys = obj.keys()
for key in keys:
if removeNull and obj[key] in ('', None, [], {}):
continue
values = key.split('.')
current = res
for v in values[:-1]:
current.setdefault(v, {})
current = current[v]
current[keyTransform(values[-1])] = obj[key]
if id is not None:
res.setdefault('ID', id)
return res
def createContext(data, id=None, keyTransform=None, removeNull=False):
"""Receives a dict with flattened key values, and converts them into nested dicts
:type data: ``dict`` or ``list``
:param data: The data to be added to the context (required)
:type id: ``str``
:param id: The ID of the context entry
:type keyTransform: ``function``
:param keyTransform: A formatting function for the context keys
:type removeNull: ``bool``
:param removeNull: True if empty columns should be removed, false otherwise
:return: The converted context list
:rtype: ``list``
"""
if isinstance(data, (list, tuple)):
return [createContextSingle(d, id, keyTransform, removeNull) for d in data]
else:
return createContextSingle(data, id, keyTransform, removeNull)
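# Illustrative example:
#     createContext({'ID': 1, 'Details.Name': 'DBot'})
#     -> {'ID': 1, 'Details': {'Name': 'DBot'}}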
def sectionsToMarkdown(root):
"""
Converts a list of Demisto JSON tables to markdown string of tables
:type root: ``dict`` or ``list``
:param root: The JSON table - List of dictionaries with the same keys or a single dictionary (required)
:return: A string representation of the markdown table
:rtype: ``str``
"""
mdResult = ''
if isinstance(root, dict):
for section in root:
data = root[section]
if isinstance(data, dict):
data = [data]
data = [{k: formatCell(row[k]) for k in row} for row in data]
mdResult += tblToMd(section, data)
return mdResult
def fileResult(filename, data, file_type=None):
"""
Creates a file from the given data
:type filename: ``str``
:param filename: The name of the file to be created (required)
:type data: ``str`` or ``bytes``
:param data: The file data (required)
:type file_type: ``str``
:param file_type: one of the entryTypes file or entryInfoFile (optional)
:return: A Demisto war room entry
:rtype: ``dict``
"""
if file_type is None:
file_type = entryTypes['file']
temp = demisto.uniqueFile()
# pylint: disable=undefined-variable
if (IS_PY3 and isinstance(data, str)) or (not IS_PY3 and isinstance(data, unicode)): # type: ignore # noqa: F821
data = data.encode('utf-8')
# pylint: enable=undefined-variable
with open(demisto.investigation()['id'] + '_' + temp, 'wb') as f:
f.write(data)
return {'Contents': '', 'ContentsFormat': formats['text'], 'Type': file_type, 'File': filename, 'FileID': temp}
def hash_djb2(s, seed=5381):
"""
Hash string with djb2 hash function
:type s: ``str``
:param s: The input string to hash
:type seed: ``int``
:param seed: The seed for the hash function (default is 5381)
:return: The hashed value
:rtype: ``int``
"""
hash_name = seed
for x in s:
hash_name = ((hash_name << 5) + hash_name) + ord(x)
return hash_name & 0xFFFFFFFF
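# Illustrative example (djb2: hash = hash * 33 + ord(char), masked to 32 bits):
#     hash_djb2('a') -> 177670    # (5381 * 33) + 97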
def file_result_existing_file(filename, saveFilename=None):
"""
Create a war room file entry from an existing file (the file is renamed to a unique investigation file name)
:type filename: ``str``
:param filename: The name of the file to be modified (required)
:type saveFilename: ``str``
:param saveFilename: The new file name
:return: A Demisto war room entry
:rtype: ``dict``
"""
temp = demisto.uniqueFile()
os.rename(filename, demisto.investigation()['id'] + '_' + temp)
return {'Contents': '', 'ContentsFormat': formats['text'], 'Type': entryTypes['file'],
'File': saveFilename if saveFilename else filename, 'FileID': temp}
def flattenRow(rowDict):
"""
Flatten each element in the given rowDict
:type rowDict: ``dict``
:param rowDict: The dict to be flattened (required)
:return: A flattened dict
:rtype: ``dict``
"""
return {k: formatCell(rowDict[k]) for k in rowDict}
def flattenTable(tableDict):
"""
Flatten each row in the given tableDict
:type tableDict: ``list``
:param tableDict: The table (a list of row dicts) to be flattened (required)
:return: A flattened table
:rtype: ``list``
"""
return [flattenRow(row) for row in tableDict]
MARKDOWN_CHARS = r"\`*_{}[]()#+-!|"
def stringEscapeMD(st, minimal_escaping=False, escape_multiline=False):
"""
Escape any chars that might break a markdown string
:type st: ``str``
:param st: The string to be modified (required)
:type minimal_escaping: ``bool``
:param minimal_escaping: Whether to escape only the table-format character (pipe) instead of all special characters (optional)
:type escape_multiline: ``bool``
:param escape_multiline: Whether to convert line-ending characters to ``<br>`` (optional)
:return: A modified string
:rtype: ``str``
"""
if escape_multiline:
st = st.replace('\r\n', '<br>') # Windows
st = st.replace('\r', '<br>') # old Mac
st = st.replace('\n', '<br>') # Unix
if minimal_escaping:
for c in '|':
st = st.replace(c, '\\' + c)
else:
st = "".join(["\\" + str(c) if c in MARKDOWN_CHARS else str(c) for c in st])
return st
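# Illustrative examples:
#     stringEscapeMD('cell|with|pipes', minimal_escaping=True) -> 'cell\\|with\\|pipes'  (each pipe prefixed with a backslash)
#     stringEscapeMD('*emphasis*')                              -> '\\*emphasis\\*'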
def raiseTable(root, key):
newInternal = {}
if key in root and isinstance(root[key], dict):
for sub in root[key]:
if sub not in root:
root[sub] = root[key][sub]
else:
newInternal[sub] = root[key][sub]
if newInternal:
root[key] = newInternal
else:
del root[key]
def zoomField(item, fieldName):
if isinstance(item, dict) and fieldName in item:
return item[fieldName]
else:
return item
def isCommandAvailable(cmd):
"""
Check the list of available modules to see whether a command is currently available to be run.
:type cmd: ``str``
:param cmd: The command to check (required)
:return: True if command is available, False otherwise
:rtype: ``bool``
"""
modules = demisto.getAllSupportedCommands()
for m in modules:
if modules[m] and isinstance(modules[m], list):
for c in modules[m]:
if c['name'] == cmd:
return True
return False
def epochToTimestamp(epoch):
return datetime.utcfromtimestamp(epoch / 1000.0).strftime("%Y-%m-%d %H:%M:%S")
def formatTimeColumns(data, timeColumnNames):
for row in data:
for k in timeColumnNames:
row[k] = epochToTimestamp(row[k])
def strip_tag(tag):
split_array = tag.split('}')
if len(split_array) > 1:
strip_ns_tag = split_array[1]
tag = strip_ns_tag
return tag
def elem_to_internal(elem, strip_ns=1, strip=1):
"""Convert an Element into an internal dictionary (not JSON!)."""
d = OrderedDict() # type: dict
elem_tag = elem.tag
if strip_ns:
elem_tag = strip_tag(elem.tag)
for key, value in list(elem.attrib.items()):
d['@' + key] = value
# loop over subelements to merge them
for subelem in elem:
v = elem_to_internal(subelem, strip_ns=strip_ns, strip=strip)
tag = subelem.tag
if strip_ns:
tag = strip_tag(subelem.tag)
value = v[tag]
try:
# add to existing list for this tag
d[tag].append(value)
except AttributeError:
# turn existing entry into a list
d[tag] = [d[tag], value]
except KeyError:
# add a new non-list entry
d[tag] = value
text = elem.text
tail = elem.tail
if strip:
# ignore leading and trailing whitespace
if text:
text = text.strip()
if tail:
tail = tail.strip()
if tail:
d['#tail'] = tail
if d:
# use #text element if other attributes exist
if text:
d["#text"] = text
else:
# text is the value if no attributes
d = text or None # type: ignore
return {elem_tag: d}
def internal_to_elem(pfsh, factory=ET.Element):
"""Convert an internal dictionary (not JSON!) into an Element.
Whatever Element implementation we could import will be
used by default; if you want to use something else, pass the
Element class as the factory parameter.
"""
attribs = OrderedDict() # type: dict
text = None
tail = None
sublist = []
tag = list(pfsh.keys())
if len(tag) != 1:
raise ValueError("Illegal structure with multiple tags: %s" % tag)
tag = tag[0]
value = pfsh[tag]
if isinstance(value, dict):
for k, v in list(value.items()):
if k[:1] == "@":
attribs[k[1:]] = v
elif k == "#text":
text = v
elif k == "#tail":
tail = v
elif isinstance(v, list):
for v2 in v:
sublist.append(internal_to_elem({k: v2}, factory=factory))
else:
sublist.append(internal_to_elem({k: v}, factory=factory))
else:
text = value
e = factory(tag, attribs)
for sub in sublist:
e.append(sub)
e.text = text
e.tail = tail
return e
def elem2json(elem, options, strip_ns=1, strip=1):
"""Convert an ElementTree or Element into a JSON string."""
if hasattr(elem, 'getroot'):
elem = elem.getroot()
if 'pretty' in options:
return json.dumps(elem_to_internal(elem, strip_ns=strip_ns, strip=strip), indent=4, separators=(',', ': '))
else:
return json.dumps(elem_to_internal(elem, strip_ns=strip_ns, strip=strip))
def json2elem(json_data, factory=ET.Element):
"""Convert a JSON string into an Element.
Whatever Element implementation we could import will be used by
default; if you want to use something else, pass the Element class
as the factory parameter.
"""
return internal_to_elem(json.loads(json_data), factory)
def xml2json(xmlstring, options={}, strip_ns=1, strip=1):
"""
Convert an XML string into a JSON string.
:type xmlstring: ``str``
:param xmlstring: The string to be converted (required)
:return: The converted JSON
:rtype: ``dict`` or ``list``
"""
elem = ET.fromstring(xmlstring)
return elem2json(elem, options, strip_ns=strip_ns, strip=strip)
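# Illustrative example:
#     xml2json('<root attr="1"><child>text</child></root>')
#     -> '{"root": {"@attr": "1", "child": "text"}}'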
def json2xml(json_data, factory=ET.Element):
"""Convert a JSON string into an XML string.
Whatever Element implementation we could import will be used by
default; if you want to use something else, pass the Element class
as the factory parameter.
"""
if not isinstance(json_data, dict):
json_data = json.loads(json_data)
elem = internal_to_elem(json_data, factory)
return ET.tostring(elem, encoding='utf-8')
def get_hash_type(hash_file):
"""
Checks the type of the given hash. Returns 'md5', 'sha1', 'sha256', 'sha512' or 'Unknown'.
:type hash_file: ``str``
:param hash_file: The hash to be checked (required)
:return: The hash type
:rtype: ``str``
"""
hash_len = len(hash_file)
if (hash_len == 32):
return 'md5'
elif (hash_len == 40):
return 'sha1'
elif (hash_len == 64):
return 'sha256'
elif (hash_len == 128):
return 'sha512'
else:
return 'Unknown'
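# Illustrative examples (the check is based on string length only):
#     get_hash_type('d41d8cd98f00b204e9800998ecf8427e')  -> 'md5'     (32 hex chars)
#     get_hash_type('a' * 64)                            -> 'sha256'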
def is_mac_address(mac):
"""
Test for valid mac address
:type mac: ``str``
:param mac: MAC address in the form of AA:BB:CC:00:11:22
:return: True/False
:rtype: ``bool``
"""
if re.search(r'([0-9A-F]{2}[:]){5}([0-9A-F]){2}', mac.upper()) is not None:
return True
else:
return False
def is_ipv6_valid(address):
"""
Checks if the given string represents a valid IPv6 address.
:type address: str
:param address: The string to check.
:return: True if the given string represents a valid IPv6 address.
:rtype: ``bool``
"""
try:
socket.inet_pton(socket.AF_INET6, address)
except socket.error: # not a valid address
return False
return True
def is_ip_valid(s, accept_v6_ips=False):
"""
Checks if the given string represents a valid IP address.
By default, will only return 'True' for IPv4 addresses.
:type s: ``str``
:param s: The string to be checked (required)
:type accept_v6_ips: ``bool``
:param accept_v6_ips: A boolean determining whether the
function should accept IPv6 addresses
:return: True if the given string represents a valid IP address, False otherwise
:rtype: ``bool``
"""
a = s.split('.')
if accept_v6_ips and is_ipv6_valid(s):
return True
elif len(a) != 4:
return False
else:
for x in a:
if not x.isdigit():
return False
i = int(x)
if i < 0 or i > 255:
return False
return True
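# Illustrative examples:
#     is_ip_valid('8.8.8.8')                          -> True
#     is_ip_valid('999.1.1.1')                        -> False
#     is_ip_valid('2001:db8::1')                      -> False
#     is_ip_valid('2001:db8::1', accept_v6_ips=True)  -> True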
def get_integration_name():
"""
Get the calling integration's name
:return: Calling integration's name
:rtype: ``str``
"""
return demisto.callingContext.get('context', '').get('IntegrationBrand')
class Common(object):
class Indicator(object):
"""
interface class
"""
@abstractmethod
def to_context(self):
pass
class DBotScore(object):
"""
DBotScore class
:type indicator: ``str``
:param indicator: indicator value, ip, hash, domain, url, etc
:type indicator_type: ``DBotScoreType``
:param indicator_type: use DBotScoreType class
:type integration_name: ``str``
:param integration_name: For integrations - The class will automatically determine the integration name.
For scripts - The class will use the given integration name.
:type score: ``DBotScore``
:param score: DBotScore.NONE, DBotScore.GOOD, DBotScore.SUSPICIOUS, DBotScore.BAD
:type malicious_description: ``str``
:param malicious_description: if the indicator is malicious and there is an explanation for it, set it in this field
:type reliability: ``DBotScoreReliability``
:param reliability: use DBotScoreReliability class
:return: None
:rtype: ``None``
"""
NONE = 0
GOOD = 1
SUSPICIOUS = 2
BAD = 3
CONTEXT_PATH = 'DBotScore(val.Indicator && val.Indicator == obj.Indicator && val.Vendor == obj.Vendor ' \
'&& val.Type == obj.Type)'
CONTEXT_PATH_PRIOR_V5_5 = 'DBotScore'
def __init__(self, indicator, indicator_type, integration_name='', score=None, malicious_description=None,
reliability=None):
if not DBotScoreType.is_valid_type(indicator_type):
raise TypeError('indicator_type must be of type DBotScoreType enum')
if not Common.DBotScore.is_valid_score(score):
raise TypeError('score must be of type DBotScore enum')
if reliability and not DBotScoreReliability.is_valid_type(reliability):
raise TypeError('reliability must be of type DBotScoreReliability enum')
self.indicator = indicator
self.indicator_type = indicator_type
# For integrations - The class will automatically determine the integration name.
if demisto.callingContext.get('integration'):
self.integration_name = get_integration_name()
else:
self.integration_name = integration_name
self.score = score
self.malicious_description = malicious_description
self.reliability = reliability
@staticmethod
def is_valid_score(score):
return score in (
Common.DBotScore.NONE,
Common.DBotScore.GOOD,
Common.DBotScore.SUSPICIOUS,
Common.DBotScore.BAD
)
@staticmethod
def get_context_path():
if is_demisto_version_ge('5.5.0'):
return Common.DBotScore.CONTEXT_PATH
else:
return Common.DBotScore.CONTEXT_PATH_PRIOR_V5_5
def to_context(self):
dbot_context = {
'Indicator': self.indicator,
'Type': self.indicator_type,
'Vendor': self.integration_name,
'Score': self.score
}
if self.reliability:
dbot_context['Reliability'] = self.reliability
ret_value = {
Common.DBotScore.get_context_path(): dbot_context
}
return ret_value
def to_readable(self):
dbot_score_to_text = {0: 'Unknown',
1: 'Good',
2: 'Suspicious',
3: 'Bad'}
return dbot_score_to_text.get(self.score, 'Undefined')
class CustomIndicator(Indicator):
def __init__(self, indicator_type, value, dbot_score, data, context_prefix):
"""
:type indicator_type: ``Str``
:param indicator_type: The name of the indicator type.
:type value: ``Any``
:param value: The value of the indicator.
:type dbot_score: ``DBotScore``
:param dbot_score: If custom indicator has a score then create and set a DBotScore object.
:type data: ``Dict(Str,Any)``
:param data: A dictionary containing all the param names and their values.
:type context_prefix: ``Str``
:param context_prefix: Will be used as the context path prefix.
:return: None
:rtype: ``None``
"""
if hasattr(DBotScoreType, indicator_type.upper()):
raise ValueError('Creating a custom indicator type with an existing type name is not allowed')
if not value:
raise ValueError('value is mandatory for creating the indicator')
if not context_prefix:
raise ValueError('context_prefix is mandatory for creating the indicator')
self.CONTEXT_PATH = '{context_prefix}(val.value && val.value == obj.value)'.\
format(context_prefix=context_prefix)
self.value = value
if not isinstance(dbot_score, Common.DBotScore):
raise ValueError('dbot_score must be of type DBotScore')
self.dbot_score = dbot_score
self.indicator_type = indicator_type
self.data = data
INDICATOR_TYPE_TO_CONTEXT_KEY[indicator_type.lower()] = indicator_type.capitalize()
for key in self.data:
setattr(self, key, data[key])
def to_context(self):
custom_context = {
'value': self.value
}
custom_context.update(self.data)
ret_value = {
self.CONTEXT_PATH: custom_context
}
if self.dbot_score:
ret_value.update(self.dbot_score.to_context())
ret_value[Common.DBotScore.get_context_path()]['Type'] = self.indicator_type
return ret_value
class IP(Indicator):
"""
IP indicator class - https://xsoar.pan.dev/docs/integrations/context-standards-mandatory#ip
:type ip: ``str``
:param ip: IP address
:type asn: ``str``
:param asn: The autonomous system name for the IP address, for example: "AS8948".
:type as_owner: ``str``
:param as_owner: The autonomous system owner of the IP.
:type region: ``str``
:param region: The region in which the IP is located.
:type port: ``str``
:param port: Ports that are associated with the IP.
:type internal: ``bool``
:param internal: Whether or not the IP is internal or external.
:type updated_date: ``date``
:param updated_date: The date that the IP was last updated.
:type registrar_abuse_name: ``str``
:param registrar_abuse_name: The name of the contact for reporting abuse.
:type registrar_abuse_address: ``str``
:param registrar_abuse_address: The address of the contact for reporting abuse.
:type registrar_abuse_country: ``str``
:param registrar_abuse_country: The country of the contact for reporting abuse.
:type registrar_abuse_network: ``str``
:param registrar_abuse_network: The network of the contact for reporting abuse.
:type registrar_abuse_phone: ``str``
:param registrar_abuse_phone: The phone number of the contact for reporting abuse.
:type registrar_abuse_email: ``str``
:param registrar_abuse_email: The email address of the contact for reporting abuse.
:type campaign: ``str``
:param campaign: The campaign associated with the IP.
:type traffic_light_protocol: ``str``
:param traffic_light_protocol: The Traffic Light Protocol (TLP) color that is suitable for the IP.
:type community_notes: ``CommunityNotes``
:param community_notes: Notes on the IP that were given by the community.
:type publications: ``Publications``
:param publications: Publications that were published about the IP.
:type threat_types: ``ThreatTypes``
:param threat_types: Threat types that are associated with the IP.
:type hostname: ``str``
:param hostname: The hostname that is mapped to this IP address.
:type geo_latitude: ``str``
:param geo_latitude: The geolocation where the IP address is located, in the format: latitude
:type geo_longitude: ``str``
:param geo_longitude: The geolocation where the IP address is located, in the format: longitude.
:type geo_country: ``str``
:param geo_country: The country in which the IP address is located.
:type geo_description: ``str``
:param geo_description: Additional information about the location.
:type detection_engines: ``int``
:param detection_engines: The total number of engines that checked the indicator.
:type positive_engines: ``int``
:param positive_engines: The number of engines that positively detected the indicator as malicious.
:type organization_name: ``str``
:param organization_name: The organization of the IP
:type organization_type: ``str``
:param organization_type: The organization type of the IP.
:type tags: ``str``
:param tags: Tags of the IP.
:type malware_family: ``str``
:param malware_family: The malware family associated with the IP.
:type feed_related_indicators: ``FeedRelatedIndicators``
:param feed_related_indicators: List of indicators that are associated with the IP.
:type relationships: ``list of EntityRelationship``
:param relationships: List of relationships of the indicator.
:type dbot_score: ``DBotScore``
:param dbot_score: If IP has a score then create and set a DBotScore object.
:return: None
:rtype: ``None``
"""
CONTEXT_PATH = 'IP(val.Address && val.Address == obj.Address)'
def __init__(self, ip, dbot_score, asn=None, as_owner=None, region=None, port=None, internal=None,
updated_date=None, registrar_abuse_name=None, registrar_abuse_address=None,
registrar_abuse_country=None, registrar_abuse_network=None, registrar_abuse_phone=None,
registrar_abuse_email=None, campaign=None, traffic_light_protocol=None,
community_notes=None, publications=None, threat_types=None,
hostname=None, geo_latitude=None, geo_longitude=None,
geo_country=None, geo_description=None, detection_engines=None, positive_engines=None,
organization_name=None, organization_type=None, feed_related_indicators=None, tags=None,
malware_family=None, relationships=None):
self.ip = ip
self.asn = asn
self.as_owner = as_owner
self.region = region
self.port = port
self.internal = internal
self.updated_date = updated_date
self.registrar_abuse_name = registrar_abuse_name
self.registrar_abuse_address = registrar_abuse_address
self.registrar_abuse_country = registrar_abuse_country
self.registrar_abuse_network = registrar_abuse_network
self.registrar_abuse_phone = registrar_abuse_phone
self.registrar_abuse_email = registrar_abuse_email
self.campaign = campaign
self.traffic_light_protocol = traffic_light_protocol
self.community_notes = community_notes
self.publications = publications
self.threat_types = threat_types
self.hostname = hostname
self.geo_latitude = geo_latitude
self.geo_longitude = geo_longitude
self.geo_country = geo_country
self.geo_description = geo_description
self.detection_engines = detection_engines
self.positive_engines = positive_engines
self.organization_name = organization_name
self.organization_type = organization_type
self.feed_related_indicators = feed_related_indicators
self.tags = tags
self.malware_family = malware_family
self.relationships = relationships
if not isinstance(dbot_score, Common.DBotScore):
raise ValueError('dbot_score must be of type DBotScore')
self.dbot_score = dbot_score
def to_context(self):
ip_context = {
'Address': self.ip
}
if self.asn:
ip_context['ASN'] = self.asn
if self.as_owner:
ip_context['ASOwner'] = self.as_owner
if self.region:
ip_context['Region'] = self.region
if self.port:
ip_context['Port'] = self.port
if self.internal:
ip_context['Internal'] = self.internal
if self.updated_date:
ip_context['UpdatedDate'] = self.updated_date
if self.registrar_abuse_name or self.registrar_abuse_address or self.registrar_abuse_country or \
self.registrar_abuse_network or self.registrar_abuse_phone or self.registrar_abuse_email:
ip_context['Registrar'] = {'Abuse': {}}
if self.registrar_abuse_name:
ip_context['Registrar']['Abuse']['Name'] = self.registrar_abuse_name
if self.registrar_abuse_address:
ip_context['Registrar']['Abuse']['Address'] = self.registrar_abuse_address
if self.registrar_abuse_country:
ip_context['Registrar']['Abuse']['Country'] = self.registrar_abuse_country
if self.registrar_abuse_network:
ip_context['Registrar']['Abuse']['Network'] = self.registrar_abuse_network
if self.registrar_abuse_phone:
ip_context['Registrar']['Abuse']['Phone'] = self.registrar_abuse_phone
if self.registrar_abuse_email:
ip_context['Registrar']['Abuse']['Email'] = self.registrar_abuse_email
if self.campaign:
ip_context['Campaign'] = self.campaign
if self.traffic_light_protocol:
ip_context['TrafficLightProtocol'] = self.traffic_light_protocol
if self.community_notes:
community_notes = []
for community_note in self.community_notes:
community_notes.append(community_note.to_context())
ip_context['CommunityNotes'] = community_notes
if self.publications:
publications = []
for publication in self.publications:
publications.append(publication.to_context())
ip_context['Publications'] = publications
if self.threat_types:
threat_types = []
for threat_type in self.threat_types:
threat_types.append(threat_type.to_context())
ip_context['ThreatTypes'] = threat_types
if self.hostname:
ip_context['Hostname'] = self.hostname
if self.geo_latitude or self.geo_country or self.geo_description:
ip_context['Geo'] = {}
if self.geo_latitude and self.geo_longitude:
ip_context['Geo']['Location'] = '{}:{}'.format(self.geo_latitude, self.geo_longitude)
if self.geo_country:
ip_context['Geo']['Country'] = self.geo_country
if self.geo_description:
ip_context['Geo']['Description'] = self.geo_description
if self.organization_name or self.organization_type:
ip_context['Organization'] = {}
if self.organization_name:
ip_context['Organization']['Name'] = self.organization_name
if self.organization_type:
ip_context['Organization']['Type'] = self.organization_type
if self.detection_engines is not None:
ip_context['DetectionEngines'] = self.detection_engines
if self.positive_engines is not None:
ip_context['PositiveDetections'] = self.positive_engines
if self.feed_related_indicators:
feed_related_indicators = []
for feed_related_indicator in self.feed_related_indicators:
feed_related_indicators.append(feed_related_indicator.to_context())
ip_context['FeedRelatedIndicators'] = feed_related_indicators
if self.tags:
ip_context['Tags'] = self.tags
if self.malware_family:
ip_context['MalwareFamily'] = self.malware_family
if self.dbot_score and self.dbot_score.score == Common.DBotScore.BAD:
ip_context['Malicious'] = {
'Vendor': self.dbot_score.integration_name,
'Description': self.dbot_score.malicious_description
}
if self.relationships:
relationships_context = [relationship.to_context() for relationship in self.relationships if
relationship.to_context()]
ip_context['Relationships'] = relationships_context
ret_value = {
Common.IP.CONTEXT_PATH: ip_context
}
if self.dbot_score:
ret_value.update(self.dbot_score.to_context())
return ret_value
class FileSignature(object):
"""
FileSignature class
:type authentihash: ``str``
:param authentihash: The authentication hash.
:type copyright: ``str``
:param copyright: Copyright information.
:type description: ``str``
:param description: A description of the signature.
:type file_version: ``str``
:param file_version: The file version.
:type internal_name: ``str``
:param internal_name: The internal name of the file.
:type original_name: ``str``
:param original_name: The original name of the file.
:return: None
:rtype: ``None``
"""
def __init__(self, authentihash, copyright, description, file_version, internal_name, original_name):
self.authentihash = authentihash
self.copyright = copyright
self.description = description
self.file_version = file_version
self.internal_name = internal_name
self.original_name = original_name
def to_context(self):
return {
'Authentihash': self.authentihash,
'Copyright': self.copyright,
'Description': self.description,
'FileVersion': self.file_version,
'InternalName': self.internal_name,
'OriginalName': self.original_name,
}
class FeedRelatedIndicators(object):
"""
FeedRelatedIndicators class
Implements indicators that are associated with another indicator
:type value: ``str``
:param value: Indicators that are associated with the indicator.
:type indicator_type: ``str``
:param indicator_type: The type of the indicators that are associated with the indicator.
:type description: ``str``
:param description: The description of the indicators that are associated with the indicator.
:return: None
:rtype: ``None``
"""
def __init__(self, value=None, indicator_type=None, description=None):
self.value = value
self.indicator_type = indicator_type
self.description = description
def to_context(self):
return {
'value': self.value,
'type': self.indicator_type,
'description': self.description
}
class CommunityNotes(object):
"""
CommunityNotes class
Implements community notes of an indicator
:type note: ``str``
:param note: Notes on the indicator that were given by the community.
:type timestamp: ``Timestamp``
:param timestamp: The time in which the note was published.
:return: None
:rtype: ``None``
"""
def __init__(self, note=None, timestamp=None):
self.note = note
self.timestamp = timestamp
def to_context(self):
return {
'note': self.note,
'timestamp': self.timestamp,
}
class Publications(object):
"""
Publications class
Implements publications of an indicator
:type source: ``str``
:param source: The source in which the article was published.
:type title: ``str``
:param title: The name of the article.
:type link: ``str``
:param link: A link to the original article.
:type timestamp: ``Timestamp``
:param timestamp: The time in which the article was published.
:return: None
:rtype: ``None``
"""
def __init__(self, source=None, title=None, link=None, timestamp=None):
self.source = source
self.title = title
self.link = link
self.timestamp = timestamp
def to_context(self):
return {
'source': self.source,
'title': self.title,
'link': self.link,
'timestamp': self.timestamp,
}
class Behaviors(object):
"""
Behaviors class
Implements behaviors of an indicator
:type details: ``str``
:param details:
:type action: ``str``
:param action:
:return: None
:rtype: ``None``
"""
def __init__(self, details=None, action=None):
self.details = details
self.action = action
def to_context(self):
return {
'details': self.details,
'action': self.action,
}
class ThreatTypes(object):
"""
ThreatTypes class
Implements threat types of an indicator
:type threat_category: ``str``
:param threat_category: The threat category associated to this indicator by the source vendor. For example,
Phishing, Control, TOR, etc.
:type threat_category_confidence: ``str``
:param threat_category_confidence: Threat Category Confidence is the confidence level provided by the vendor
for the threat type category
For example a confidence of 90 for threat type category "malware" means that the vendor rates that this
is 90% confidence of being a malware.
:return: None
:rtype: ``None``
"""
def __init__(self, threat_category=None, threat_category_confidence=None):
self.threat_category = threat_category
self.threat_category_confidence = threat_category_confidence
def to_context(self):
return {
'threatcategory': self.threat_category,
'threatcategoryconfidence': self.threat_category_confidence,
}
class File(Indicator):
"""
File indicator class - https://xsoar.pan.dev/docs/integrations/context-standards-mandatory#file
:type name: ``str``
:param name: The full file name (including file extension).
:type entry_id: ``str``
:param entry_id: The ID for locating the file in the War Room.
:type size: ``int``
:param size: The size of the file in bytes.
:type md5: ``str``
:param md5: The MD5 hash of the file.
:type sha1: ``str``
:param sha1: The SHA1 hash of the file.
:type sha256: ``str``
:param sha256: The SHA256 hash of the file.
:type sha512: ``str``
:param sha512: The SHA512 hash of the file.
:type ssdeep: ``str``
:param ssdeep: The ssdeep hash of the file (same as displayed in file entries).
:type extension: ``str``
:param extension: The file extension, for example: "xls".
:type file_type: ``str``
:param file_type: The file type, as determined by libmagic (same as displayed in file entries).
:type hostname: ``str``
:param hostname: The name of the host where the file was found. Should match Path.
:type path: ``str``
:param path: The path where the file is located.
:type company: ``str``
:param company: The name of the company that released a binary.
:type product_name: ``str``
:param product_name: The name of the product to which this file belongs.
:type digital_signature__publisher: ``str``
:param digital_signature__publisher: The publisher of the digital signature for the file.
:type signature: ``FileSignature``
:param signature: File signature class
:type actor: ``str``
:param actor: The actor reference.
:type tags: ``str``
:param tags: Tags of the file.
:type feed_related_indicators: ``FeedRelatedIndicators``
:param feed_related_indicators: List of indicators that are associated with the file.
:type malware_family: ``str``
:param malware_family: The malware family associated with the File.
:type campaign: ``str``
:param campaign: The campaign associated with the file.
:type traffic_light_protocol: ``str``
:param traffic_light_protocol: The Traffic Light Protocol (TLP) color that is suitable for the file.
:type community_notes: ``CommunityNotes``
:param community_notes: Notes on the file that were given by the community.
:type publications: ``Publications``
:param publications: Publications that were published about the file.
:type threat_types: ``ThreatTypes``
:param threat_types: Threat types that are associated with the file.
:type imphash: ``str``
:param imphash: The Imphash hash of the file.
:type quarantined: ``bool``
:param quarantined: Is the file quarantined or not.
:type organization: ``str``
:param organization: The organization of the file.
:type associated_file_names: ``str``
:param associated_file_names: File names that are known as associated to the file.
:type behaviors: ``Behaviors``
:param behaviors: list of behaviors associated with the file.
:type relationships: ``list of EntityRelationship``
:param relationships: List of relationships of the indicator.
:type dbot_score: ``DBotScore``
:param dbot_score: If file has a score then create and set a DBotScore object
:rtype: ``None``
:return: None
"""
CONTEXT_PATH = 'File(val.MD5 && val.MD5 == obj.MD5 || val.SHA1 && val.SHA1 == obj.SHA1 || ' \
'val.SHA256 && val.SHA256 == obj.SHA256 || val.SHA512 && val.SHA512 == obj.SHA512 || ' \
'val.CRC32 && val.CRC32 == obj.CRC32 || val.CTPH && val.CTPH == obj.CTPH || ' \
'val.SSDeep && val.SSDeep == obj.SSDeep)'
def __init__(self, dbot_score, name=None, entry_id=None, size=None, md5=None, sha1=None, sha256=None,
sha512=None, ssdeep=None, extension=None, file_type=None, hostname=None, path=None, company=None,
product_name=None, digital_signature__publisher=None, signature=None, actor=None, tags=None,
feed_related_indicators=None, malware_family=None, imphash=None, quarantined=None, campaign=None,
associated_file_names=None, traffic_light_protocol=None, organization=None, community_notes=None,
publications=None, threat_types=None, behaviors=None, relationships=None):
self.name = name
self.entry_id = entry_id
self.size = size
self.md5 = md5
self.sha1 = sha1
self.sha256 = sha256
self.sha512 = sha512
self.ssdeep = ssdeep
self.extension = extension
self.file_type = file_type
self.hostname = hostname
self.path = path
self.company = company
self.product_name = product_name
self.digital_signature__publisher = digital_signature__publisher
self.signature = signature
self.actor = actor
self.tags = tags
self.feed_related_indicators = feed_related_indicators
self.malware_family = malware_family
self.campaign = campaign
self.traffic_light_protocol = traffic_light_protocol
self.community_notes = community_notes
self.publications = publications
self.threat_types = threat_types
self.imphash = imphash
self.quarantined = quarantined
self.organization = organization
self.associated_file_names = associated_file_names
self.behaviors = behaviors
self.relationships = relationships
self.dbot_score = dbot_score
def to_context(self):
file_context = {}
if self.name:
file_context['Name'] = self.name
if self.entry_id:
file_context['EntryID'] = self.entry_id
if self.size:
file_context['Size'] = self.size
if self.md5:
file_context['MD5'] = self.md5
if self.sha1:
file_context['SHA1'] = self.sha1
if self.sha256:
file_context['SHA256'] = self.sha256
if self.sha512:
file_context['SHA512'] = self.sha512
if self.ssdeep:
file_context['SSDeep'] = self.ssdeep
if self.extension:
file_context['Extension'] = self.extension
if self.file_type:
file_context['Type'] = self.file_type
if self.hostname:
file_context['Hostname'] = self.hostname
if self.path:
file_context['Path'] = self.path
if self.company:
file_context['Company'] = self.company
if self.product_name:
file_context['ProductName'] = self.product_name
if self.digital_signature__publisher:
file_context['DigitalSignature'] = {
'Published': self.digital_signature__publisher
}
if self.signature:
file_context['Signature'] = self.signature.to_context()
if self.actor:
file_context['Actor'] = self.actor
if self.tags:
file_context['Tags'] = self.tags
if self.feed_related_indicators:
feed_related_indicators = []
for feed_related_indicator in self.feed_related_indicators:
feed_related_indicators.append(feed_related_indicator.to_context())
file_context['FeedRelatedIndicators'] = feed_related_indicators
if self.malware_family:
file_context['MalwareFamily'] = self.malware_family
if self.campaign:
file_context['Campaign'] = self.campaign
if self.traffic_light_protocol:
file_context['TrafficLightProtocol'] = self.traffic_light_protocol
if self.community_notes:
community_notes = []
for community_note in self.community_notes:
community_notes.append(community_note.to_context())
file_context['CommunityNotes'] = community_notes
if self.publications:
publications = []
for publication in self.publications:
publications.append(publication.to_context())
file_context['Publications'] = publications
if self.threat_types:
threat_types = []
for threat_type in self.threat_types:
threat_types.append(threat_type.to_context())
file_context['ThreatTypes'] = threat_types
if self.imphash:
file_context['Imphash'] = self.imphash
if self.quarantined:
file_context['Quarantined'] = self.quarantined
if self.organization:
file_context['Organization'] = self.organization
if self.associated_file_names:
file_context['AssociatedFileNames'] = self.associated_file_names
if self.behaviors:
behaviors = []
for behavior in self.behaviors:
behaviors.append(behavior.to_context())
file_context['Behavior'] = behaviors
if self.dbot_score and self.dbot_score.score == Common.DBotScore.BAD:
file_context['Malicious'] = {
'Vendor': self.dbot_score.integration_name,
'Description': self.dbot_score.malicious_description
}
if self.relationships:
relationships_context = [relationship.to_context() for relationship in self.relationships if
relationship.to_context()]
file_context['Relationships'] = relationships_context
ret_value = {
Common.File.CONTEXT_PATH: file_context
}
if self.dbot_score:
ret_value.update(self.dbot_score.to_context())
return ret_value
class CVE(Indicator):
"""
CVE indicator class - https://xsoar.pan.dev/docs/integrations/context-standards-mandatory#cve
:type id: ``str``
:param id: The ID of the CVE, for example: "CVE-2015-1653".
:type cvss: ``str``
:param cvss: The CVSS of the CVE, for example: "10.0".
:type published: ``str``
:param published: The timestamp of when the CVE was published.
:type modified: ``str``
:param modified: The timestamp of when the CVE was last modified.
:type description: ``str``
:param description: A description of the CVE.
:type relationships: ``list of EntityRelationship``
:param relationships: List of relationships of the indicator.
:return: None
:rtype: ``None``
"""
CONTEXT_PATH = 'CVE(val.ID && val.ID == obj.ID)'
def __init__(self, id, cvss, published, modified, description, relationships=None):
# type (str, str, str, str, str) -> None
self.id = id
self.cvss = cvss
self.published = published
self.modified = modified
self.description = description
self.dbot_score = Common.DBotScore(
indicator=id,
indicator_type=DBotScoreType.CVE,
integration_name=None,
score=Common.DBotScore.NONE
)
self.relationships = relationships
def to_context(self):
cve_context = {
'ID': self.id
}
if self.cvss:
cve_context['CVSS'] = self.cvss
if self.published:
cve_context['Published'] = self.published
if self.modified:
cve_context['Modified'] = self.modified
if self.description:
cve_context['Description'] = self.description
if self.relationships:
relationships_context = [relationship.to_context() for relationship in self.relationships if
relationship.to_context()]
cve_context['Relationships'] = relationships_context
ret_value = {
Common.CVE.CONTEXT_PATH: cve_context
}
if self.dbot_score:
ret_value.update(self.dbot_score.to_context())
return ret_value
class EMAIL(Indicator):
"""
EMAIL indicator class
:type address: ``str``
:param address: The email's address.
:type domain: ``str``
:param domain: The domain of the Email.
:type blocked: ``bool``
:param blocked: Whether the email address is blocked.
:type relationships: ``list of EntityRelationship``
:param relationships: List of relationships of the indicator.
:return: None
:rtype: ``None``
"""
CONTEXT_PATH = 'Email(val.Address && val.Address == obj.Address)'
def __init__(self, address, dbot_score, domain=None, blocked=None, relationships=None):
# type (str, str, bool) -> None
self.address = address
self.domain = domain
self.blocked = blocked
self.dbot_score = dbot_score
self.relationships = relationships
def to_context(self):
email_context = {
'Address': self.address
}
if self.domain:
email_context['Domain'] = self.domain
if self.blocked:
email_context['Blocked'] = self.blocked
if self.relationships:
relationships_context = [relationship.to_context() for relationship in self.relationships if
relationship.to_context()]
email_context['Relationships'] = relationships_context
ret_value = {
Common.EMAIL.CONTEXT_PATH: email_context
}
if self.dbot_score:
ret_value.update(self.dbot_score.to_context())
return ret_value
class URL(Indicator):
"""
URL indicator - https://xsoar.pan.dev/docs/integrations/context-standards-mandatory#url
:type url: ``str``
:param url: The URL
:type detection_engines: ``int``
:param detection_engines: The total number of engines that checked the indicator.
:type positive_detections: ``int``
:param positive_detections: The number of engines that positively detected the indicator as malicious.
:type category: ``str``
:param category: The category associated with the indicator.
:type feed_related_indicators: ``FeedRelatedIndicators``
:param feed_related_indicators: List of indicators that are associated with the URL.
:type malware_family: ``str``
:param malware_family: The malware family associated with the URL.
:type tags: ``str``
:param tags: Tags of the URL.
:type port: ``str``
:param port: Ports that are associated with the URL.
:type internal: ``bool``
:param internal: Whether or not the URL is internal or external.
:type campaign: ``str``
:param campaign: The campaign associated with the URL.
:type traffic_light_protocol: ``str``
:param traffic_light_protocol: The Traffic Light Protocol (TLP) color that is suitable for the URL.
:type threat_types: ``ThreatTypes``
:param threat_types: Threat types that are associated with the URL.
:type asn: ``str``
:param asn: The autonomous system name for the URL, for example: 'AS8948'.
:type as_owner: ``str``
:param as_owner: The autonomous system owner of the URL.
:type geo_country: ``str``
:param geo_country: The country in which the URL is located.
:type organization: ``str``
:param organization: The organization of the URL.
:type community_notes: ``CommunityNotes``
:param community_notes: List of notes on the URL that were given by the community.
:type publications: ``Publications``
:param publications: List of publications that were published about the URL.
:type relationships: ``list of EntityRelationship``
:param relationships: List of relationships of the indicator.
:type dbot_score: ``DBotScore``
:param dbot_score: If URL has reputation then create DBotScore object
:return: None
:rtype: ``None``
"""
CONTEXT_PATH = 'URL(val.Data && val.Data == obj.Data)'
def __init__(self, url, dbot_score, detection_engines=None, positive_detections=None, category=None,
feed_related_indicators=None, tags=None, malware_family=None, port=None, internal=None,
campaign=None, traffic_light_protocol=None, threat_types=None, asn=None, as_owner=None,
geo_country=None, organization=None, community_notes=None, publications=None, relationships=None):
self.url = url
self.detection_engines = detection_engines
self.positive_detections = positive_detections
self.category = category
self.feed_related_indicators = feed_related_indicators
self.tags = tags
self.malware_family = malware_family
self.port = port
self.internal = internal
self.campaign = campaign
self.traffic_light_protocol = traffic_light_protocol
self.threat_types = threat_types
self.asn = asn
self.as_owner = as_owner
self.geo_country = geo_country
self.organization = organization
self.community_notes = community_notes
self.publications = publications
self.relationships = relationships
self.dbot_score = dbot_score
def to_context(self):
url_context = {
'Data': self.url
}
if self.detection_engines is not None:
url_context['DetectionEngines'] = self.detection_engines
if self.positive_detections is not None:
url_context['PositiveDetections'] = self.positive_detections
if self.category:
url_context['Category'] = self.category
if self.feed_related_indicators:
feed_related_indicators = []
for feed_related_indicator in self.feed_related_indicators:
feed_related_indicators.append(feed_related_indicator.to_context())
url_context['FeedRelatedIndicators'] = feed_related_indicators
if self.tags:
url_context['Tags'] = self.tags
if self.malware_family:
url_context['MalwareFamily'] = self.malware_family
if self.port:
url_context['Port'] = self.port
if self.internal:
url_context['Internal'] = self.internal
if self.campaign:
url_context['Campaign'] = self.campaign
if self.traffic_light_protocol:
url_context['TrafficLightProtocol'] = self.traffic_light_protocol
if self.threat_types:
threat_types = []
for threat_type in self.threat_types:
threat_types.append(threat_type.to_context())
url_context['ThreatTypes'] = threat_types
if self.asn:
url_context['ASN'] = self.asn
if self.as_owner:
url_context['ASOwner'] = self.as_owner
if self.geo_country:
url_context['Geo'] = {'Country': self.geo_country}
if self.organization:
url_context['Organization'] = self.organization
if self.community_notes:
community_notes = []
for community_note in self.community_notes:
community_notes.append(community_note.to_context())
url_context['CommunityNotes'] = community_notes
if self.publications:
publications = []
for publication in self.publications:
publications.append(publication.to_context())
url_context['Publications'] = publications
if self.dbot_score and self.dbot_score.score == Common.DBotScore.BAD:
url_context['Malicious'] = {
'Vendor': self.dbot_score.integration_name,
'Description': self.dbot_score.malicious_description
}
if self.relationships:
relationships_context = [relationship.to_context() for relationship in self.relationships if
relationship.to_context()]
url_context['Relationships'] = relationships_context
ret_value = {
Common.URL.CONTEXT_PATH: url_context
}
if self.dbot_score:
ret_value.update(self.dbot_score.to_context())
return ret_value
class Domain(Indicator):
""" ignore docstring
Domain indicator - https://xsoar.pan.dev/docs/integrations/context-standards-mandatory#domain
"""
CONTEXT_PATH = 'Domain(val.Name && val.Name == obj.Name)'
def __init__(self, domain, dbot_score, dns=None, detection_engines=None, positive_detections=None,
organization=None, sub_domains=None, creation_date=None, updated_date=None, expiration_date=None,
domain_status=None, name_servers=None, feed_related_indicators=None, malware_family=None,
registrar_name=None, registrar_abuse_email=None, registrar_abuse_phone=None,
registrant_name=None, registrant_email=None, registrant_phone=None, registrant_country=None,
admin_name=None, admin_email=None, admin_phone=None, admin_country=None, tags=None,
domain_idn_name=None, port=None,
internal=None, category=None, campaign=None, traffic_light_protocol=None, threat_types=None,
community_notes=None, publications=None, geo_location=None, geo_country=None,
geo_description=None, tech_country=None, tech_name=None, tech_email=None, tech_organization=None,
billing=None, relationships=None):
self.domain = domain
self.dns = dns
self.detection_engines = detection_engines
self.positive_detections = positive_detections
self.organization = organization
self.sub_domains = sub_domains
self.creation_date = creation_date
self.updated_date = updated_date
self.expiration_date = expiration_date
self.registrar_name = registrar_name
self.registrar_abuse_email = registrar_abuse_email
self.registrar_abuse_phone = registrar_abuse_phone
self.registrant_name = registrant_name
self.registrant_email = registrant_email
self.registrant_phone = registrant_phone
self.registrant_country = registrant_country
self.admin_name = admin_name
self.admin_email = admin_email
self.admin_phone = admin_phone
self.admin_country = admin_country
self.tags = tags
self.domain_status = domain_status
self.name_servers = name_servers
self.feed_related_indicators = feed_related_indicators
self.malware_family = malware_family
self.domain_idn_name = domain_idn_name
self.port = port
self.internal = internal
self.category = category
self.campaign = campaign
self.traffic_light_protocol = traffic_light_protocol
self.threat_types = threat_types
self.community_notes = community_notes
self.publications = publications
self.geo_location = geo_location
self.geo_country = geo_country
self.geo_description = geo_description
self.tech_country = tech_country
self.tech_name = tech_name
self.tech_organization = tech_organization
self.tech_email = tech_email
self.billing = billing
self.relationships = relationships
self.dbot_score = dbot_score
def to_context(self):
domain_context = {
'Name': self.domain
}
whois_context = {}
if self.dns:
domain_context['DNS'] = self.dns
if self.detection_engines is not None:
domain_context['DetectionEngines'] = self.detection_engines
if self.positive_detections is not None:
domain_context['PositiveDetections'] = self.positive_detections
if self.registrar_name or self.registrar_abuse_email or self.registrar_abuse_phone:
domain_context['Registrar'] = {
'Name': self.registrar_name,
'AbuseEmail': self.registrar_abuse_email,
'AbusePhone': self.registrar_abuse_phone
}
whois_context['Registrar'] = domain_context['Registrar']
if self.registrant_name or self.registrant_phone or self.registrant_email or self.registrant_country:
domain_context['Registrant'] = {
'Name': self.registrant_name,
'Email': self.registrant_email,
'Phone': self.registrant_phone,
'Country': self.registrant_country
}
whois_context['Registrant'] = domain_context['Registrant']
if self.admin_name or self.admin_email or self.admin_phone or self.admin_country:
domain_context['Admin'] = {
'Name': self.admin_name,
'Email': self.admin_email,
'Phone': self.admin_phone,
'Country': self.admin_country
}
whois_context['Admin'] = domain_context['Admin']
if self.organization:
domain_context['Organization'] = self.organization
if self.sub_domains:
domain_context['Subdomains'] = self.sub_domains
if self.domain_status:
domain_context['DomainStatus'] = self.domain_status
whois_context['DomainStatus'] = domain_context['DomainStatus']
if self.creation_date:
domain_context['CreationDate'] = self.creation_date
whois_context['CreationDate'] = domain_context['CreationDate']
if self.updated_date:
domain_context['UpdatedDate'] = self.updated_date
whois_context['UpdatedDate'] = domain_context['UpdatedDate']
if self.expiration_date:
domain_context['ExpirationDate'] = self.expiration_date
whois_context['ExpirationDate'] = domain_context['ExpirationDate']
if self.name_servers:
domain_context['NameServers'] = self.name_servers
whois_context['NameServers'] = domain_context['NameServers']
if self.tags:
domain_context['Tags'] = self.tags
if self.feed_related_indicators:
feed_related_indicators = []
for feed_related_indicator in self.feed_related_indicators:
feed_related_indicators.append(feed_related_indicator.to_context())
domain_context['FeedRelatedIndicators'] = feed_related_indicators
if self.malware_family:
domain_context['MalwareFamily'] = self.malware_family
if self.dbot_score and self.dbot_score.score == Common.DBotScore.BAD:
domain_context['Malicious'] = {
'Vendor': self.dbot_score.integration_name,
'Description': self.dbot_score.malicious_description
}
if self.domain_idn_name:
domain_context['DomainIDNName'] = self.domain_idn_name
if self.port:
domain_context['Port'] = self.port
if self.internal:
domain_context['Internal'] = self.internal
if self.category:
domain_context['Category'] = self.category
if self.campaign:
domain_context['Campaign'] = self.campaign
if self.traffic_light_protocol:
domain_context['TrafficLightProtocol'] = self.traffic_light_protocol
if self.threat_types:
threat_types = []
for threat_type in self.threat_types:
threat_types.append(threat_type.to_context())
domain_context['ThreatTypes'] = threat_types
if self.community_notes:
community_notes = []
for community_note in self.community_notes:
community_notes.append(community_note.to_context())
domain_context['CommunityNotes'] = community_notes
if self.publications:
publications = []
for publication in self.publications:
publications.append(publication.to_context())
domain_context['Publications'] = publications
if self.geo_location or self.geo_country or self.geo_description:
domain_context['Geo'] = {}
if self.geo_location:
domain_context['Geo']['Location'] = self.geo_location
if self.geo_country:
domain_context['Geo']['Country'] = self.geo_country
if self.geo_description:
domain_context['Geo']['Description'] = self.geo_description
if self.tech_country or self.tech_name or self.tech_organization or self.tech_email:
domain_context['Tech'] = {}
if self.tech_country:
domain_context['Tech']['Country'] = self.tech_country
if self.tech_name:
domain_context['Tech']['Name'] = self.tech_name
if self.tech_organization:
domain_context['Tech']['Organization'] = self.tech_organization
if self.tech_email:
domain_context['Tech']['Email'] = self.tech_email
if self.billing:
domain_context['Billing'] = self.billing
if whois_context:
domain_context['WHOIS'] = whois_context
if self.relationships:
relationships_context = [relationship.to_context() for relationship in self.relationships if
relationship.to_context()]
domain_context['Relationships'] = relationships_context
ret_value = {
Common.Domain.CONTEXT_PATH: domain_context
}
if self.dbot_score:
ret_value.update(self.dbot_score.to_context())
return ret_value
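# Illustrative usage sketch (not part of the original module). Shows how a Common.Domain
# indicator with a DBotScore could be built and rendered to context. The domain value,
# vendor name, score and keyword names are hypothetical assumptions based on the
# attributes used in to_context above.
#
#   dbot_score = Common.DBotScore(
#       indicator='example.com',
#       indicator_type=DBotScoreType.DOMAIN,
#       integration_name='ExampleVendor',
#       score=Common.DBotScore.GOOD,
#   )
#   domain = Common.Domain(domain='example.com', dbot_score=dbot_score, dns='93.184.216.34')
#   context = domain.to_context()  # keyed by Common.Domain.CONTEXT_PATH plus the DBotScore entry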
class Endpoint(Indicator):
""" ignore docstring
Endpoint indicator - https://xsoar.pan.dev/docs/integrations/context-standards-mandatory#endpoint
"""
CONTEXT_PATH = 'Endpoint(val.ID && val.ID == obj.ID)'
def __init__(self, id, hostname=None, ip_address=None, domain=None, mac_address=None,
os=None, os_version=None, dhcp_server=None, bios_version=None, model=None,
memory=None, processors=None, processor=None, relationships=None, vendor=None, status=None,
is_isolated=None):
self.id = id
self.hostname = hostname
self.ip_address = ip_address
self.domain = domain
self.mac_address = mac_address
self.os = os
self.os_version = os_version
self.dhcp_server = dhcp_server
self.bios_version = bios_version
self.model = model
self.memory = memory
self.processors = processors
self.processor = processor
self.vendor = vendor
self.status = status
self.is_isolated = is_isolated
self.relationships = relationships
def to_context(self):
endpoint_context = {
'ID': self.id
}
if self.hostname:
endpoint_context['Hostname'] = self.hostname
if self.ip_address:
endpoint_context['IPAddress'] = self.ip_address
if self.domain:
endpoint_context['Domain'] = self.domain
if self.mac_address:
endpoint_context['MACAddress'] = self.mac_address
if self.os:
endpoint_context['OS'] = self.os
if self.os_version:
endpoint_context['OSVersion'] = self.os_version
if self.dhcp_server:
endpoint_context['DHCPServer'] = self.dhcp_server
if self.bios_version:
endpoint_context['BIOSVersion'] = self.bios_version
if self.model:
endpoint_context['Model'] = self.model
if self.memory:
endpoint_context['Memory'] = self.memory
if self.processors:
endpoint_context['Processors'] = self.processors
if self.processor:
endpoint_context['Processor'] = self.processor
if self.relationships:
relationships_context = [relationship.to_context() for relationship in self.relationships if
relationship.to_context()]
endpoint_context['Relationships'] = relationships_context
if self.vendor:
endpoint_context['Vendor'] = self.vendor
if self.status:
if self.status not in ENDPOINT_STATUS_OPTIONS:
raise ValueError('Status does not have a valid value such as: Online or Offline')
endpoint_context['Status'] = self.status
if self.is_isolated:
if self.is_isolated not in ENDPOINT_ISISOLATED_OPTIONS:
raise ValueError('Is Isolated does not have a valid value such as: Yes, No, Pending'
' isolation or Pending unisolation')
endpoint_context['IsIsolated'] = self.is_isolated
ret_value = {
Common.Endpoint.CONTEXT_PATH: endpoint_context
}
return ret_value
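# Illustrative usage sketch (not part of the original module). The endpoint ID, hostname
# and IP below are hypothetical; 'Online' is one of the values accepted by the status
# validation above.
#
#   endpoint = Common.Endpoint(id='ep-001', hostname='host01', ip_address='10.0.0.5', status='Online')
#   context = endpoint.to_context()  # {Common.Endpoint.CONTEXT_PATH: {...}}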
class Account(Indicator):
"""
Account indicator - https://xsoar.pan.dev/docs/integrations/context-standards-recommended#account
:type dbot_score: ``DBotScore``
:param dbot_score: If account has reputation then create DBotScore object
:return: None
:rtype: ``None``
"""
CONTEXT_PATH = 'Account(val.id && val.id == obj.id)'
def __init__(self, id, type=None, username=None, display_name=None, groups=None,
domain=None, email_address=None, telephone_number=None, office=None, job_title=None,
department=None, country=None, state=None, city=None, street=None, is_enabled=None,
dbot_score=None, relationships=None):
self.id = id
self.type = type
self.username = username
self.display_name = display_name
self.groups = groups
self.domain = domain
self.email_address = email_address
self.telephone_number = telephone_number
self.office = office
self.job_title = job_title
self.department = department
self.country = country
self.state = state
self.city = city
self.street = street
self.is_enabled = is_enabled
self.relationships = relationships
if not isinstance(dbot_score, Common.DBotScore):
raise ValueError('dbot_score must be of type DBotScore')
self.dbot_score = dbot_score
def to_context(self):
account_context = {
'Id': self.id
}
if self.type:
account_context['Type'] = self.type
irrelevant = ['CONTEXT_PATH', 'to_context', 'dbot_score', 'Id']
details = [detail for detail in dir(self) if not detail.startswith('__') and detail not in irrelevant]
for detail in details:
if self.__getattribute__(detail):
if detail == 'email_address':
account_context['Email'] = {
'Address': self.email_address
}
else:
Detail = camelize_string(detail, '_')
account_context[Detail] = self.__getattribute__(detail)
if self.dbot_score and self.dbot_score.score == Common.DBotScore.BAD:
account_context['Malicious'] = {
'Vendor': self.dbot_score.integration_name,
'Description': self.dbot_score.malicious_description
}
if self.relationships:
relationships_context = [relationship.to_context() for relationship in self.relationships if
relationship.to_context()]
account_context['Relationships'] = relationships_context
ret_value = {
Common.Account.CONTEXT_PATH: account_context
}
if self.dbot_score:
ret_value.update(self.dbot_score.to_context())
return ret_value
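# Illustrative usage sketch (not part of the original module). Account requires a DBotScore
# instance; the account ID, username and vendor name below are hypothetical.
#
#   dbot_score = Common.DBotScore(
#       indicator='jdoe',
#       indicator_type=DBotScoreType.ACCOUNT,
#       integration_name='ExampleVendor',
#       score=Common.DBotScore.NONE,
#   )
#   account = Common.Account(id='1234', username='jdoe', email_address='[email protected]', dbot_score=dbot_score)
#   context = account.to_context()  # email_address is mapped to Email.Address, other fields are camelized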
class Cryptocurrency(Indicator):
"""
Cryptocurrency indicator - https://xsoar.pan.dev/docs/integrations/context-standards-mandatory#cryptocurrency
:type address: ``str``
:param address: The Cryptocurrency address
:type address_type: ``str``
:param address_type: The Cryptocurrency type - e.g. `bitcoin`.
:type dbot_score: ``DBotScore``
:param dbot_score: If the address has reputation then create DBotScore object.
:return: None
:rtype: ``None``
"""
CONTEXT_PATH = 'Cryptocurrency(val.Address && val.Address == obj.Address)'
def __init__(self, address, address_type, dbot_score):
self.address = address
self.address_type = address_type
self.dbot_score = dbot_score
def to_context(self):
crypto_context = {
'Address': self.address,
'AddressType': self.address_type
}
if self.dbot_score and self.dbot_score.score == Common.DBotScore.BAD:
crypto_context['Malicious'] = {
'Vendor': self.dbot_score.integration_name,
'Description': self.dbot_score.malicious_description
}
ret_value = {
Common.Cryptocurrency.CONTEXT_PATH: crypto_context
}
if self.dbot_score:
ret_value.update(self.dbot_score.to_context())
return ret_value
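# Illustrative usage sketch (not part of the original module). The address and vendor
# name below are hypothetical.
#
#   dbot_score = Common.DBotScore(
#       indicator='1BoatSLRHtKNngkdXEeobR76b53LETtpyT',
#       indicator_type=DBotScoreType.CRYPTOCURRENCY,
#       integration_name='ExampleVendor',
#       score=Common.DBotScore.SUSPICIOUS,
#   )
#   crypto = Common.Cryptocurrency(address='1BoatSLRHtKNngkdXEeobR76b53LETtpyT',
#                                  address_type='bitcoin', dbot_score=dbot_score)
#   context = crypto.to_context()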
class AttackPattern(Indicator):
"""
Attack Pattern indicator
:type stix_id: ``str``
:param stix_id: The Attack Pattern STIX ID
:type kill_chain_phases: ``str``
:param kill_chain_phases: The Attack Pattern kill chain phases.
:type first_seen_by_source: ``str``
:param first_seen_by_source: The Attack Pattern first seen by source
:type description: ``str``
:param description: The Attack Pattern description
:type operating_system_refs: ``str``
:param operating_system_refs: The operating system refs of the Attack Pattern.
:type publications: ``str``
:param publications: The Attack Pattern publications
:type mitre_id: ``str``
:param mitre_id: The Attack Pattern MITRE ID.
:type tags: ``str``
:param tags: The Attack Pattern tags.
:type dbot_score: ``DBotScore``
:param dbot_score: If the attack pattern has a reputation then create a DBotScore object.
:return: None
:rtype: ``None``
"""
CONTEXT_PATH = 'AttackPattern(val.value && val.value == obj.value)'
def __init__(self, stix_id, kill_chain_phases, first_seen_by_source, description,
operating_system_refs, publications, mitre_id, tags, dbot_score):
self.stix_id = stix_id
self.kill_chain_phases = kill_chain_phases
self.first_seen_by_source = first_seen_by_source
self.description = description
self.operating_system_refs = operating_system_refs
self.publications = publications
self.mitre_id = mitre_id
self.tags = tags
self.dbot_score = dbot_score
def to_context(self):
attack_pattern_context = {
'STIXID': self.stix_id,
"KillChainPhases": self.kill_chain_phases,
"FirstSeenBySource": self.first_seen_by_source,
'OperatingSystemRefs': self.operating_system_refs,
"Publications": self.publications,
"MITREID": self.mitre_id,
"Tags": self.tags,
"Description": self.description
}
if self.dbot_score and self.dbot_score.score == Common.DBotScore.BAD:
attack_pattern_context['Malicious'] = {
'Vendor': self.dbot_score.integration_name,
'Description': self.dbot_score.malicious_description
}
ret_value = {
Common.AttackPattern.CONTEXT_PATH: attack_pattern_context
}
if self.dbot_score:
ret_value.update(self.dbot_score.to_context())
return ret_value
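# Illustrative usage sketch (not part of the original module). All values below are
# hypothetical; the constructor expects every positional field, so None can be passed
# for data the source does not supply.
#
#   attack_pattern = Common.AttackPattern(
#       stix_id='attack-pattern--0123',
#       kill_chain_phases='Defense Evasion',
#       first_seen_by_source='2020-01-01T00:00:00Z',
#       description='Example technique description',
#       operating_system_refs=None,
#       publications=None,
#       mitre_id='T1562',
#       tags=['mitre'],
#       dbot_score=None,
#   )
#   context = attack_pattern.to_context()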
class CertificatePublicKey(object):
"""
CertificatePublicKey class
Defines an X509 PublicKey used in Common.Certificate
:type algorithm: ``str``
:param algorithm: The encryption algorithm: DSA, RSA, EC or UNKNOWN (Common.CertificatePublicKey.Algorithm enum)
:type length: ``int``
:param length: The length of the public key
:type publickey: ``Optional[str]``
:param publickey: The public key value, as a string
:type p: ``Optional[str]``
:param p: P parameter used in DSA algorithm
:type q: ``Optional[str]``
:param q: Q parameter used in DSA algorithm
:type g: ``Optional[str]``
:param g: G parameter used in DSA algorithm
:type modulus: ``Optional[str]``
:param modulus: modulus parameter used in RSA algorithm
:type exponent: ``Optional[int]``
:param exponent: exponent parameter used in RSA algorithm
:type x: ``Optional[str]``
:param x: X parameter used in EC algorithm
:type y: ``Optional[str]``
:param y: Y parameter used in EC algorithm
:type curve: ``Optional[str]``
:param curve: curve parameter used in EC algorithm
:return: None
:rtype: ``None``
"""
class Algorithm(object):
"""
Algorithm class to enumerate available algorithms
:return: None
:rtype: ``None``
"""
DSA = "DSA"
RSA = "RSA"
EC = "EC"
UNKNOWN = "Unknown Algorithm"
@staticmethod
def is_valid_type(_type):
return _type in (
Common.CertificatePublicKey.Algorithm.DSA,
Common.CertificatePublicKey.Algorithm.RSA,
Common.CertificatePublicKey.Algorithm.EC,
Common.CertificatePublicKey.Algorithm.UNKNOWN
)
def __init__(
self,
algorithm, # type: str
length, # type: int
publickey=None, # type: str
p=None, # type: str
q=None, # type: str
g=None, # type: str
modulus=None, # type: str
exponent=None, # type: int
x=None, # type: str
y=None, # type: str
curve=None # type: str
):
if not Common.CertificatePublicKey.Algorithm.is_valid_type(algorithm):
raise TypeError('algorithm must be of type Common.CertificatePublicKey.Algorithm enum')
self.algorithm = algorithm
self.length = length
self.publickey = publickey
self.p = p
self.q = q
self.g = g
self.modulus = modulus
self.exponent = exponent
self.x = x
self.y = y
self.curve = curve
def to_context(self):
publickey_context = {
'Algorithm': self.algorithm,
'Length': self.length
}
if self.publickey:
publickey_context['PublicKey'] = self.publickey
if self.algorithm == Common.CertificatePublicKey.Algorithm.DSA:
if self.p:
publickey_context['P'] = self.p
if self.q:
publickey_context['Q'] = self.q
if self.g:
publickey_context['G'] = self.g
elif self.algorithm == Common.CertificatePublicKey.Algorithm.RSA:
if self.modulus:
publickey_context['Modulus'] = self.modulus
if self.exponent:
publickey_context['Exponent'] = self.exponent
elif self.algorithm == Common.CertificatePublicKey.Algorithm.EC:
if self.x:
publickey_context['X'] = self.x
if self.y:
publickey_context['Y'] = self.y
if self.curve:
publickey_context['Curve'] = self.curve
elif self.algorithm == Common.CertificatePublicKey.Algorithm.UNKNOWN:
pass
return publickey_context
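# Illustrative usage sketch (not part of the original module). The modulus and exponent
# below are hypothetical RSA parameters.
#
#   publickey = Common.CertificatePublicKey(
#       algorithm=Common.CertificatePublicKey.Algorithm.RSA,
#       length=2048,
#       modulus='00:af:12:...',
#       exponent=65537,
#   )
#   publickey.to_context()  # -> {'Algorithm': 'RSA', 'Length': 2048, 'Modulus': ..., 'Exponent': ...}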
class GeneralName(object):
"""
GeneralName class
Implements GeneralName interface from rfc5280
Enumerates the available General Name Types
:type gn_type: ``str``
:param gn_type: General Name Type
:type gn_value: ``str``
:param gn_value: General Name Value
:return: None
:rtype: ``None``
"""
OTHERNAME = 'otherName'
RFC822NAME = 'rfc822Name'
DNSNAME = 'dNSName'
DIRECTORYNAME = 'directoryName'
UNIFORMRESOURCEIDENTIFIER = 'uniformResourceIdentifier'
IPADDRESS = 'iPAddress'
REGISTEREDID = 'registeredID'
@staticmethod
def is_valid_type(_type):
return _type in (
Common.GeneralName.OTHERNAME,
Common.GeneralName.RFC822NAME,
Common.GeneralName.DNSNAME,
Common.GeneralName.DIRECTORYNAME,
Common.GeneralName.UNIFORMRESOURCEIDENTIFIER,
Common.GeneralName.IPADDRESS,
Common.GeneralName.REGISTEREDID
)
def __init__(
self,
gn_value, # type: str
gn_type # type: str
):
if not Common.GeneralName.is_valid_type(gn_type):
raise TypeError(
'gn_type must be of type Common.GeneralName enum'
)
self.gn_type = gn_type
self.gn_value = gn_value
def to_context(self):
return {
'Type': self.gn_type,
'Value': self.gn_value
}
def get_value(self):
return self.gn_value
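# Illustrative usage sketch (not part of the original module). A DNS name is one of the
# supported general name types; the value below is hypothetical.
#
#   gn = Common.GeneralName(gn_type=Common.GeneralName.DNSNAME, gn_value='example.com')
#   gn.to_context()  # -> {'Type': 'dNSName', 'Value': 'example.com'}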
class CertificateExtension(object):
"""
CertificateExtension class
Defines an X509 Certificate Extensions used in Common.Certificate
:type extension_type: ``str``
:param extension_type: The type of Extension (from Common.CertificateExtension.ExtensionType enum, or "Other")
:type critical: ``bool``
:param critical: Whether the extension is marked as critical
:type extension_name: ``Optional[str]``
:param extension_name: Name of the extension
:type oid: ``Optional[str]``
:param oid: OID of the extension
:type subject_alternative_names: ``Optional[List[Common.CertificateExtension.SubjectAlternativeName]]``
:param subject_alternative_names: Subject Alternative Names
:type authority_key_identifier: ``Optional[Common.CertificateExtension.AuthorityKeyIdentifier]``
:param authority_key_identifier: Authority Key Identifier
:type digest: ``Optional[str]``
:param digest: digest for Subject Key Identifier extension
:type digital_signature: ``Optional[bool]``
:param digital_signature: Digital Signature usage for Key Usage extension
:type content_commitment: ``Optional[bool]``
:param content_commitment: Content Commitment usage for Key Usage extension
:type key_encipherment: ``Optional[bool]``
:param key_encipherment: Key Encipherment usage for Key Usage extension
:type data_encipherment: ``Optional[bool]``
:param data_encipherment: Data Encipherment usage for Key Usage extension
:type key_agreement: ``Optional[bool]``
:param key_agreement: Key Agreement usage for Key Usage extension
:type key_cert_sign: ``Optional[bool]``
:param key_cert_sign: Key Cert Sign usage for Key Usage extension
:type usages: ``Optional[List[str]]``
:param usages: Usages for Extended Key Usage extension
:type distribution_points: ``Optional[List[Common.CertificateExtension.DistributionPoint]]``
:param distribution_points: Distribution Points
:type certificate_policies: ``Optional[List[Common.CertificateExtension.CertificatePolicy]]``
:param certificate_policies: Certificate Policies
:type authority_information_access: ``Optional[List[Common.CertificateExtension.AuthorityInformationAccess]]``
:param authority_information_access: Authority Information Access
:type basic_constraints: ``Optional[Common.CertificateExtension.BasicConstraints]``
:param basic_constraints: Basic Constraints
:type signed_certificate_timestamps: ``Optional[List[Common.CertificateExtension.SignedCertificateTimestamp]]``
:param signed_certificate_timestamps: (PreCertificate)Signed Certificate Timestamps
:type value: ``Optional[Union[str, List[Any], Dict[str, Any]]]``
:param value: Raw value of the Extension (used for "Other" type)
:return: None
:rtype: ``None``
"""
class SubjectAlternativeName(object):
"""
SubjectAlternativeName class
Implements Subject Alternative Name extension interface
:type gn: ``Optional[Common.GeneralName]``
:param gn: General Name Type provided as Common.GeneralName
:type gn_type: ``Optional[str]``
:param gn_type: General Name Type provided as string
:type gn_value: ``Optional[str]``
:param gn_value: General Name Value provided as string
:return: None
:rtype: ``None``
"""
def __init__(
self,
gn=None, # type: Optional[Common.GeneralName]
gn_type=None, # type: Optional[str]
gn_value=None # type: Optional[str]
):
if gn:
self.gn = gn
elif gn_type and gn_value:
self.gn = Common.GeneralName(
gn_value=gn_value,
gn_type=gn_type
)
else:
raise ValueError('either GeneralName or gn_type/gn_value required to initialize SubjectAlternativeName')
def to_context(self):
return self.gn.to_context()
def get_value(self):
return self.gn.get_value()
class AuthorityKeyIdentifier(object):
"""
AuthorityKeyIdentifier class
Implements Authority Key Identifier extension interface
:type issuer: ``Optional[List[Common.GeneralName]]``
:param issuer: Issuer list
:type serial_number: ``Optional[str]``
:param serial_number: Serial Number
:type key_identifier: ``Optional[str]``
:param key_identifier: Key Identifier
:return: None
:rtype: ``None``
"""
def __init__(
self,
issuer=None, # type: Optional[List[Common.GeneralName]]
serial_number=None, # type: Optional[str]
key_identifier=None # type: Optional[str]
):
self.issuer = issuer
self.serial_number = serial_number
self.key_identifier = key_identifier
def to_context(self):
authority_key_identifier_context = {} # type: Dict[str, Any]
if self.issuer:
authority_key_identifier_context['Issuer'] = self.issuer
if self.serial_number:
authority_key_identifier_context["SerialNumber"] = self.serial_number
if self.key_identifier:
authority_key_identifier_context["KeyIdentifier"] = self.key_identifier
return authority_key_identifier_context
class DistributionPoint(object):
"""
DistributionPoint class
Implements Distribution Point extension interface
:type full_name: ``Optional[List[Common.GeneralName]]``
:param full_name: Full Name list
:type relative_name: ``Optional[str]``
:param relative_name: Relative Name
:type crl_issuer: ``Optional[List[Common.GeneralName]]``
:param crl_issuer: CRL Issuer
:type reasons: ``Optional[List[str]]``
:param reasons: Reason list
:return: None
:rtype: ``None``
"""
def __init__(
self,
full_name=None, # type: Optional[List[Common.GeneralName]]
relative_name=None, # type: Optional[str]
crl_issuer=None, # type: Optional[List[Common.GeneralName]]
reasons=None # type: Optional[List[str]]
):
self.full_name = full_name
self.relative_name = relative_name
self.crl_issuer = crl_issuer
self.reasons = reasons
def to_context(self):
distribution_point_context = {} # type: Dict[str, Union[List, str]]
if self.full_name:
distribution_point_context["FullName"] = [fn.to_context() for fn in self.full_name]
if self.relative_name:
distribution_point_context["RelativeName"] = self.relative_name
if self.crl_issuer:
distribution_point_context["CRLIssuer"] = [ci.to_context() for ci in self.crl_issuer]
if self.reasons:
distribution_point_context["Reasons"] = self.reasons
return distribution_point_context
class CertificatePolicy(object):
"""
CertificatePolicy class
Implements Certificate Policy extension interface
:type policy_identifier: ``str``
:param policy_identifier: Policy Identifier
:type policy_qualifiers: ``Optional[List[str]]``
:param policy_qualifiers: Policy Qualifier list
:return: None
:rtype: ``None``
"""
def __init__(
self,
policy_identifier, # type: str
policy_qualifiers=None # type: Optional[List[str]]
):
self.policy_identifier = policy_identifier
self.policy_qualifiers = policy_qualifiers
def to_context(self):
certificate_policies_context = {
"PolicyIdentifier": self.policy_identifier
} # type: Dict[str, Union[List, str]]
if self.policy_qualifiers:
certificate_policies_context["PolicyQualifiers"] = self.policy_qualifiers
return certificate_policies_context
class AuthorityInformationAccess(object):
"""
AuthorityInformationAccess class
Implements Authority Information Access extension interface
:type access_method: ``str``
:param access_method: Access Method
:type access_location: ``Common.GeneralName``
:param access_location: Access Location
:return: None
:rtype: ``None``
"""
def __init__(
self,
access_method, # type: str
access_location # type: Common.GeneralName
):
self.access_method = access_method
self.access_location = access_location
def to_context(self):
return {
"AccessMethod": self.access_method,
"AccessLocation": self.access_location.to_context()
}
class BasicConstraints(object):
"""
BasicConstraints class
Implements Basic Constraints extension interface
:type ca: ``bool``
:param ca: Certificate Authority
:type path_length: ``int``
:param path_length: Path Length
:return: None
:rtype: ``None``
"""
def __init__(
self,
ca, # type: bool
path_length=None # type: int
):
self.ca = ca
self.path_length = path_length
def to_context(self):
basic_constraints_context = {
"CA": self.ca
} # type: Dict[str, Union[str, int]]
if self.path_length:
basic_constraints_context["PathLength"] = self.path_length
return basic_constraints_context
class SignedCertificateTimestamp(object):
"""
SignedCertificateTimestamp class
Implements the interface for "SignedCertificateTimestamp" extensions
:type entry_type: ``str``
:param entry_type: Entry Type (from Common.CertificateExtension.SignedCertificateTimestamp.EntryType enum)
:type version: ``str``
:param version: Version
:type log_id: ``str``
:param log_id: Log ID
:type timestamp: ``str``
:param timestamp: Timestamp (ISO8601 string representation in UTC)
:return: None
:rtype: ``None``
"""
class EntryType(object):
"""
EntryType class
Enumerates Entry Types for SignedCertificateTimestamp class
:return: None
:rtype: ``None``
"""
PRECERTIFICATE = "PreCertificate"
X509CERTIFICATE = "X509Certificate"
@staticmethod
def is_valid_type(_type):
return _type in (
Common.CertificateExtension.SignedCertificateTimestamp.EntryType.PRECERTIFICATE,
Common.CertificateExtension.SignedCertificateTimestamp.EntryType.X509CERTIFICATE
)
def __init__(
self,
entry_type, # type: str
version, # type: int
log_id, # type: str
timestamp # type: str
):
if not Common.CertificateExtension.SignedCertificateTimestamp.EntryType.is_valid_type(entry_type):
raise TypeError(
'entry_type must be of type Common.CertificateExtension.SignedCertificateTimestamp.EntryType enum'
)
self.entry_type = entry_type
self.version = version
self.log_id = log_id
self.timestamp = timestamp
def to_context(self):
timestamps_context = {} # type: Dict[str, Any]
timestamps_context['Version'] = self.version
timestamps_context["LogId"] = self.log_id
timestamps_context["Timestamp"] = self.timestamp
timestamps_context["EntryType"] = self.entry_type
return timestamps_context
class ExtensionType(object):
"""
ExtensionType class
Enumerates Extension Types for the Common.CertificateExtension class
:return: None
:rtype: ``None``
"""
SUBJECTALTERNATIVENAME = "SubjectAlternativeName"
AUTHORITYKEYIDENTIFIER = "AuthorityKeyIdentifier"
SUBJECTKEYIDENTIFIER = "SubjectKeyIdentifier"
KEYUSAGE = "KeyUsage"
EXTENDEDKEYUSAGE = "ExtendedKeyUsage"
CRLDISTRIBUTIONPOINTS = "CRLDistributionPoints"
CERTIFICATEPOLICIES = "CertificatePolicies"
AUTHORITYINFORMATIONACCESS = "AuthorityInformationAccess"
BASICCONSTRAINTS = "BasicConstraints"
SIGNEDCERTIFICATETIMESTAMPS = "SignedCertificateTimestamps"
PRESIGNEDCERTIFICATETIMESTAMPS = "PreCertSignedCertificateTimestamps"
OTHER = "Other"
@staticmethod
def is_valid_type(_type):
return _type in (
Common.CertificateExtension.ExtensionType.SUBJECTALTERNATIVENAME,
Common.CertificateExtension.ExtensionType.AUTHORITYKEYIDENTIFIER,
Common.CertificateExtension.ExtensionType.SUBJECTKEYIDENTIFIER,
Common.CertificateExtension.ExtensionType.KEYUSAGE,
Common.CertificateExtension.ExtensionType.EXTENDEDKEYUSAGE,
Common.CertificateExtension.ExtensionType.CRLDISTRIBUTIONPOINTS,
Common.CertificateExtension.ExtensionType.CERTIFICATEPOLICIES,
Common.CertificateExtension.ExtensionType.AUTHORITYINFORMATIONACCESS,
Common.CertificateExtension.ExtensionType.BASICCONSTRAINTS,
Common.CertificateExtension.ExtensionType.SIGNEDCERTIFICATETIMESTAMPS,
Common.CertificateExtension.ExtensionType.PRESIGNEDCERTIFICATETIMESTAMPS,
Common.CertificateExtension.ExtensionType.OTHER # for extensions that are not handled explicitly
)
def __init__(
self,
extension_type, # type: str
critical, # type: bool
oid=None, # type: Optional[str]
extension_name=None, # type: Optional[str]
subject_alternative_names=None, # type: Optional[List[Common.CertificateExtension.SubjectAlternativeName]]
authority_key_identifier=None, # type: Optional[Common.CertificateExtension.AuthorityKeyIdentifier]
digest=None, # type: Optional[str]
digital_signature=None, # type: Optional[bool]
content_commitment=None, # type: Optional[bool]
key_encipherment=None, # type: Optional[bool]
data_encipherment=None, # type: Optional[bool]
key_agreement=None, # type: Optional[bool]
key_cert_sign=None, # type: Optional[bool]
crl_sign=None, # type: Optional[bool]
usages=None, # type: Optional[List[str]]
distribution_points=None, # type: Optional[List[Common.CertificateExtension.DistributionPoint]]
certificate_policies=None, # type: Optional[List[Common.CertificateExtension.CertificatePolicy]]
authority_information_access=None, # type: Optional[List[Common.CertificateExtension.AuthorityInformationAccess]]
basic_constraints=None, # type: Optional[Common.CertificateExtension.BasicConstraints]
signed_certificate_timestamps=None, # type: Optional[List[Common.CertificateExtension.SignedCertificateTimestamp]]
value=None # type: Optional[Union[str, List[Any], Dict[str, Any]]]
):
if not Common.CertificateExtension.ExtensionType.is_valid_type(extension_type):
raise TypeError('algorithm must be of type Common.CertificateExtension.ExtensionType enum')
self.extension_type = extension_type
self.critical = critical
if self.extension_type == Common.CertificateExtension.ExtensionType.SUBJECTALTERNATIVENAME:
self.subject_alternative_names = subject_alternative_names
self.oid = "2.5.29.17"
self.extension_name = "subjectAltName"
elif self.extension_type == Common.CertificateExtension.ExtensionType.SUBJECTKEYIDENTIFIER:
if not digest:
raise ValueError('digest is mandatory for SubjectKeyIdentifier extension')
self.digest = digest
self.oid = "2.5.29.14"
self.extension_name = "subjectKeyIdentifier"
elif self.extension_type == Common.CertificateExtension.ExtensionType.KEYUSAGE:
self.digital_signature = digital_signature
self.content_commitment = content_commitment
self.key_encipherment = key_encipherment
self.data_encipherment = data_encipherment
self.key_agreement = key_agreement
self.key_cert_sign = key_cert_sign
self.crl_sign = crl_sign
self.oid = "2.5.29.15"
self.extension_name = "keyUsage"
elif self.extension_type == Common.CertificateExtension.ExtensionType.EXTENDEDKEYUSAGE:
if not usages:
raise ValueError('usages is mandatory for ExtendedKeyUsage extension')
self.usages = usages
self.oid = "2.5.29.37"
self.extension_name = "extendedKeyUsage"
elif self.extension_type == Common.CertificateExtension.ExtensionType.AUTHORITYKEYIDENTIFIER:
self.authority_key_identifier = authority_key_identifier
self.oid = "2.5.29.35"
self.extension_name = "authorityKeyIdentifier"
elif self.extension_type == Common.CertificateExtension.ExtensionType.CRLDISTRIBUTIONPOINTS:
self.distribution_points = distribution_points
self.oid = "2.5.29.31"
self.extension_name = "cRLDistributionPoints"
elif self.extension_type == Common.CertificateExtension.ExtensionType.CERTIFICATEPOLICIES:
self.certificate_policies = certificate_policies
self.oid = "2.5.29.32"
self.extension_name = "certificatePolicies"
elif self.extension_type == Common.CertificateExtension.ExtensionType.AUTHORITYINFORMATIONACCESS:
self.authority_information_access = authority_information_access
self.oid = "1.3.6.1.5.5.7.1.1"
self.extension_name = "authorityInfoAccess"
elif self.extension_type == Common.CertificateExtension.ExtensionType.BASICCONSTRAINTS:
self.basic_constraints = basic_constraints
self.oid = "2.5.29.19"
self.extension_name = "basicConstraints"
elif self.extension_type == Common.CertificateExtension.ExtensionType.PRESIGNEDCERTIFICATETIMESTAMPS:
self.signed_certificate_timestamps = signed_certificate_timestamps
self.oid = "1.3.6.1.4.1.11129.2.4.2"
self.extension_name = "signedCertificateTimestampList"
elif self.extension_type == Common.CertificateExtension.ExtensionType.SIGNEDCERTIFICATETIMESTAMPS:
self.signed_certificate_timestamps = signed_certificate_timestamps
self.oid = "1.3.6.1.4.1.11129.2.4.5"
self.extension_name = "signedCertificateTimestampList"
elif self.extension_type == Common.CertificateExtension.ExtensionType.OTHER:
self.value = value
# override oid, extension_name if provided as inputs
if oid:
self.oid = oid
if extension_name:
self.extension_name = extension_name
def to_context(self):
extension_context = {
"OID": self.oid,
"Name": self.extension_name,
"Critical": self.critical
} # type: Dict[str, Any]
if (
self.extension_type == Common.CertificateExtension.ExtensionType.SUBJECTALTERNATIVENAME
and self.subject_alternative_names is not None
):
extension_context["Value"] = [san.to_context() for san in self.subject_alternative_names]
elif (
self.extension_type == Common.CertificateExtension.ExtensionType.AUTHORITYKEYIDENTIFIER
and self.authority_key_identifier is not None
):
extension_context["Value"] = self.authority_key_identifier.to_context()
elif (
self.extension_type == Common.CertificateExtension.ExtensionType.SUBJECTKEYIDENTIFIER
and self.digest is not None
):
extension_context["Value"] = {
"Digest": self.digest
}
elif self.extension_type == Common.CertificateExtension.ExtensionType.KEYUSAGE:
key_usage = {} # type: Dict[str, bool]
if self.digital_signature:
key_usage["DigitalSignature"] = self.digital_signature
if self.content_commitment:
key_usage["ContentCommitment"] = self.content_commitment
if self.key_encipherment:
key_usage["KeyEncipherment"] = self.key_encipherment
if self.data_encipherment:
key_usage["DataEncipherment"] = self.data_encipherment
if self.key_agreement:
key_usage["KeyAgreement"] = self.key_agreement
if self.key_cert_sign:
key_usage["KeyCertSign"] = self.key_cert_sign
if self.crl_sign:
key_usage["CrlSign"] = self.crl_sign
if key_usage:
extension_context["Value"] = key_usage
elif (
self.extension_type == Common.CertificateExtension.ExtensionType.EXTENDEDKEYUSAGE
and self.usages is not None
):
extension_context["Value"] = {
"Usages": [u for u in self.usages]
}
elif (
self.extension_type == Common.CertificateExtension.ExtensionType.CRLDISTRIBUTIONPOINTS
and self.distribution_points is not None
):
extension_context["Value"] = [dp.to_context() for dp in self.distribution_points]
elif (
self.extension_type == Common.CertificateExtension.ExtensionType.CERTIFICATEPOLICIES
and self.certificate_policies is not None
):
extension_context["Value"] = [cp.to_context() for cp in self.certificate_policies]
elif (
self.extension_type == Common.CertificateExtension.ExtensionType.AUTHORITYINFORMATIONACCESS
and self.authority_information_access is not None
):
extension_context["Value"] = [aia.to_context() for aia in self.authority_information_access]
elif (
self.extension_type == Common.CertificateExtension.ExtensionType.BASICCONSTRAINTS
and self.basic_constraints is not None
):
extension_context["Value"] = self.basic_constraints.to_context()
elif (
self.extension_type in [
Common.CertificateExtension.ExtensionType.SIGNEDCERTIFICATETIMESTAMPS,
Common.CertificateExtension.ExtensionType.PRESIGNEDCERTIFICATETIMESTAMPS
]
and self.signed_certificate_timestamps is not None
):
extension_context["Value"] = [sct.to_context() for sct in self.signed_certificate_timestamps]
elif (
self.extension_type == Common.CertificateExtension.ExtensionType.OTHER
and self.value is not None
):
extension_context["Value"] = self.value
return extension_context
class Certificate(Indicator):
"""
Implements the X509 Certificate interface
Certificate indicator - https://xsoar.pan.dev/docs/integrations/context-standards-mandatory#certificate
:type subject_dn: ``str``
:param subject_dn: Subject Distinguished Name
:type dbot_score: ``DBotScore``
:param dbot_score: If Certificate has a score then create and set a DBotScore object.
:type name: ``Optional[Union[str, List[str]]]``
:param name: Name (if not provided output is calculated from SubjectDN and SAN)
:type issuer_dn: ``Optional[str]``
:param issuer_dn: Issuer Distinguished Name
:type serial_number: ``Optional[str]``
:param serial_number: Serial Number
:type validity_not_after: ``Optional[str]``
:param validity_not_after: Certificate Expiration Timestamp (ISO8601 string representation)
:type validity_not_before: ``Optional[str]``
:param validity_not_before: Initial Certificate Validity Timestamp (ISO8601 string representation)
:type sha512: ``Optional[str]``
:param sha512: The SHA-512 hash of the certificate in binary encoded format (DER)
:type sha256: ``Optional[str]``
:param sha256: The SHA-256 hash of the certificate in binary encoded format (DER)
:type sha1: ``Optional[str]``
:param sha1: The SHA-1 hash of the certificate in binary encoded format (DER)
:type md5: ``Optional[str]``
:param md5: The MD5 hash of the certificate in binary encoded format (DER)
:type publickey: ``Optional[Common.CertificatePublicKey]``
:param publickey: Certificate Public Key
:type spki_sha256: ``Optional[str]``
:param spki_sha256: The SHA-256 hash of the SPKI
:type signature_algorithm: ``Optional[str]``
:param signature_algorithm: Signature Algorithm
:type signature: ``Optional[str]``
:param signature: Certificate Signature
:type subject_alternative_name: \
``Optional[List[Union[str,Dict[str, str],Common.CertificateExtension.SubjectAlternativeName]]]``
:param subject_alternative_name: Subject Alternative Name list
:type extensions: ``Optional[List[Common.CertificateExtension]]``
:param extensions: Certificate Extension List
:type pem: ``Optional[str]``
:param pem: PEM encoded certificate
:return: None
:rtype: ``None``
"""
CONTEXT_PATH = 'Certificate(val.MD5 && val.MD5 == obj.MD5 || val.SHA1 && val.SHA1 == obj.SHA1 || ' \
'val.SHA256 && val.SHA256 == obj.SHA256 || val.SHA512 && val.SHA512 == obj.SHA512)'
def __init__(
self,
subject_dn, # type: str
dbot_score=None, # type: Optional[Common.DBotScore]
name=None, # type: Optional[Union[str, List[str]]]
issuer_dn=None, # type: Optional[str]
serial_number=None, # type: Optional[str]
validity_not_after=None, # type: Optional[str]
validity_not_before=None, # type: Optional[str]
sha512=None, # type: Optional[str]
sha256=None, # type: Optional[str]
sha1=None, # type: Optional[str]
md5=None, # type: Optional[str]
publickey=None, # type: Optional[Common.CertificatePublicKey]
spki_sha256=None, # type: Optional[str]
signature_algorithm=None, # type: Optional[str]
signature=None, # type: Optional[str]
subject_alternative_name=None, \
# type: Optional[List[Union[str,Dict[str, str],Common.CertificateExtension.SubjectAlternativeName]]]
extensions=None, # type: Optional[List[Common.CertificateExtension]]
pem=None # type: Optional[str]
):
self.subject_dn = subject_dn
self.dbot_score = dbot_score
self.name = None
if name:
if isinstance(name, str):
self.name = [name]
elif isinstance(name, list):
self.name = name
else:
raise TypeError('certificate name must be of type str or List[str]')
self.issuer_dn = issuer_dn
self.serial_number = serial_number
self.validity_not_after = validity_not_after
self.validity_not_before = validity_not_before
self.sha512 = sha512
self.sha256 = sha256
self.sha1 = sha1
self.md5 = md5
if publickey and not isinstance(publickey, Common.CertificatePublicKey):
raise TypeError('publickey must be of type Common.CertificatePublicKey')
self.publickey = publickey
self.spki_sha256 = spki_sha256
self.signature_algorithm = signature_algorithm
self.signature = signature
# if subject_alternative_name is set and is a list
# make sure it is a list of strings, dicts of strings or SAN Extensions
if (
subject_alternative_name
and isinstance(subject_alternative_name, list)
and not all(
isinstance(san, str)
or isinstance(san, dict)
or isinstance(san, Common.CertificateExtension.SubjectAlternativeName)
for san in subject_alternative_name)
):
raise TypeError(
'subject_alternative_name must be list of str or Common.CertificateExtension.SubjectAlternativeName'
)
self.subject_alternative_name = subject_alternative_name
if (
extensions
and (
not isinstance(extensions, list)
or not all(isinstance(e, Common.CertificateExtension) for e in extensions)
)
):
raise TypeError('extensions must be of type List[Common.CertificateExtension]')
self.extensions = extensions
self.pem = pem
if not isinstance(dbot_score, Common.DBotScore):
raise ValueError('dbot_score must be of type DBotScore')
def to_context(self):
certificate_context = {
"SubjectDN": self.subject_dn
} # type: Dict[str, Any]
san_list = [] # type: List[Dict[str, str]]
if self.subject_alternative_name:
for san in self.subject_alternative_name:
if isinstance(san, str):
san_list.append({
'Value': san
})
elif isinstance(san, dict):
san_list.append(san)
elif isinstance(san, Common.CertificateExtension.SubjectAlternativeName):
san_list.append(san.to_context())
elif self.extensions: # autogenerate it from extensions
for ext in self.extensions:
if (
ext.extension_type == Common.CertificateExtension.ExtensionType.SUBJECTALTERNATIVENAME
and ext.subject_alternative_names is not None
):
for san in ext.subject_alternative_names:
san_list.append(san.to_context())
if san_list:
certificate_context['SubjectAlternativeName'] = san_list
if self.name:
certificate_context["Name"] = self.name
else: # autogenerate it
name = set() # type: Set[str]
# add subject alternative names
if san_list:
name = set([
sn['Value'] for sn in san_list
if (
'Value' in sn
and (
'Type' not in sn
or sn['Type'] in (Common.GeneralName.DNSNAME, Common.GeneralName.IPADDRESS)
)
)
])
# subject_dn is RFC4515 escaped
# replace \, and \+ with the long escaping \2c and \2b
long_escaped_subject_dn = self.subject_dn.replace("\\,", "\\2c")
long_escaped_subject_dn = long_escaped_subject_dn.replace("\\+", "\\2b")
# we then split RDN (separated by ,) and multi-valued RDN (sep by +)
rdns = long_escaped_subject_dn.replace('+', ',').split(',')
cn = next((rdn for rdn in rdns if rdn.startswith('CN=')), None)
if cn:
name.add(cn.split('=', 1)[-1])
if name:
certificate_context["Name"] = sorted(list(name))
if self.issuer_dn:
certificate_context["IssuerDN"] = self.issuer_dn
if self.serial_number:
certificate_context["SerialNumber"] = self.serial_number
if self.validity_not_before:
certificate_context["ValidityNotBefore"] = self.validity_not_before
if self.validity_not_after:
certificate_context["ValidityNotAfter"] = self.validity_not_after
if self.sha512:
certificate_context["SHA512"] = self.sha512
if self.sha256:
certificate_context["SHA256"] = self.sha256
if self.sha1:
certificate_context["SHA1"] = self.sha1
if self.md5:
certificate_context["MD5"] = self.md5
if self.publickey and isinstance(self.publickey, Common.CertificatePublicKey):
certificate_context["PublicKey"] = self.publickey.to_context()
if self.spki_sha256:
certificate_context["SPKISHA256"] = self.spki_sha256
sig = {} # type: Dict[str, str]
if self.signature_algorithm:
sig["Algorithm"] = self.signature_algorithm
if self.signature:
sig["Signature"] = self.signature
if sig:
certificate_context["Signature"] = sig
if self.extensions:
certificate_context["Extension"] = [e.to_context() for e in self.extensions]
if self.pem:
certificate_context["PEM"] = self.pem
if self.dbot_score and self.dbot_score.score == Common.DBotScore.BAD:
certificate_context['Malicious'] = {
'Vendor': self.dbot_score.integration_name,
'Description': self.dbot_score.malicious_description
}
ret_value = {
Common.Certificate.CONTEXT_PATH: certificate_context
}
if self.dbot_score:
ret_value.update(self.dbot_score.to_context())
return ret_value
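# Illustrative usage sketch (not part of the original module). The subject, hashes,
# validity dates and vendor name below are hypothetical; a DBotScore instance is
# required by the constructor.
#
#   dbot_score = Common.DBotScore(
#       indicator='abcdef0123456789' * 4,  # hypothetical SHA-256 value
#       indicator_type=DBotScoreType.CERTIFICATE,
#       integration_name='ExampleVendor',
#       score=Common.DBotScore.NONE,
#   )
#   certificate = Common.Certificate(
#       subject_dn='CN=example.com,O=Example Org',
#       dbot_score=dbot_score,
#       sha256='abcdef0123456789' * 4,
#       validity_not_before='2020-01-01T00:00:00Z',
#       validity_not_after='2021-01-01T00:00:00Z',
#   )
#   context = certificate.to_context()  # Name is auto-derived from the CN when not provided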
class ScheduledCommand:
"""
ScheduledCommand configuration class
Holds the scheduled command configuration for the command result - managing the way the command should be polled.
:type command: ``str``
:param command: The command that'll run after next_run_in_seconds has passed.
:type next_run_in_seconds: ``int``
:param next_run_in_seconds: How long to wait before executing the command.
:type args: ``Optional[Dict[str, Any]]``
:param args: Arguments to use when executing the command.
:type timeout_in_seconds: ``Optional[int]``
:param timeout_in_seconds: Number of seconds until the polling sequence will timeout.
:return: None
:rtype: ``None``
"""
VERSION_MISMATCH_ERROR = 'This command is not supported by this XSOAR server version. Please update your server ' \
'version to 6.2.0 or later.'
def __init__(
self,
command, # type: str
next_run_in_seconds, # type: int
args=None, # type: Optional[Dict[str, Any]]
timeout_in_seconds=None, # type: Optional[int]
):
self.raise_error_if_not_supported()
self._command = command
if next_run_in_seconds < 10:
demisto.info('ScheduledCommandConfiguration provided value for next_run_in_seconds: '
'{} is '.format(next_run_in_seconds) + 'too low - minimum interval is 10 seconds. '
'next_run_in_seconds was set to 10 seconds.')
next_run_in_seconds = 10
self._next_run = str(next_run_in_seconds)
self._args = args
self._timeout = str(timeout_in_seconds) if timeout_in_seconds else None
@staticmethod
def raise_error_if_not_supported():
if not is_demisto_version_ge('6.2.0'):
raise DemistoException(ScheduledCommand.VERSION_MISMATCH_ERROR)
def to_results(self):
"""
Returns the result dictionary of the polling command
"""
return assign_params(
PollingCommand=self._command,
NextRun=self._next_run,
PollingArgs=self._args,
Timeout=self._timeout
)
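# Illustrative usage sketch (not part of the original module). The command name and
# arguments below are hypothetical; the object is typically attached to a CommandResults
# so the server re-runs the command after the given interval (requires server 6.2.0+,
# enforced by raise_error_if_not_supported above).
#
#   scheduled = ScheduledCommand(
#       command='vendor-get-scan-status',
#       next_run_in_seconds=30,
#       args={'scan_id': '1234'},
#       timeout_in_seconds=600,
#   )
#   return_results(CommandResults(readable_output='Scan still running...', scheduled_command=scheduled))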
def camelize_string(src_str, delim='_', upper_camel=True):
"""
Transform snake_case to CamelCase
:type src_str: ``str``
:param src_str: snake_case string to convert.
:type delim: ``str``
:param delim: indicator category.
:type upper_camel: ``bool``
:param upper_camel: When True then transforms string to camel case with the first letter capitalised
(for example: demisto_content to DemistoContent), otherwise the first letter will not be capitalised
(for example: demisto_content to demistoContent).
:return: A CamelCase string.
:rtype: ``str``
"""
if not src_str: # empty string
return ""
components = src_str.split(delim)
camelize_without_first_char = ''.join(map(lambda x: x.title(), components[1:]))
if upper_camel:
return components[0].title() + camelize_without_first_char
else:
return components[0].lower() + camelize_without_first_char
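# Illustrative examples (not part of the original module):
#   camelize_string('demisto_content')                     # -> 'DemistoContent'
#   camelize_string('demisto_content', upper_camel=False)  # -> 'demistoContent'
#   camelize_string('email-address', delim='-')            # -> 'EmailAddress'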
class IndicatorsTimeline:
"""
IndicatorsTimeline class - use to return Indicator Timeline object to be used in CommandResults
:type indicators: ``list``
:param indicators: expects a list of indicators.
:type category: ``str``
:param category: indicator category.
:type message: ``str``
:param message: indicator message.
:return: None
:rtype: ``None``
"""
def __init__(self, indicators=None, category=None, message=None):
# type: (list, str, str) -> None
if indicators is None:
indicators = []
# check if we are running from an integration or automation
try:
_ = demisto.params()
default_category = 'Integration Update'
except AttributeError:
default_category = 'Automation Update'
timelines = []
for indicator in indicators:
# build a fresh dict per indicator so earlier entries are not overwritten
timeline = {}
timeline['Value'] = indicator
if category:
timeline['Category'] = category
else:
timeline['Category'] = default_category
if message:
timeline['Message'] = message
timelines.append(timeline)
self.indicators_timeline = timelines
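# Illustrative usage sketch (not part of the original module). The indicator values and
# message below are hypothetical; the object is passed to CommandResults(indicators_timeline=...).
#
#   timeline = IndicatorsTimeline(
#       indicators=['8.8.8.8', '1.1.1.1'],
#       category='Integration Update',
#       message='Enriched by ExampleVendor',
#   )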
def arg_to_number(arg, arg_name=None, required=False):
# type: (Any, Optional[str], bool) -> Optional[int]
"""Converts an XSOAR argument to a Python int
This function is used to quickly validate an argument provided to XSOAR
via ``demisto.args()`` into an ``int`` type. It will throw a ValueError
if the input is invalid. If the input is None, it will throw a ValueError
if required is ``True``, or ``None`` if required is ``False``.
:type arg: ``Any``
:param arg: argument to convert
:type arg_name: ``str``
:param arg_name: argument name
:type required: ``bool``
:param required:
throws exception if ``True`` and argument provided is None
:return:
returns an ``int`` if arg can be converted
returns ``None`` if arg is ``None`` and required is set to ``False``
otherwise throws an Exception
:rtype: ``Optional[int]``
"""
if arg is None or arg == '':
if required is True:
if arg_name:
raise ValueError('Missing "{}"'.format(arg_name))
else:
raise ValueError('Missing required argument')
return None
if isinstance(arg, str):
if arg.isdigit():
return int(arg)
try:
return int(float(arg))
except Exception:
if arg_name:
raise ValueError('Invalid number: "{}"="{}"'.format(arg_name, arg))
else:
raise ValueError('"{}" is not a valid number'.format(arg))
if isinstance(arg, int):
return arg
if arg_name:
raise ValueError('Invalid number: "{}"="{}"'.format(arg_name, arg))
else:
raise ValueError('"{}" is not a valid number'.format(arg))
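# Illustrative examples (not part of the original module):
#   arg_to_number('5')                        # -> 5
#   arg_to_number('5.7')                      # -> 5 (truncated via int(float(...)))
#   arg_to_number(None, arg_name='limit')     # -> None (required defaults to False)
#   arg_to_number('abc', arg_name='limit')    # raises ValueError('Invalid number: "limit"="abc"')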
def arg_to_datetime(arg, arg_name=None, is_utc=True, required=False, settings=None):
# type: (Any, Optional[str], bool, bool, dict) -> Optional[datetime]
"""Converts an XSOAR argument to a datetime
This function is used to quickly validate an argument provided to XSOAR
via ``demisto.args()`` into an ``datetime``. It will throw a ValueError if the input is invalid.
If the input is None, it will throw a ValueError if required is ``True``,
or ``None`` if required is ``False``.
:type arg: ``Any``
:param arg: argument to convert
:type arg_name: ``str``
:param arg_name: argument name
:type is_utc: ``bool``
:param is_utc: if True then date converted as utc timezone, otherwise will convert with local timezone.
:type required: ``bool``
:param required:
throws exception if ``True`` and argument provided is None
:type settings: ``dict``
:param settings: If provided, passed to dateparser.parse function.
:return:
returns an ``datetime`` if conversion works
returns ``None`` if arg is ``None`` and required is set to ``False``
otherwise throws an Exception
:rtype: ``Optional[datetime]``
"""
if arg is None:
if required is True:
if arg_name:
raise ValueError('Missing "{}"'.format(arg_name))
else:
raise ValueError('Missing required argument')
return None
if isinstance(arg, str) and arg.isdigit() or isinstance(arg, (int, float)):
# timestamp is a str containing digits - we just convert it to int
ms = float(arg)
if ms > 2000000000.0:
# in case timestamp was provided as unix time (in milliseconds)
ms = ms / 1000.0
if is_utc:
return datetime.utcfromtimestamp(ms).replace(tzinfo=timezone.utc)
else:
return datetime.fromtimestamp(ms)
if isinstance(arg, str):
# we use dateparser to handle strings either in ISO8601 format, or
# relative time stamps.
# For example: format 2019-10-23T00:00:00 or "3 days", etc
if settings:
date = dateparser.parse(arg, settings=settings)
else:
date = dateparser.parse(arg, settings={'TIMEZONE': 'UTC'})
if date is None:
# if d is None it means dateparser failed to parse it
if arg_name:
raise ValueError('Invalid date: "{}"="{}"'.format(arg_name, arg))
else:
raise ValueError('"{}" is not a valid date'.format(arg))
return date
if arg_name:
raise ValueError('Invalid date: "{}"="{}"'.format(arg_name, arg))
else:
raise ValueError('"{}" is not a valid date'.format(arg))
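# Illustrative examples (not part of the original module):
#   arg_to_datetime('2019-10-23T00:00:00')   # ISO 8601 string parsed by dateparser (UTC by default)
#   arg_to_datetime('3 days')                # relative expression, resolved by dateparser
#   arg_to_datetime(1572480000)              # epoch seconds -> timezone-aware UTC datetime
#   arg_to_datetime(None, required=False)    # -> None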
# -------------------------------- Relationships----------------------------------- #
class EntityRelationship:
"""
XSOAR entity relationship.
:type name: ``str``
:param name: Relationship name.
:type relationship_type: ``str``
:param relationship_type: Relationship type. (e.g. IndicatorToIndicator...).
:type entity_a: ``str``
:param entity_a: A value, A aka source of the relationship.
:type entity_a_family: ``str``
:param entity_a_family: Entity family of A, A aka source of the relationship. (e.g. Indicator...)
:type entity_a_type: ``str``
:param entity_a_type: Entity A type, A aka source of the relationship. (e.g. IP/URL/...).
:type entity_b: ``str``
:param entity_b: B value, B aka destination of the relationship.
:type entity_b_family: ``str``
:param entity_b_family: Entity family of B, B aka destination of the relationship. (e.g. Indicator...)
:type entity_b_type: ``str``
:param entity_b_type: Entity B type, B aka destination of the relationship. (e.g. IP/URL/...).
:type source_reliability: ``str``
:param source_reliability: Source reliability.
:type fields: ``dict``
:param fields: Custom fields. (Optional)
:type brand: ``str``
:param brand: Source brand name. (Optional)
:return: None
:rtype: ``None``
"""
class RelationshipsTypes(object):
"""
Relationships Types objects.
:return: None
:rtype: ``None``
"""
# list of the supported relationship types.
RELATIONSHIP_TYPES = ['IndicatorToIndicator']
@staticmethod
def is_valid_type(_type):
# type: (str) -> bool
return _type in EntityRelationship.RelationshipsTypes.RELATIONSHIP_TYPES
class RelationshipsFamily(object):
"""
Relationships Family object list.
:return: None
:rtype: ``None``
"""
INDICATOR = ["Indicator"]
@staticmethod
def is_valid_type(_type):
# type: (str) -> bool
return _type in EntityRelationship.RelationshipsFamily.INDICATOR
class Relationships(object):
"""
Enum: Relations names and their reverse
:return: None
:rtype: ``None``
"""
APPLIED = 'applied'
ATTACHMENT_OF = 'attachment-of'
ATTACHES = 'attaches'
ATTRIBUTE_OF = 'attribute-of'
ATTRIBUTED_BY = 'attributed-by'
ATTRIBUTED_TO = 'attributed-to'
AUTHORED_BY = 'authored-by'
BEACONS_TO = 'beacons-to'
BUNDLED_IN = 'bundled-in'
BUNDLES = 'bundles'
COMMUNICATED_WITH = 'communicated-with'
COMMUNICATED_BY = 'communicated-by'
COMMUNICATES_WITH = 'communicates-with'
COMPROMISES = 'compromises'
CONTAINS = 'contains'
CONTROLS = 'controls'
CREATED_BY = 'created-by'
CREATES = 'creates'
DELIVERED_BY = 'delivered-by'
DELIVERS = 'delivers'
DOWNLOADS = 'downloads'
DOWNLOADS_FROM = 'downloads-from'
DROPPED_BY = 'dropped-by'
DROPS = 'drops'
DUPLICATE_OF = 'duplicate-of'
EMBEDDED_IN = 'embedded-in'
EMBEDS = 'embeds'
EXECUTED = 'executed'
EXECUTED_BY = 'executed-by'
EXFILTRATES_TO = 'exfiltrates-to'
EXPLOITS = 'exploits'
HAS = 'has'
HOSTED_ON = 'hosted-on'
HOSTS = 'hosts'
IMPERSONATES = 'impersonates'
INDICATED_BY = 'indicated-by'
INDICATOR_OF = 'indicator-of'
INJECTED_FROM = 'injected-from'
INJECTS_INTO = 'injects-into'
INVESTIGATES = 'investigates'
IS_ALSO = 'is-also'
MITIGATED_BY = 'mitigated-by'
MITIGATES = 'mitigates'
ORIGINATED_FROM = 'originated-from'
OWNED_BY = 'owned-by'
OWNS = 'owns'
PART_OF = 'part-of'
RELATED_TO = 'related-to'
REMEDIATES = 'remediates'
RESOLVED_BY = 'resolved-by'
RESOLVED_FROM = 'resolved-from'
RESOLVES_TO = 'resolves-to'
SEEN_ON = 'seen-on'
SENT = 'sent'
SENT_BY = 'sent-by'
SENT_FROM = 'sent-from'
SENT_TO = 'sent-to'
SIMILAR_TO = 'similar-to'
SUB_DOMAIN_OF = 'sub-domain-of'
SUB_TECHNIQUE_OF = 'subtechnique-of'
PARENT_TECHNIQUE_OF = 'parent-technique-of'
SUPRA_DOMAIN_OF = 'supra-domain-of'
TARGETED_BY = 'targeted-by'
TARGETS = 'targets'
TYPES = 'Types'
UPLOADED_TO = 'uploaded-to'
USED_BY = 'used-by'
USED_ON = 'used-on'
USES = 'uses'
VARIANT_OF = 'variant-of'
RELATIONSHIPS_NAMES = {'applied': 'applied-on',
'attachment-of': 'attaches',
'attaches': 'attachment-of',
'attribute-of': 'owns',
'attributed-by': 'attributed-to',
'attributed-to': 'attributed-by',
'authored-by': 'author-of',
'beacons-to': 'communicated-by',
'bundled-in': 'bundles',
'bundles': 'bundled-in',
'communicated-with': 'communicated-by',
'communicated-by': 'communicates-with',
'communicates-with': 'communicated-by',
'compromises': 'compromised-by',
'contains': 'part-of',
'controls': 'controlled-by',
'created-by': 'creates',
'creates': 'created-by',
'delivered-by': 'delivers',
'delivers': 'delivered-by',
'downloads': 'downloaded-by',
'downloads-from': 'hosts',
'dropped-by': 'drops',
'drops': 'dropped-by',
'duplicate-of': 'duplicate-of',
'embedded-in': 'embeds',
'embeds': 'embedded-on',
'executed': 'executed-by',
'executed-by': 'executes',
'exfiltrates-to': 'exfiltrated-from',
'exploits': 'exploited-by',
'has': 'seen-on',
'hosted-on': 'hosts',
'hosts': 'hosted-on',
'impersonates': 'impersonated-by',
'indicated-by': 'indicator-of',
'indicator-of': 'indicated-by',
'injected-from': 'injects-into',
'injects-into': 'injected-from',
'investigates': 'investigated-by',
'is-also': 'is-also',
'mitigated-by': 'mitigates',
'mitigates': 'mitigated-by',
'originated-from': 'source-of',
'owned-by': 'owns',
'owns': 'owned-by',
'part-of': 'contains',
'related-to': 'related-to',
'remediates': 'remediated-by',
'resolved-by': 'resolves-to',
'resolved-from': 'resolves-to',
'resolves-to': 'resolved-from',
'seen-on': 'has',
'sent': 'attached-to',
'sent-by': 'sent',
'sent-from': 'received-by',
'sent-to': 'received-by',
'similar-to': 'similar-to',
'sub-domain-of': 'supra-domain-of',
'supra-domain-of': 'sub-domain-of',
'subtechnique-of': 'parent-technique-of',
'parent-technique-of': 'subtechnique-of',
'targeted-by': 'targets',
'targets': 'targeted-by',
'Types': 'Reverse',
'uploaded-to': 'hosts',
'used-by': 'uses',
'used-on': 'targeted-by',
'uses': 'used-by',
'variant-of': 'variant-of'}
@staticmethod
def is_valid(_type):
"""
:type _type: ``str``
:param _type: the data to be returned and will be set to context
:return: Is the given type supported
:rtype: ``bool``
"""
return _type in EntityRelationship.Relationships.RELATIONSHIPS_NAMES.keys()
@staticmethod
def get_reverse(name):
"""
:type name: ``str``
:param name: Relationship name
:return: Returns the reversed relationship name
:rtype: ``str``
"""
return EntityRelationship.Relationships.RELATIONSHIPS_NAMES[name]
def __init__(self, name, entity_a, entity_a_type, entity_b, entity_b_type,
reverse_name='', relationship_type='IndicatorToIndicator', entity_a_family='Indicator',
entity_b_family='Indicator', source_reliability="", fields=None, brand=""):
# Relationship
if not EntityRelationship.Relationships.is_valid(name):
raise ValueError("Invalid relationship: " + name)
self._name = name
if reverse_name:
if not EntityRelationship.Relationships.is_valid(reverse_name):
raise ValueError("Invalid reverse relationship: " + reverse_name)
self._reverse_name = reverse_name
else:
self._reverse_name = EntityRelationship.Relationships.get_reverse(name)
if not EntityRelationship.RelationshipsTypes.is_valid_type(relationship_type):
raise ValueError("Invalid relationship type: " + relationship_type)
self._relationship_type = relationship_type
# Entity A - Source
self._entity_a = entity_a
self._entity_a_type = entity_a_type
if not EntityRelationship.RelationshipsFamily.is_valid_type(entity_a_family):
raise ValueError("Invalid entity A Family type: " + entity_a_family)
self._entity_a_family = entity_a_family
# Entity B - Destination
if not entity_b:
demisto.info(
"WARNING: Invalid entity B - Relationships will not be created to entity A {} with relationship name {}".format(
str(entity_a), str(name)))
self._entity_b = entity_b
self._entity_b_type = entity_b_type
if not EntityRelationship.RelationshipsFamily.is_valid_type(entity_b_family):
raise ValueError("Invalid entity B Family type: " + entity_b_family)
self._entity_b_family = entity_b_family
# Custom fields
if fields:
self._fields = fields
else:
self._fields = {}
# Source
if brand:
self._brand = brand
else:
self._brand = ''
if source_reliability:
if not DBotScoreReliability.is_valid_type(source_reliability):
raise ValueError("Invalid source reliability value", source_reliability)
self._source_reliability = source_reliability
else:
self._source_reliability = ''
def to_entry(self):
""" Convert object to XSOAR entry
:return: XSOAR entry representation.
:rtype: ``dict``
"""
entry = {}
if self._entity_b:
entry = {
"name": self._name,
"reverseName": self._reverse_name,
"type": self._relationship_type,
"entityA": self._entity_a,
"entityAFamily": self._entity_a_family,
"entityAType": self._entity_a_type,
"entityB": self._entity_b,
"entityBFamily": self._entity_b_family,
"entityBType": self._entity_b_type,
"fields": self._fields,
}
if self._source_reliability:
entry["reliability"] = self._source_reliability
if self._brand:
entry["brand"] = self._brand
return entry
def to_indicator(self):
""" Convert object to XSOAR entry
:return: XSOAR entry representation.
:rtype: ``dict``
"""
indicator_relationship = {}
if self._entity_b:
indicator_relationship = {
"name": self._name,
"reverseName": self._reverse_name,
"type": self._relationship_type,
"entityA": self._entity_a,
"entityAFamily": self._entity_a_family,
"entityAType": self._entity_a_type,
"entityB": self._entity_b,
"entityBFamily": self._entity_b_family,
"entityBType": self._entity_b_type,
"fields": self._fields,
}
return indicator_relationship
def to_context(self):
""" Convert object to XSOAR context
:return: XSOAR context representation.
:rtype: ``dict``
"""
indicator_relationship_context = {}
if self._entity_b:
indicator_relationship_context = {
"Relationship": self._name,
"EntityA": self._entity_a,
"EntityAType": self._entity_a_type,
"EntityB": self._entity_b,
"EntityBType": self._entity_b_type,
}
return indicator_relationship_context
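# Illustrative sketch (not part of the original module): building a relationship between two
# indicators and converting it to a server entry. The indicator values below are hypothetical.
#
#     relationship = EntityRelationship(
#         name='uses',
#         entity_a='1.1.1.1',
#         entity_a_type='IP',
#         entity_b='example.com',
#         entity_b_type='Domain',
#     )
#     entry = relationship.to_entry()
#     # or pass it via CommandResults(relationships=[relationship]), defined below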
class CommandResults:
"""
CommandResults class - used to return results to the war room
:type outputs_prefix: ``str``
:param outputs_prefix: should be identical to the prefix in the yml contextPath in yml file. for example:
CortexXDR.Incident
:type outputs_key_field: ``str`` or ``list[str]``
:param outputs_key_field: primary key field in the main object. If the command returns Incidents, and one of the
properties of Incident is incident_id, then outputs_key_field='incident_id'. If object has multiple
unique keys, then list of strings is supported outputs_key_field=['id1', 'id2']
:type outputs: ``list`` or ``dict``
:param outputs: the data to be returned and will be set to context
:type indicators: ``list``
:param indicators: DEPRECATED: use 'indicator' instead.
:type indicator: ``Common.Indicator``
:param indicator: single indicator like Common.IP, Common.URL, Common.File, etc.
:type readable_output: ``str``
:param readable_output: (Optional) markdown string that will be presented in the warroom, should be human readable -
(HumanReadable) - if not set, readable output will be generated
:type raw_response: ``dict`` | ``list``
:param raw_response: must be dictionary, if not provided then will be equal to outputs. usually must be the original
raw response from the 3rd party service (originally Contents)
:type indicators_timeline: ``IndicatorsTimeline``
:param indicators_timeline: must be an IndicatorsTimeline. used by the server to populate an indicator's timeline.
:type ignore_auto_extract: ``bool``
:param ignore_auto_extract: must be a boolean, default value is False. Used to prevent AutoExtract on output.
:type relationships: ``list of EntityRelationship``
:param relationships: List of relationships of the indicator.
:type mark_as_note: ``bool``
:param mark_as_note: must be a boolean, default value is False. Used to mark entry as note.
:type entry_type: ``int`` code of EntryType
:param entry_type: type of return value, see EntryType
:type scheduled_command: ``ScheduledCommand``
:param scheduled_command: manages the way the command should be polled.
:return: None
:rtype: ``None``
"""
def __init__(self, outputs_prefix=None, outputs_key_field=None, outputs=None, indicators=None, readable_output=None,
raw_response=None, indicators_timeline=None, indicator=None, ignore_auto_extract=False,
mark_as_note=False, scheduled_command=None, relationships=None, entry_type=None):
# type: (str, object, object, list, str, object, IndicatorsTimeline, Common.Indicator, bool, bool, ScheduledCommand, list, int) -> None # noqa: E501
if raw_response is None:
raw_response = outputs
if outputs is not None and not isinstance(outputs, dict) and not outputs_prefix:
raise ValueError('outputs_prefix is missing')
if indicators and indicator:
raise ValueError('indicators is DEPRECATED, use only indicator')
if entry_type is None:
entry_type = EntryType.NOTE
self.indicators = indicators # type: Optional[List[Common.Indicator]]
self.indicator = indicator # type: Optional[Common.Indicator]
self.entry_type = entry_type # type: int
self.outputs_prefix = outputs_prefix
# this is a public field, it is used by a lot of unit tests, so we don't change it
self.outputs_key_field = outputs_key_field
self._outputs_key_field = None # type: Optional[List[str]]
if not outputs_key_field:
self._outputs_key_field = None
elif isinstance(outputs_key_field, STRING_TYPES):
self._outputs_key_field = [outputs_key_field]
elif isinstance(outputs_key_field, list):
self._outputs_key_field = outputs_key_field
else:
raise TypeError('outputs_key_field must be of type str or list')
self.outputs = outputs
self.raw_response = raw_response
self.readable_output = readable_output
self.indicators_timeline = indicators_timeline
self.ignore_auto_extract = ignore_auto_extract
self.mark_as_note = mark_as_note
self.scheduled_command = scheduled_command
self.relationships = relationships
def to_context(self):
outputs = {} # type: dict
relationships = [] # type: list
if self.readable_output:
human_readable = self.readable_output
else:
human_readable = None # type: ignore[assignment]
raw_response = None # type: ignore[assignment]
indicators_timeline = [] # type: ignore[assignment]
ignore_auto_extract = False # type: bool
mark_as_note = False # type: bool
indicators = [self.indicator] if self.indicator else self.indicators
if indicators:
for indicator in indicators:
context_outputs = indicator.to_context()
for key, value in context_outputs.items():
if key not in outputs:
outputs[key] = []
outputs[key].append(value)
if self.raw_response:
raw_response = self.raw_response
if self.ignore_auto_extract:
ignore_auto_extract = True
if self.mark_as_note:
mark_as_note = True
if self.indicators_timeline:
indicators_timeline = self.indicators_timeline.indicators_timeline
if self.outputs is not None and self.outputs != []:
if not self.readable_output:
# if markdown is not provided then create table by default
human_readable = tableToMarkdown('Results', self.outputs)
if self.outputs_prefix and self._outputs_key_field:
# if both prefix and key field provided then create DT key
formatted_outputs_key = ' && '.join(['val.{0} && val.{0} == obj.{0}'.format(key_field)
for key_field in self._outputs_key_field])
outputs_key = '{0}({1})'.format(self.outputs_prefix, formatted_outputs_key)
outputs[outputs_key] = self.outputs
elif self.outputs_prefix:
outputs_key = '{}'.format(self.outputs_prefix)
outputs[outputs_key] = self.outputs
else:
outputs.update(self.outputs) # type: ignore[call-overload]
if self.relationships:
relationships = [relationship.to_entry() for relationship in self.relationships if relationship.to_entry()]
content_format = EntryFormat.JSON
if isinstance(raw_response, STRING_TYPES) or isinstance(raw_response, int):
content_format = EntryFormat.TEXT
return_entry = {
'Type': self.entry_type,
'ContentsFormat': content_format,
'Contents': raw_response,
'HumanReadable': human_readable,
'EntryContext': outputs,
'IndicatorTimeline': indicators_timeline,
'IgnoreAutoExtract': True if ignore_auto_extract else False,
'Note': mark_as_note,
'Relationships': relationships,
}
if self.scheduled_command:
return_entry.update(self.scheduled_command.to_results())
return return_entry
def return_results(results):
"""
This function wraps demisto.results() and supports CommandResults objects, plain strings, dicts, BaseWidget objects, and lists of any of these.
:type results: ``CommandResults`` or ``str`` or ``dict`` or ``BaseWidget`` or ``list``
:param results: A result object to return as a War-Room entry.
:return: None
:rtype: ``None``
"""
if results is None:
# backward compatibility reasons
demisto.results(None)
return
elif results and isinstance(results, list):
result_list = []
for result in results:
if isinstance(result, (dict, str)):
# Results of type dict or str are of the old results format and work with demisto.results()
result_list.append(result)
else:
# The rest are of the new format and have a corresponding function (to_context, to_display, etc...)
return_results(result)
if result_list:
demisto.results(result_list)
elif isinstance(results, CommandResults):
demisto.results(results.to_context())
elif isinstance(results, BaseWidget):
demisto.results(results.to_display())
elif isinstance(results, GetMappingFieldsResponse):
demisto.results(results.extract_mapping())
elif isinstance(results, GetRemoteDataResponse):
demisto.results(results.extract_for_local())
elif isinstance(results, GetModifiedRemoteDataResponse):
demisto.results(results.to_entry())
elif hasattr(results, 'to_entry'):
demisto.results(results.to_entry())
else:
demisto.results(results)
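# Illustrative sketch (not part of the original module): returning command outputs to the war room
# with CommandResults and return_results. The prefix, key field and data below are hypothetical.
#
#     results = CommandResults(
#         outputs_prefix='MyIntegration.Incident',
#         outputs_key_field='incident_id',
#         outputs=[{'incident_id': '1', 'severity': 'High'}],
#     )
#     return_results(results)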
# deprecated
def return_outputs(readable_output, outputs=None, raw_response=None, timeline=None, ignore_auto_extract=False):
"""
DEPRECATED: use return_results() instead
This function wraps demisto.results(), making it more intuitive to return results to the user.
:type readable_output: ``str`` | ``int``
:param readable_output: markdown string that will be presented in the warroom, should be human readable -
(HumanReadable)
:type outputs: ``dict``
:param outputs: the outputs that will be returned to playbook/investigation context (originally EntryContext)
:type raw_response: ``dict`` | ``list`` | ``str``
:param raw_response: must be dictionary, if not provided then will be equal to outputs. usually must be the original
raw response from the 3rd party service (originally Contents)
:type timeline: ``dict`` | ``list``
:param timeline: expects a list, if a dict is passed it will be put into a list. used by server to populate an
indicator's timeline. if the 'Category' field is not present in the timeline dict(s), it will automatically
be added to the dict(s) with its value set to 'Integration Update'.
:type ignore_auto_extract: ``bool``
:param ignore_auto_extract: expects a bool value. if true then the warroom entry readable_output will not be auto enriched.
:return: None
:rtype: ``None``
"""
timeline_list = [timeline] if isinstance(timeline, dict) else timeline
if timeline_list:
for tl_obj in timeline_list:
if 'Category' not in tl_obj.keys():
tl_obj['Category'] = 'Integration Update'
return_entry = {
"Type": entryTypes["note"],
"HumanReadable": readable_output,
"ContentsFormat": formats["text"] if isinstance(raw_response, STRING_TYPES) else formats['json'],
"Contents": raw_response,
"EntryContext": outputs,
'IgnoreAutoExtract': ignore_auto_extract,
"IndicatorTimeline": timeline_list
}
# Return 'readable_output' only if needed
if readable_output and not outputs and not raw_response:
return_entry["Contents"] = readable_output
return_entry["ContentsFormat"] = formats["text"]
elif outputs and raw_response is None:
# if raw_response was not provided but outputs were provided then set Contents as outputs
return_entry["Contents"] = outputs
demisto.results(return_entry)
def return_error(message, error='', outputs=None):
"""
Returns error entry with given message and exits the script
:type message: ``str``
:param message: The message to return in the entry (required)
:type error: ``str`` or Exception
:param error: The raw error message to log (optional)
:type outputs: ``dict or None``
:param outputs: the outputs that will be returned to playbook/investigation context (optional)
:return: Error entry object
:rtype: ``dict``
"""
is_command = hasattr(demisto, 'command')
is_server_handled = is_command and demisto.command() in ('fetch-incidents',
'fetch-credentials',
'long-running-execution',
'fetch-indicators')
if is_debug_mode() and not is_server_handled and any(sys.exc_info()): # Checking that an exception occurred
message = "{}\n\n{}".format(message, traceback.format_exc())
message = LOG(message)
if error:
LOG(str(error))
LOG.print_log()
if not isinstance(message, str):
message = message.encode('utf8') if hasattr(message, 'encode') else str(message)
if is_command and demisto.command() == 'get-modified-remote-data':
if (error and not isinstance(error, NotImplementedError)) or sys.exc_info()[0] != NotImplementedError:
message = 'skip update. error: ' + message
if is_server_handled:
raise Exception(message)
else:
demisto.results({
'Type': entryTypes['error'],
'ContentsFormat': formats['text'],
'Contents': message,
'EntryContext': outputs
})
sys.exit(0)
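# Illustrative sketch (not part of the original module): a common main() error-handling pattern
# that reports failures with return_error. The command implementation below is hypothetical.
#
#     try:
#         return_results(some_command(client, demisto.args()))
#     except Exception as e:
#         return_error('Failed to execute the command. Error: {}'.format(str(e)), error=e)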
def return_warning(message, exit=False, warning='', outputs=None, ignore_auto_extract=False):
"""
Returns a warning entry with the specified message, and optionally exits the script.
:type message: ``str``
:param message: The message to return in the entry (required).
:type exit: ``bool``
:param exit: Determines if the program will terminate after the command is executed. Default is False.
:type warning: ``str``
:param warning: The warning message (raw) to log (optional).
:type outputs: ``dict or None``
:param outputs: The outputs that will be returned to playbook/investigation context (optional).
:type ignore_auto_extract: ``bool``
:param ignore_auto_extract: Determines if the War Room entry will be auto-enriched. Default is false.
:return: Warning entry object
:rtype: ``dict``
"""
LOG(message)
if warning:
LOG(warning)
LOG.print_log()
demisto.results({
'Type': entryTypes['warning'],
'ContentsFormat': formats['text'],
'IgnoreAutoExtract': ignore_auto_extract,
'Contents': str(message),
"EntryContext": outputs
})
if exit:
sys.exit(0)
def execute_command(command, args, extract_contents=True, fail_on_error=True):
"""
Runs the `demisto.executeCommand()` function and checks for errors.
:type command: ``str``
:param command: The command to run. (required)
:type args: ``dict``
:param args: The command arguments. (required)
:type extract_contents: ``bool``
:param extract_contents: Whether to return only the Contents part of the results. Default is True.
:type fail_on_error: ``bool``
:param fail_on_error: Whether to fail the command when receiving an error from the command. Default is True.
:return: The command results.
:rtype:
- When `fail_on_error` is True - ``list`` or ``dict`` or ``str``.
- When `fail_on_error` is False -``bool`` and ``str``.
Note:
For backward compatibility, only when `fail_on_error` is set to False, two values will be returned.
"""
if not hasattr(demisto, 'executeCommand'):
raise DemistoException('Cannot run demisto.executeCommand() from integrations.')
res = demisto.executeCommand(command, args)
if is_error(res):
error_message = get_error(res)
if fail_on_error:
return_error('Failed to execute {}. Error details:\n{}'.format(command, error_message))
else:
return False, error_message
if not extract_contents:
if fail_on_error:
return res
else:
return True, res
contents = [entry.get('Contents', {}) for entry in res]
contents = contents[0] if len(contents) == 1 else contents
if fail_on_error:
return contents
return True, contents
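# Illustrative sketch (not part of the original module): running a server command from a script
# and handling errors manually. The command name and arguments below are hypothetical.
#
#     ok, res = execute_command('getIncidents', {'query': 'status:Active'}, fail_on_error=False)
#     if not ok:
#         return_error('getIncidents failed: {}'.format(res))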
def camelize(src, delim=' ', upper_camel=True):
"""
Convert all keys of a dictionary (or list of dictionaries) to CamelCase (with capital first letter)
:type src: ``dict`` or ``list``
:param src: The dictionary (or list of dictionaries) to convert the keys for. (required)
:type delim: ``str``
:param delim: The delimiter between two words in the key (e.g. delim=' ' for "Start Date"). Default ' '.
:type upper_camel: ``bool``
:param upper_camel: When True then transforms dictionary keys to camel case with the first letter capitalised
(for example: demisto_content to DemistoContent), otherwise the first letter will not be capitalised
(for example: demisto_content to demistoContent).
:return: The dictionary (or list of dictionaries) with the keys in CamelCase.
:rtype: ``dict`` or ``list``
"""
def camelize_str(src_str):
if callable(getattr(src_str, "decode", None)):
src_str = src_str.decode('utf-8')
components = src_str.split(delim)
camelize_without_first_char = ''.join(map(lambda x: x.title(), components[1:]))
if upper_camel:
return components[0].title() + camelize_without_first_char
else:
return components[0].lower() + camelize_without_first_char
if isinstance(src, list):
return [camelize(phrase, delim, upper_camel=upper_camel) for phrase in src]
return {camelize_str(key): value for key, value in src.items()}
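# Illustrative usage (not part of the original module), following the doctest style used elsewhere in this file:
#
#     >>> camelize({'first_name': 'John', 'last_name': 'Doe'}, delim='_')
#     {'FirstName': 'John', 'LastName': 'Doe'}
#     >>> camelize([{'start date': '2021-01-01'}], delim=' ', upper_camel=False)
#     [{'startDate': '2021-01-01'}]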
# Constants for common merge paths
outputPaths = {
'file': 'File(val.MD5 && val.MD5 == obj.MD5 || val.SHA1 && val.SHA1 == obj.SHA1 || '
'val.SHA256 && val.SHA256 == obj.SHA256 || val.SHA512 && val.SHA512 == obj.SHA512 || '
'val.CRC32 && val.CRC32 == obj.CRC32 || val.CTPH && val.CTPH == obj.CTPH || '
'val.SSDeep && val.SSDeep == obj.SSDeep)',
'ip': 'IP(val.Address && val.Address == obj.Address)',
'url': 'URL(val.Data && val.Data == obj.Data)',
'domain': 'Domain(val.Name && val.Name == obj.Name)',
'cve': 'CVE(val.ID && val.ID == obj.ID)',
'email': 'Account.Email(val.Address && val.Address == obj.Address)',
'dbotscore': 'DBotScore'
}
def replace_in_keys(src, existing='.', new='_'):
"""
Replace a substring in all of the keys of a dictionary (or list of dictionaries)
:type src: ``dict`` or ``list``
:param src: The dictionary (or list of dictionaries) with keys that need replacement. (required)
:type existing: ``str``
:param existing: substring to replace.
:type new: ``str``
:param new: new substring that will replace the existing substring.
:return: The dictionary (or list of dictionaries) with keys after substring replacement.
:rtype: ``dict`` or ``list``
"""
def replace_str(src_str):
if callable(getattr(src_str, "decode", None)):
src_str = src_str.decode('utf-8')
return src_str.replace(existing, new)
if isinstance(src, list):
return [replace_in_keys(x, existing, new) for x in src]
return {replace_str(k): v for k, v in src.items()}
# ############################## REGEX FORMATTING ###############################
regexFlags = re.M # Multi line matching
# for the global(/g) flag use re.findall({regex_format},str)
# else, use re.match({regex_format},str)
ipv4Regex = r'\b((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\b([^\/]|$)'
ipv4cidrRegex = r'\b(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])(?:\[\.\]|\.)){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])(\/([0-9]|[1-2][0-9]|3[0-2]))\b' # noqa: E501
ipv6Regex = r'\b(?:(?:[0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|(?:[0-9a-fA-F]{1,4}:){1,7}:|(?:[0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|(?:[0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|(?:[0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|(?:[0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|(?:[0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:(?:(:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))\b' # noqa: E501
ipv6cidrRegex = r'\b(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))(\/(12[0-8]|1[0-1][0-9]|[1-9][0-9]|[0-9]))\b' # noqa: E501
emailRegex = r'\b[^@]+@[^@]+\.[^@]+\b'
hashRegex = r'\b[0-9a-fA-F]+\b'
urlRegex = r'(?:(?:https?|ftp|hxxps?):\/\/|www\[?\.\]?|ftp\[?\.\]?)(?:[-\w\d]+\[?\.\]?)+[-\w\d]+(?::\d+)?' \
r'(?:(?:\/|\?)[-\w\d+&@#\/%=~_$?!\-:,.\(\);]*[\w\d+&@#\/%=~_$\(\);])?'
cveRegex = r'(?i)^cve-\d{4}-([1-9]\d{4,}|\d{4})$'
md5Regex = re.compile(r'\b[0-9a-fA-F]{32}\b', regexFlags)
sha1Regex = re.compile(r'\b[0-9a-fA-F]{40}\b', regexFlags)
sha256Regex = re.compile(r'\b[0-9a-fA-F]{64}\b', regexFlags)
sha512Regex = re.compile(r'\b[0-9a-fA-F]{128}\b', regexFlags)
pascalRegex = re.compile('([A-Z]?[a-z]+)')
# ############################## REGEX FORMATTING end ###############################
def underscoreToCamelCase(s, upper_camel=True):
"""
Convert an underscore separated string to camel case
:type s: ``str``
:param s: The string to convert (e.g. hello_world) (required)
:type upper_camel: ``bool``
:param upper_camel: When True then transforms the string to camel case with the first letter capitalised
(for example: demisto_content to DemistoContent), otherwise the first letter will not be capitalised
(for example: demisto_content to demistoContent).
:return: The converted string (e.g. HelloWorld)
:rtype: ``str``
"""
if not isinstance(s, STRING_OBJ_TYPES):
return s
components = s.split('_')
camel_without_first_char = ''.join(x.title() for x in components[1:])
if upper_camel:
return components[0].title() + camel_without_first_char
else:
return components[0].lower() + camel_without_first_char
def camel_case_to_underscore(s):
"""Converts a camelCase string to snake_case
:type s: ``str``
:param s: The string to convert (e.g. helloWorld) (required)
:return: The converted string (e.g. hello_world)
:rtype: ``str``
"""
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', s)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
def snakify(src):
"""Convert all keys of a dictionary to snake_case (underscored separated)
:type src: ``dict``
:param src: The dictionary to convert the keys for. (required)
:return: The dictionary with the keys in snake_case.
:rtype: ``dict``
"""
return {camel_case_to_underscore(k): v for k, v in src.items()}
def pascalToSpace(s):
"""
Converts pascal strings to human readable (e.g. "ThreatScore" -> "Threat Score", "thisIsIPAddressName" ->
"This Is IP Address Name"). Could be used as headerTransform
:type s: ``str``
:param s: The string to be converted (required)
:return: The converted string
:rtype: ``str``
"""
if not isinstance(s, STRING_OBJ_TYPES):
return s
tokens = pascalRegex.findall(s)
for t in tokens:
# double space to handle capital words like IP/URL/DNS that not included in the regex
s = s.replace(t, ' {} '.format(t.title()))
# split and join: to remove double spacing caused by previous workaround
s = ' '.join(s.split())
return s
def string_to_table_header(string):
"""
Checks that the input is a string, changes underscores to spaces, and capitalizes every word.
Example: "one_two" to "One Two"
:type string: ``str``
:param string: The string to be converted (required)
:return: The converted string
:rtype: ``str``
"""
if isinstance(string, STRING_OBJ_TYPES):
return " ".join(word.capitalize() for word in string.replace("_", " ").split())
else:
raise Exception('The key is not a string: {}'.format(string))
def string_to_context_key(string):
"""
Checks that the input is a string, removes underscores, and capitalizes every word.
Example: "one_two" to "OneTwo"
:type string: ``str``
:param string: The string to be converted (required)
:return: The converted string
:rtype: ``str``
"""
if isinstance(string, STRING_OBJ_TYPES):
return "".join(word.capitalize() for word in string.split('_'))
else:
raise Exception('The key is not a string: {}'.format(string))
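# Illustrative usage of the string-conversion helpers above (not part of the original module):
#
#     >>> underscoreToCamelCase('incident_id')
#     'IncidentId'
#     >>> camel_case_to_underscore('incidentId')
#     'incident_id'
#     >>> pascalToSpace('SourceIPAddress')
#     'Source IP Address'
#     >>> string_to_table_header('first_seen')
#     'First Seen'
#     >>> string_to_context_key('first_seen')
#     'FirstSeen'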
def parse_date_range(date_range, date_format=None, to_timestamp=False, timezone=0, utc=True):
"""
THIS FUNCTION IS DEPRECATED - USE dateparser.parse instead
Parses date_range string to a tuple of date strings (start, end). Input must be in format 'number date_range_unit'.
Examples: (2 hours, 4 minutes, 6 months, 1 day, etc.)
:type date_range: ``str``
:param date_range: The date range to be parsed (required)
:type date_format: ``str``
:param date_format: Date format to convert the date_range to. (optional)
:type to_timestamp: ``bool``
:param to_timestamp: If set to True, then will return time stamp rather than a datetime.datetime. (optional)
:type timezone: ``int``
:param timezone: timezone should be passed in hours (e.g if +0300 then pass 3, if -0200 then pass -2).
:type utc: ``bool``
:param utc: If set to True, utc time will be used, otherwise local time.
:return: The parsed date range.
:rtype: ``(datetime.datetime, datetime.datetime)`` or ``(int, int)`` or ``(str, str)``
"""
range_split = date_range.strip().split(' ')
if len(range_split) != 2:
return_error('date_range must be "number date_range_unit", examples: (2 hours, 4 minutes, 6 months, 1 day, '
'etc.)')
try:
number = int(range_split[0])
except ValueError:
return_error('The time value is invalid. Must be an integer.')
unit = range_split[1].lower()
if unit not in ['minute', 'minutes',
'hour', 'hours',
'day', 'days',
'month', 'months',
'year', 'years',
]:
return_error('The unit of date_range is invalid. Must be minutes, hours, days, months or years.')
if not isinstance(timezone, (int, float)):
return_error('Invalid timezone "{}" - must be a number (of type int or float).'.format(timezone))
if utc:
end_time = datetime.utcnow() + timedelta(hours=timezone)
start_time = datetime.utcnow() + timedelta(hours=timezone)
else:
end_time = datetime.now() + timedelta(hours=timezone)
start_time = datetime.now() + timedelta(hours=timezone)
if 'minute' in unit:
start_time = end_time - timedelta(minutes=number)
elif 'hour' in unit:
start_time = end_time - timedelta(hours=number)
elif 'day' in unit:
start_time = end_time - timedelta(days=number)
elif 'month' in unit:
start_time = end_time - timedelta(days=number * 30)
elif 'year' in unit:
start_time = end_time - timedelta(days=number * 365)
if to_timestamp:
return date_to_timestamp(start_time), date_to_timestamp(end_time)
if date_format:
return datetime.strftime(start_time, date_format), datetime.strftime(end_time, date_format)
return start_time, end_time
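# Illustrative usage (not part of the original module); the exact values depend on the current time:
#
#     start, end = parse_date_range('2 days', date_format='%Y-%m-%dT%H:%M:%S')
#     start_ts, end_ts = parse_date_range('6 months', to_timestamp=True)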
def timestamp_to_datestring(timestamp, date_format="%Y-%m-%dT%H:%M:%S.000Z", is_utc=False):
"""
Parses timestamp (milliseconds) to a date string in the provided date format (by default: ISO 8601 format)
Examples: (1541494441222, 1541495441000, etc.)
:type timestamp: ``int`` or ``str``
:param timestamp: The timestamp to be parsed (required)
:type date_format: ``str``
:param date_format: The date format the timestamp should be parsed to. (optional)
:type is_utc: ``bool``
:param is_utc: Should the string representation of the timestamp use UTC time or the local machine time
:return: The parsed timestamp in the date_format
:rtype: ``str``
"""
use_utc_time = is_utc or date_format.endswith('Z')
if use_utc_time:
return datetime.utcfromtimestamp(int(timestamp) / 1000.0).strftime(date_format)
return datetime.fromtimestamp(int(timestamp) / 1000.0).strftime(date_format)
def date_to_timestamp(date_str_or_dt, date_format='%Y-%m-%dT%H:%M:%S'):
"""
Parses date_str_or_dt in the given format (default: %Y-%m-%dT%H:%M:%S) to milliseconds
Examples: ('2018-11-06T08:56:41', '2018-11-06T08:56:41', etc.)
:type date_str_or_dt: ``str`` or ``datetime.datetime``
:param date_str_or_dt: The date to be parsed. (required)
:type date_format: ``str``
:param date_format: The date format of the date string (will be ignored if date_str_or_dt is of type
datetime.datetime). (optional)
:return: The parsed timestamp.
:rtype: ``int``
"""
if isinstance(date_str_or_dt, STRING_OBJ_TYPES):
return int(time.mktime(time.strptime(date_str_or_dt, date_format)) * 1000)
# otherwise datetime.datetime
return int(time.mktime(date_str_or_dt.timetuple()) * 1000)
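# Illustrative usage (not part of the original module):
#
#     >>> timestamp_to_datestring(1541494441222, is_utc=True)
#     '2018-11-06T08:54:01.000Z'
#     # date_to_timestamp parses strings using the local timezone, so the result is machine dependent
#     ms = date_to_timestamp('2018-11-06T08:56:41')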
def remove_nulls_from_dictionary(data):
"""
Remove Null values from a dictionary. (updating the given dictionary)
:type data: ``dict``
:param data: The dictionary to remove null values from (required)
:return: No data returned
:rtype: ``None``
"""
list_of_keys = list(data.keys())[:]
for key in list_of_keys:
if data[key] in ('', None, [], {}, ()):
del data[key]
def assign_params(keys_to_ignore=None, values_to_ignore=None, **kwargs):
"""Creates a dictionary from given kwargs without empty values.
empty values are: None, '', [], {}, ()
Examples:
>>> assign_params(a='1', b=True, c=None, d='')
{'a': '1', 'b': True}
>>> since_time = 'timestamp'
>>> assign_params(values_to_ignore=(15, ), sinceTime=since_time, b=15)
{'sinceTime': 'timestamp'}
>>> item_id = '1236654'
>>> assign_params(keys_to_ignore=['rnd'], ID=item_id, rnd=15)
{'ID': '1236654'}
:type keys_to_ignore: ``tuple`` or ``list``
:param keys_to_ignore: Keys to ignore if exists
:type values_to_ignore: ``tuple`` or ``list``
:param values_to_ignore: Values to ignore if exists
:type kwargs: ``kwargs``
:param kwargs: kwargs to filter
:return: dict without empty values
:rtype: ``dict``
"""
if values_to_ignore is None:
values_to_ignore = (None, '', [], {}, ())
if keys_to_ignore is None:
keys_to_ignore = tuple()
return {
key: value for key, value in kwargs.items()
if value not in values_to_ignore and key not in keys_to_ignore
}
class GetDemistoVersion:
"""
Callable class to replace get_demisto_version function
"""
def __init__(self):
self._version = None
def __call__(self):
"""Returns the Demisto version and build number.
:return: Demisto version object if Demisto class has attribute demistoVersion, else raises AttributeError
:rtype: ``dict``
"""
if self._version is None:
if hasattr(demisto, 'demistoVersion'):
self._version = demisto.demistoVersion()
else:
raise AttributeError('demistoVersion attribute not found.')
return self._version
get_demisto_version = GetDemistoVersion()
def get_demisto_version_as_str():
"""Get the Demisto Server version as a string <version>-<build>. If unknown will return: 'Unknown'.
Meant to be used in places where we want to display the version. If you want to perform logic based upon the version,
use: is_demisto_version_ge.
:return: Demisto version as string
:rtype: ``str``
"""
try:
ver_obj = get_demisto_version()
return '{}-{}'.format(ver_obj.get('version', 'Unknown'),
ver_obj.get("buildNumber", 'Unknown'))
except AttributeError:
return "Unknown"
def is_demisto_version_ge(version, build_number=''):
"""Utility function to check if current running integration is at a server greater or equal to the passed version
:type version: ``str``
:param version: Version to check
:type build_number: ``str``
:param build_number: Build number to check
:return: True if running within a Server version greater than or equal to the passed version
:rtype: ``bool``
"""
server_version = {}
try:
server_version = get_demisto_version()
if server_version.get('version') > version:
return True
elif server_version.get('version') == version:
if build_number:
return int(server_version.get('buildNumber')) >= int(build_number) # type: ignore[arg-type]
return True # No build number
else:
return False
except AttributeError:
# demistoVersion was added in 5.0.0. We are currently running in 4.5.0 and below
if version >= "5.0.0":
return False
raise
except ValueError:
# dev editions are not comparable
demisto.log(
'is_demisto_version_ge: ValueError. \n '
'input: server version: {} build number: {}\n'
'server version: {}'.format(version, build_number, server_version)
)
return True
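# Illustrative usage (not part of the original module): gating a feature on the server version.
#
#     if is_demisto_version_ge('6.0.0'):
#         context = demisto.getIntegrationContextVersioned(True)
#     else:
#         context = demisto.getIntegrationContext()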
class DemistoHandler(logging.Handler):
"""
Handler to route logging messages to an IntegrationLogger or demisto.debug if not supplied
"""
def __init__(self, int_logger=None):
logging.Handler.__init__(self)
self.int_logger = int_logger
def emit(self, record):
msg = self.format(record)
try:
if self.int_logger:
self.int_logger(msg)
else:
demisto.debug(msg)
except Exception: # noqa: disable=broad-except
pass
class DebugLogger(object):
"""
Wrapper to initiate logging at logging.DEBUG level.
Is used when `debug-mode=True`.
"""
def __init__(self):
self.handler = None # just in case our http_client code throws an exception. so we don't error in the __del__
self.int_logger = IntegrationLogger()
self.int_logger.set_buffering(False)
self.http_client_print = None
self.http_client = None
if IS_PY3:
# pylint: disable=import-error
import http.client as http_client
# pylint: enable=import-error
self.http_client = http_client
self.http_client.HTTPConnection.debuglevel = 1
self.http_client_print = getattr(http_client, 'print', None) # save in case someone else patched it already
setattr(http_client, 'print', self.int_logger.print_override)
self.handler = DemistoHandler(self.int_logger)
demisto_formatter = logging.Formatter(fmt='python logging: %(levelname)s [%(name)s] - %(message)s', datefmt=None)
self.handler.setFormatter(demisto_formatter)
self.root_logger = logging.getLogger()
self.prev_log_level = self.root_logger.getEffectiveLevel()
self.root_logger.setLevel(logging.DEBUG)
self.org_handlers = list()
if self.root_logger.handlers:
self.org_handlers.extend(self.root_logger.handlers)
for h in self.org_handlers:
self.root_logger.removeHandler(h)
self.root_logger.addHandler(self.handler)
def __del__(self):
if self.handler:
self.root_logger.setLevel(self.prev_log_level)
self.root_logger.removeHandler(self.handler)
self.handler.flush()
self.handler.close()
if self.org_handlers:
for h in self.org_handlers:
self.root_logger.addHandler(h)
if self.http_client:
self.http_client.HTTPConnection.debuglevel = 0
if self.http_client_print:
setattr(self.http_client, 'print', self.http_client_print)
else:
delattr(self.http_client, 'print')
if self.int_logger.curl:
for curl in self.int_logger.curl:
demisto.info('cURL:\n' + curl)
def log_start_debug(self):
"""
Utility function to log start of debug mode logging
"""
msg = "debug-mode started.\n#### http client print found: {}.\n#### Env {}.".format(self.http_client_print is not None,
os.environ)
if hasattr(demisto, 'params'):
msg += "\n#### Params: {}.".format(json.dumps(demisto.params(), indent=2))
calling_context = demisto.callingContext.get('context', {})
msg += "\n#### Docker image: [{}]".format(calling_context.get('DockerImage'))
brand = calling_context.get('IntegrationBrand')
if brand:
msg += "\n#### Integration: brand: [{}] instance: [{}]".format(brand, calling_context.get('IntegrationInstance'))
sm = get_schedule_metadata(context=calling_context)
if sm.get('is_polling'):
msg += "\n#### Schedule Metadata: scheduled command: [{}] args: [{}] times ran: [{}] scheduled: [{}] end " \
"date: [{}]".format(sm.get('polling_command'),
sm.get('polling_args'),
sm.get('times_ran'),
sm.get('start_date'),
sm.get('end_date')
)
self.int_logger.write(msg)
_requests_logger = None
try:
if is_debug_mode():
_requests_logger = DebugLogger()
_requests_logger.log_start_debug()
except Exception as ex:
# Should fail silently so that if there is a problem with the logger it will
# not affect the execution of commands and playbooks
demisto.info('Failed initializing DebugLogger: {}'.format(ex))
def parse_date_string(date_string, date_format='%Y-%m-%dT%H:%M:%S'):
"""
Parses the date_string function to the corresponding datetime object.
Note: If possible (e.g. running Python 3), it is suggested to use
dateutil.parser.parse or dateparser.parse functions instead.
Examples:
>>> parse_date_string('2019-09-17T06:16:39Z')
datetime.datetime(2019, 9, 17, 6, 16, 39)
>>> parse_date_string('2019-09-17T06:16:39.22Z')
datetime.datetime(2019, 9, 17, 6, 16, 39, 220000)
>>> parse_date_string('2019-09-17T06:16:39.4040+05:00', '%Y-%m-%dT%H:%M:%S+02:00')
datetime.datetime(2019, 9, 17, 6, 16, 39, 404000)
:type date_string: ``str``
:param date_string: The date string to parse. (required)
:type date_format: ``str``
:param date_format:
The date format of the date string. If the date format is known, it should be provided. (optional)
:return: The parsed datetime.
:rtype: ``(datetime.datetime, datetime.datetime)``
"""
try:
return datetime.strptime(date_string, date_format)
except ValueError as e:
error_message = str(e)
date_format = '%Y-%m-%dT%H:%M:%S'
time_data_regex = r'time data \'(.*?)\''
time_data_match = re.findall(time_data_regex, error_message)
sliced_time_data = ''
if time_data_match:
# found time date which does not match date format
# example of caught error message:
# "time data '2019-09-17T06:16:39Z' does not match format '%Y-%m-%dT%H:%M:%S.%fZ'"
time_data = time_data_match[0]
# removing YYYY-MM-DDThh:mm:ss from the time data to keep only milliseconds and time zone
sliced_time_data = time_data[19:]
else:
unconverted_data_remains_regex = r'unconverted data remains: (.*)'
unconverted_data_remains_match = re.findall(unconverted_data_remains_regex, error_message)
if unconverted_data_remains_match:
# found unconverted_data_remains
# example of caught error message:
# "unconverted data remains: 22Z"
sliced_time_data = unconverted_data_remains_match[0]
if not sliced_time_data:
# did not catch expected error
raise ValueError(e)
if '.' in sliced_time_data:
# found milliseconds - appending ".%f" to date format
date_format += '.%f'
timezone_regex = r'[Zz+-].*'
time_zone = re.findall(timezone_regex, sliced_time_data)
if time_zone:
# found timezone - appending it to the date format
date_format += time_zone[0]
return datetime.strptime(date_string, date_format)
def build_dbot_entry(indicator, indicator_type, vendor, score, description=None, build_malicious=True):
"""Build a dbot entry. if score is 3 adds malicious
Examples:
>>> build_dbot_entry('[email protected]', 'Email', 'Vendor', 1)
{'DBotScore': {'Indicator': '[email protected]', 'Type': 'email', 'Vendor': 'Vendor', 'Score': 1}}
>>> build_dbot_entry('[email protected]', 'Email', 'Vendor', 3, build_malicious=False)
{'DBotScore': {'Indicator': '[email protected]', 'Type': 'email', 'Vendor': 'Vendor', 'Score': 3}}
>>> build_dbot_entry('[email protected]', 'email', 'Vendor', 3, 'Malicious email')
{'DBotScore': {'Vendor': 'Vendor', 'Indicator': '[email protected]', 'Score': 3, 'Type': 'email'}, \
'Account.Email(val.Address && val.Address == obj.Address)': {'Malicious': {'Vendor': 'Vendor', 'Description': \
'Malicious email'}, 'Address': '[email protected]'}}
>>> build_dbot_entry('md5hash', 'md5', 'Vendor', 1)
{'DBotScore': {'Indicator': 'md5hash', 'Type': 'file', 'Vendor': 'Vendor', 'Score': 1}}
:type indicator: ``str``
:param indicator: indicator field. if using file hashes, can be dict
:type indicator_type: ``str``
:param indicator_type:
type of indicator ('url, 'domain', 'ip', 'cve', 'email', 'md5', 'sha1', 'sha256', 'crc32', 'sha512', 'ctph')
:type vendor: ``str``
:param vendor: Integration ID
:type score: ``int``
:param score: DBot score (0-3)
:type description: ``str`` or ``None``
:param description: description (will be added to malicious if dbot_score is 3). can be None
:type build_malicious: ``bool``
:param build_malicious: if True, will add a malicious entry
:return: dbot entry
:rtype: ``dict``
"""
if not 0 <= score <= 3:
raise DemistoException('illegal DBot score, expected 0-3, got `{}`'.format(score))
indicator_type_lower = indicator_type.lower()
if indicator_type_lower not in INDICATOR_TYPE_TO_CONTEXT_KEY:
raise DemistoException('illegal indicator type, expected one of {}, got `{}`'.format(
INDICATOR_TYPE_TO_CONTEXT_KEY.keys(), indicator_type_lower
))
# handle files
if INDICATOR_TYPE_TO_CONTEXT_KEY[indicator_type_lower] == 'file':
indicator_type_lower = 'file'
dbot_entry = {
outputPaths['dbotscore']: {
'Indicator': indicator,
'Type': indicator_type_lower,
'Vendor': vendor,
'Score': score
}
}
if score == 3 and build_malicious:
dbot_entry.update(build_malicious_dbot_entry(indicator, indicator_type, vendor, description))
return dbot_entry
def build_malicious_dbot_entry(indicator, indicator_type, vendor, description=None):
""" Build Malicious dbot entry
Examples:
>>> build_malicious_dbot_entry('8.8.8.8', 'ip', 'Vendor', 'Google DNS')
{'IP(val.Address && val.Address == obj.Address)': {'Malicious': {'Vendor': 'Vendor', 'Description': 'Google DNS\
'}, 'Address': '8.8.8.8'}}
>>> build_malicious_dbot_entry('md5hash', 'MD5', 'Vendor', 'Malicious File')
{'File(val.MD5 && val.MD5 == obj.MD5 || val.SHA1 && val.SHA1 == obj.SHA1 || val.SHA256 && val.SHA256 == obj.SHA\
256 || val.SHA512 && val.SHA512 == obj.SHA512 || val.CRC32 && val.CRC32 == obj.CRC32 || val.CTPH && val.CTPH == obj.CTP\
H || val.SSDeep && val.SSDeep == obj.SSDeep)': {'Malicious': {'Vendor': 'Vendor', 'Description': 'Malicious File'}\
, 'MD5': 'md5hash'}}
:type indicator: ``str``
:param indicator: Value (e.g. 8.8.8.8)
:type indicator_type: ``str``
:param indicator_type: e.g. 'IP'
:type vendor: ``str``
:param vendor: Integration ID
:type description: ``str``
:param description: Why it's malicious
:return: A malicious DBot entry
:rtype: ``dict``
"""
indicator_type_lower = indicator_type.lower()
if indicator_type_lower in INDICATOR_TYPE_TO_CONTEXT_KEY:
key = INDICATOR_TYPE_TO_CONTEXT_KEY[indicator_type_lower]
# `file` indicator works a little differently
if key == 'file':
entry = {
indicator_type.upper(): indicator,
'Malicious': {
'Vendor': vendor,
'Description': description
}
}
return {outputPaths[key]: entry}
else:
entry = {
key: indicator,
'Malicious': {
'Vendor': vendor,
'Description': description
}
}
return {outputPaths[indicator_type_lower]: entry}
else:
raise DemistoException('Wrong indicator type supplied: {}, expected {}'
.format(indicator_type, INDICATOR_TYPE_TO_CONTEXT_KEY.keys()))
# Will add only if 'requests' module imported
if 'requests' in sys.modules:
class BaseClient(object):
"""Client to use in integrations with powerful _http_request
:type base_url: ``str``
:param base_url: Base server address with suffix, for example: https://example.com/api/v2/.
:type verify: ``bool``
:param verify: Whether the request should verify the SSL certificate.
:type proxy: ``bool``
:param proxy: Whether to run the integration using the system proxy.
:type ok_codes: ``tuple``
:param ok_codes:
The request codes to accept as OK, for example: (200, 201, 204).
If you specify "None", will use requests.Response.ok
:type headers: ``dict``
:param headers:
The request headers, for example: {'Accept`: `application/json`}.
Can be None.
:type auth: ``dict`` or ``tuple``
:param auth:
The request authorization, for example: (username, password).
Can be None.
:return: No data returned
:rtype: ``None``
"""
def __init__(self, base_url, verify=True, proxy=False, ok_codes=tuple(), headers=None, auth=None):
self._base_url = base_url
self._verify = verify
self._ok_codes = ok_codes
self._headers = headers
self._auth = auth
self._session = requests.Session()
if proxy:
ensure_proxy_has_http_prefix()
else:
skip_proxy()
if not verify:
skip_cert_verification()
def __del__(self):
try:
self._session.close()
except AttributeError:
# we ignore exceptions raised due to session not used by the client and hence do not exist in __del__
pass
except Exception: # noqa
demisto.debug('failed to close BaseClient session with the following error:\n{}'.format(traceback.format_exc()))
def _implement_retry(self, retries=0,
status_list_to_retry=None,
backoff_factor=5,
raise_on_redirect=False,
raise_on_status=False):
"""
Implements the retry mechanism.
In the default case where retries = 0 the request will fail on the first time
:type retries: ``int``
:param retries: How many retries should be made in case of a failure. when set to '0'- will fail on the first time
:type status_list_to_retry: ``iterable``
:param status_list_to_retry: A set of integer HTTP status codes that we should force a retry on.
A retry is initiated if the request method is in ['GET', 'POST', 'PUT']
and the response status code is in ``status_list_to_retry``.
:type backoff_factor: ``float``
:param backoff_factor:
A backoff factor to apply between attempts after the second try
(most errors are resolved immediately by a second try without a
delay). urllib3 will sleep for::
{backoff factor} * (2 ** ({number of total retries} - 1))
seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
than :attr:`Retry.BACKOFF_MAX`.
By default, backoff_factor set to 5
:type raise_on_redirect: ``bool``
:param raise_on_redirect: Whether, if the number of redirects is
exhausted, to raise a MaxRetryError, or to return a response with a
response code in the 3xx range.
:type raise_on_status: ``bool``
:param raise_on_status: Similar meaning to ``raise_on_redirect``:
whether we should raise an exception, or return a response,
if status falls in ``status_forcelist`` range and retries have
been exhausted.
"""
try:
method_whitelist = "allowed_methods" if hasattr(Retry.DEFAULT, "allowed_methods") else "method_whitelist"
whitelist_kwargs = {
method_whitelist: frozenset(['GET', 'POST', 'PUT'])
}
retry = Retry(
total=retries,
read=retries,
connect=retries,
backoff_factor=backoff_factor,
status=retries,
status_forcelist=status_list_to_retry,
raise_on_status=raise_on_status,
raise_on_redirect=raise_on_redirect,
**whitelist_kwargs
)
adapter = HTTPAdapter(max_retries=retry)
self._session.mount('http://', adapter)
self._session.mount('https://', adapter)
except NameError:
pass
def _http_request(self, method, url_suffix='', full_url=None, headers=None, auth=None, json_data=None,
params=None, data=None, files=None, timeout=10, resp_type='json', ok_codes=None,
return_empty_response=False, retries=0, status_list_to_retry=None,
backoff_factor=5, raise_on_redirect=False, raise_on_status=False,
error_handler=None, empty_valid_codes=None, **kwargs):
"""A wrapper for requests lib to send our requests and handle requests and responses better.
:type method: ``str``
:param method: The HTTP method, for example: GET, POST, and so on.
:type url_suffix: ``str``
:param url_suffix: The API endpoint.
:type full_url: ``str``
:param full_url:
Bypasses the use of self._base_url + url_suffix. This is useful if you need to
make a request to an address outside of the scope of the integration
API.
:type headers: ``dict``
:param headers: Headers to send in the request. If None, will use self._headers.
:type auth: ``tuple``
:param auth:
The authorization tuple (usually username/password) to enable Basic/Digest/Custom HTTP Auth.
if None, will use self._auth.
:type params: ``dict``
:param params: URL parameters to specify the query.
:type data: ``dict``
:param data: The data to send in a 'POST' request.
:type json_data: ``dict``
:param json_data: The dictionary to send in a 'POST' request.
:type files: ``dict``
:param files: The file data to send in a 'POST' request.
:type timeout: ``float`` or ``tuple``
:param timeout:
The amount of time (in seconds) that a request will wait for a client to
establish a connection to a remote machine before a timeout occurs.
can be only float (Connection Timeout) or a tuple (Connection Timeout, Read Timeout).
:type resp_type: ``str``
:param resp_type:
Determines which data format to return from the HTTP request. The default
is 'json'. Other options are 'text', 'content', 'xml' or 'response'. Use 'response'
to return the full response object.
:type ok_codes: ``tuple``
:param ok_codes:
The request codes to accept as OK, for example: (200, 201, 204). If you specify
"None", will use self._ok_codes.
:return: Depends on the resp_type parameter
:rtype: ``dict`` or ``str`` or ``requests.Response``
:type retries: ``int``
:param retries: How many retries should be made in case of a failure. when set to '0'- will fail on the first time
:type status_list_to_retry: ``iterable``
:param status_list_to_retry: A set of integer HTTP status codes that we should force a retry on.
A retry is initiated if the request method is in ['GET', 'POST', 'PUT']
and the response status code is in ``status_list_to_retry``.
:type backoff_factor: ``float``
:param backoff_factor:
A backoff factor to apply between attempts after the second try
(most errors are resolved immediately by a second try without a
delay). urllib3 will sleep for::
{backoff factor} * (2 ** ({number of total retries} - 1))
seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
than :attr:`Retry.BACKOFF_MAX`.
By default, backoff_factor set to 5
:type raise_on_redirect: ``bool``
:param raise_on_redirect: Whether, if the number of redirects is
exhausted, to raise a MaxRetryError, or to return a response with a
response code in the 3xx range.
:type raise_on_status: ``bool``
:param raise_on_status: Similar meaning to ``raise_on_redirect``:
whether we should raise an exception, or return a response,
if status falls in ``status_forcelist`` range and retries have
been exhausted.
:type error_handler: ``callable``
:param error_handler: Given an error entry, the error handler outputs the
new formatted error message.
:type empty_valid_codes: ``list``
:param empty_valid_codes: A list of all valid status codes of empty responses (usually only 204, but
can vary)
"""
try:
# Replace params if supplied
address = full_url if full_url else urljoin(self._base_url, url_suffix)
headers = headers if headers else self._headers
auth = auth if auth else self._auth
if retries:
self._implement_retry(retries, status_list_to_retry, backoff_factor, raise_on_redirect, raise_on_status)
# Execute
res = self._session.request(
method,
address,
verify=self._verify,
params=params,
data=data,
json=json_data,
files=files,
headers=headers,
auth=auth,
timeout=timeout,
**kwargs
)
# Handle error responses gracefully
if not self._is_status_code_valid(res, ok_codes):
if error_handler:
error_handler(res)
else:
err_msg = 'Error in API call [{}] - {}' \
.format(res.status_code, res.reason)
try:
# Try to parse json error response
error_entry = res.json()
err_msg += '\n{}'.format(json.dumps(error_entry))
raise DemistoException(err_msg, res=res)
except ValueError:
err_msg += '\n{}'.format(res.text)
raise DemistoException(err_msg, res=res)
if not empty_valid_codes:
empty_valid_codes = [204]
is_response_empty_and_successful = (res.status_code in empty_valid_codes)
if is_response_empty_and_successful and return_empty_response:
return res
resp_type = resp_type.lower()
try:
if resp_type == 'json':
return res.json()
if resp_type == 'text':
return res.text
if resp_type == 'content':
return res.content
if resp_type == 'xml':
# ET.parse expects a file path or file object, so parse the response body directly instead
return ET.fromstring(res.content)
if resp_type == 'response':
return res
return res
except ValueError as exception:
raise DemistoException('Failed to parse json object from response: {}'
.format(res.content), exception, res)
except requests.exceptions.ConnectTimeout as exception:
err_msg = 'Connection Timeout Error - potential reasons might be that the Server URL parameter' \
' is incorrect or that the Server is not accessible from your host.'
raise DemistoException(err_msg, exception)
except requests.exceptions.SSLError as exception:
# in case the "Trust any certificate" is already checked
if not self._verify:
raise
err_msg = 'SSL Certificate Verification Failed - try selecting \'Trust any certificate\' checkbox in' \
' the integration configuration.'
raise DemistoException(err_msg, exception)
except requests.exceptions.ProxyError as exception:
err_msg = 'Proxy Error - if the \'Use system proxy\' checkbox in the integration configuration is' \
' selected, try clearing the checkbox.'
raise DemistoException(err_msg, exception)
except requests.exceptions.ConnectionError as exception:
# Get originating Exception in Exception chain
error_class = str(exception.__class__)
err_type = '<' + error_class[error_class.find('\'') + 1: error_class.rfind('\'')] + '>'
err_msg = 'Verify that the server URL parameter' \
' is correct and that you have access to the server from your host.' \
'\nError Type: {}\nError Number: [{}]\nMessage: {}\n' \
.format(err_type, exception.errno, exception.strerror)
raise DemistoException(err_msg, exception)
except requests.exceptions.RetryError as exception:
try:
reason = 'Reason: {}'.format(exception.args[0].reason.args[0])
except Exception: # noqa: disable=broad-except
reason = ''
err_msg = 'Max Retries Error- Request attempts with {} retries failed. \n{}'.format(retries, reason)
raise DemistoException(err_msg, exception)
def _is_status_code_valid(self, response, ok_codes=None):
"""If the status code is OK, return 'True'.
:type response: ``requests.Response``
:param response: Response from API after the request for which to check the status.
:type ok_codes: ``tuple`` or ``list``
:param ok_codes:
The request codes to accept as OK, for example: (200, 201, 204). If you specify
"None", will use response.ok.
:return: Whether the status of the response is valid.
:rtype: ``bool``
"""
# Get wanted ok codes
status_codes = ok_codes if ok_codes else self._ok_codes
if status_codes:
return response.status_code in status_codes
return response.ok
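# Illustrative sketch (not part of the original module): a minimal integration client built on
# BaseClient. The base URL, endpoint and headers below are hypothetical.
#
#     class Client(BaseClient):
#         def get_alert(self, alert_id):
#             return self._http_request('GET', url_suffix='/alerts/{}'.format(alert_id))
#
#     client = Client(
#         base_url='https://example.com/api/v2/',
#         verify=True,
#         headers={'Accept': 'application/json'},
#     )
#     alert = client.get_alert('1234')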
def batch(iterable, batch_size=1):
"""Gets an iterable and yields slices of it.
:type iterable: ``list``
:param iterable: list or other iterable object.
:type batch_size: ``int``
:param batch_size: the size of batches to fetch
:rtype: ``list``
:return: Iterable slices of the given iterable.
"""
current_batch = iterable[:batch_size]
not_batched = iterable[batch_size:]
while current_batch:
yield current_batch
current_batch = not_batched[:batch_size]
not_batched = not_batched[batch_size:]
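# Illustrative usage (not part of the original module):
#
#     >>> [chunk for chunk in batch(['a', 'b', 'c', 'd', 'e'], batch_size=2)]
#     [['a', 'b'], ['c', 'd'], ['e']]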
def dict_safe_get(dict_object, keys, default_return_value=None, return_type=None, raise_return_type=True):
"""Recursive safe get query (for nested dicts and lists), If keys found return value otherwise return None or default value.
Example:
>>> data = {"something" : {"test": "A"}}
>>> dict_safe_get(data, ['something', 'test'])
>>> 'A'
>>> dict_safe_get(data, ['something', 'else'], 'default value')
>>> 'default value'
:type dict_object: ``dict``
:param dict_object: dictionary to query.
:type keys: ``list``
:param keys: keys for recursive get.
:type default_return_value: ``object``
:param default_return_value: Value to return when no key available.
:type return_type: ``type``
:param return_type: Expected return type.
:type raise_return_type: ``bool``
:param raise_return_type: Whether to raise an error when the value didn't match the expected return type.
:rtype: ``object``
:return: Value from nested query.
"""
return_value = dict_object
for key in keys:
try:
return_value = return_value[key]
except (KeyError, TypeError, IndexError, AttributeError):
return_value = default_return_value
break
if return_type and not isinstance(return_value, return_type):
if raise_return_type:
raise TypeError("Safe get Error:\nDetails: Return Type Error Excepted return type {0},"
" but actual type from nested dict/list is {1} with value {2}.\n"
"Query: {3}\nQueried object: {4}".format(return_type, type(return_value),
return_value, keys, dict_object))
return_value = default_return_value
return return_value
CONTEXT_UPDATE_RETRY_TIMES = 3
MIN_VERSION_FOR_VERSIONED_CONTEXT = '6.0.0'
def merge_lists(original_list, updated_list, key):
"""
Replace values in a list with those in an updated list.
Example:
>>> original = [{'id': '1', 'updated': 'n'}, {'id': '2', 'updated': 'n'}, {'id': '11', 'updated': 'n'}]
>>> updated = [{'id': '1', 'updated': 'y'}, {'id': '3', 'updated': 'y'}, {'id': '11', 'updated': 'n',
>>> 'remove': True}]
>>> result = [{'id': '1', 'updated': 'y'}, {'id': '2', 'updated': 'n'}, {'id': '3', 'updated': 'y'}]
:type original_list: ``list``
:param original_list: The original list.
:type updated_list: ``list``
:param updated_list: The updated list.
:type key: ``str``
:param key: The key to replace elements by.
:rtype: ``list``
:return: The merged list.
"""
original_dict = {element[key]: element for element in original_list}
updated_dict = {element[key]: element for element in updated_list}
original_dict.update(updated_dict)
removed = [obj for obj in original_dict.values() if obj.get('remove', False) is True]
for r in removed:
demisto.debug('Removing from integration context: {}'.format(str(r)))
merged_list = [obj for obj in original_dict.values() if obj.get('remove', False) is False]
return merged_list
def set_integration_context(context, sync=True, version=-1):
"""
Sets the integration context.
:type context: ``dict``
:param context: The context to set.
:type sync: ``bool``
:param sync: Whether to save the context directly to the DB.
:type version: ``Any``
:param version: The version of the context to set.
:rtype: ``dict``
:return: The new integration context
"""
demisto.debug('Setting integration context')
if is_versioned_context_available():
demisto.debug('Updating integration context with version {}. Sync: {}'.format(version, sync))
return demisto.setIntegrationContextVersioned(context, version, sync)
else:
return demisto.setIntegrationContext(context)
def get_integration_context(sync=True, with_version=False):
"""
Gets the integration context.
:type sync: ``bool``
:param sync: Whether to get the integration context directly from the DB.
:type with_version: ``bool``
:param with_version: Whether to return the version.
:rtype: ``dict``
:return: The integration context.
"""
if is_versioned_context_available():
integration_context = demisto.getIntegrationContextVersioned(sync)
if with_version:
return integration_context
else:
return integration_context.get('context', {})
else:
return demisto.getIntegrationContext()
def is_versioned_context_available():
"""
Determines whether versioned integration context is available according to the server version.
:rtype: ``bool``
:return: Whether versioned integration context is available
"""
return is_demisto_version_ge(MIN_VERSION_FOR_VERSIONED_CONTEXT)
def set_to_integration_context_with_retries(context, object_keys=None, sync=True,
max_retry_times=CONTEXT_UPDATE_RETRY_TIMES):
"""
Update the integration context with a dictionary of keys and values with multiple attempts.
The function supports merging the context keys using the provided object_keys parameter.
If the version is too old by the time the context is set,
another attempt will be made until the limit after a random sleep.
:type context: ``dict``
:param context: A dictionary of keys and values to set.
:type object_keys: ``dict``
:param object_keys: A dictionary to map between context keys and their unique ID for merging them.
:type sync: ``bool``
:param sync: Whether to save the context directly to the DB.
:type max_retry_times: ``int``
:param max_retry_times: The maximum number of attempts to try.
:rtype: ``None``
:return: None
"""
attempt = 0
# do while...
while True:
if attempt == max_retry_times:
raise Exception('Failed updating integration context. Max retry attempts exceeded.')
# Update the latest context and get the new version
integration_context, version = update_integration_context(context, object_keys, sync)
demisto.debug('Attempting to update the integration context with version {}.'.format(version))
# Attempt to update integration context with a version.
# If we get a ValueError (DB Version), then the version was not updated and we need to try again.
attempt += 1
try:
set_integration_context(integration_context, sync, version)
demisto.debug('Successfully updated integration context with version {}.'
''.format(version))
break
except ValueError as ve:
demisto.debug('Failed updating integration context with version {}: {} Attempts left - {}'
                          ''.format(version, str(ve), max_retry_times - attempt))
# Sleep for a random time
time_to_sleep = randint(1, 100) / 1000
time.sleep(time_to_sleep)
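# Illustrative usage sketch (the context key and ID field below are made-up examples,
# not part of the original API surface): an integration that stores a list of events
# in the context and wants them merged by their 'id' field, with automatic retries on
# version conflicts, could call:
#
#   set_to_integration_context_with_retries(
#       context={'events': [{'id': '1', 'status': 'done'}]},
#       object_keys={'events': 'id'},
#   )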
def get_integration_context_with_version(sync=True):
"""
Get the latest integration context with version, if available.
:type sync: ``bool``
:param sync: Whether to get the context directly from the DB.
:rtype: ``tuple``
:return: The latest integration context with version.
"""
latest_integration_context_versioned = get_integration_context(sync, with_version=True)
version = -1
if is_versioned_context_available():
integration_context = latest_integration_context_versioned.get('context', {})
if sync:
version = latest_integration_context_versioned.get('version', 0)
else:
integration_context = latest_integration_context_versioned
return integration_context, version
def update_integration_context(context, object_keys=None, sync=True):
"""
Update the integration context with a given dictionary after merging it with the latest integration context.
:type context: ``dict``
:param context: The keys and values to update in the integration context.
:type object_keys: ``dict``
:param object_keys: A dictionary to map between context keys and their unique ID for merging them
with the latest context.
:type sync: ``bool``
:param sync: Whether to use the context directly from the DB.
:rtype: ``tuple``
:return: The updated integration context along with the current version.
"""
integration_context, version = get_integration_context_with_version(sync)
if not object_keys:
object_keys = {}
for key, _ in context.items():
latest_object = json.loads(integration_context.get(key, '[]'))
updated_object = context[key]
if key in object_keys:
merged_list = merge_lists(latest_object, updated_object, object_keys[key])
integration_context[key] = json.dumps(merged_list)
else:
integration_context[key] = json.dumps(updated_object)
return integration_context, version
class DemistoException(Exception):
def __init__(self, message, exception=None, res=None, *args):
self.res = res
self.message = message
self.exception = exception
super(DemistoException, self).__init__(message, exception, *args)
def __str__(self):
return str(self.message)
class GetRemoteDataArgs:
"""get-remote-data args parser
:type args: ``dict``
:param args: arguments for the command.
:return: No data returned
:rtype: ``None``
"""
def __init__(self, args):
self.remote_incident_id = args['id']
self.last_update = args['lastUpdate']
class GetModifiedRemoteDataArgs:
"""get-modified-remote-data args parser
:type args: ``dict``
:param args: arguments for the command.
:return: No data returned
:rtype: ``None``
"""
def __init__(self, args):
self.last_update = args['lastUpdate']
class UpdateRemoteSystemArgs:
"""update-remote-system args parser
:type args: ``dict``
    :param args: arguments for the command.
:return: No data returned
:rtype: ``None``
"""
def __init__(self, args):
self.data = args.get('data') # type: ignore
self.entries = args.get('entries')
self.incident_changed = args.get('incidentChanged')
self.remote_incident_id = args.get('remoteId')
self.inc_status = args.get('status')
self.delta = args.get('delta')
class GetRemoteDataResponse:
"""get-remote-data response parser
:type mirrored_object: ``dict``
:param mirrored_object: The object you are mirroring, in most cases the incident.
:type entries: ``list``
:param entries: The entries you want to add to the war room.
:return: No data returned
:rtype: ``None``
"""
def __init__(self, mirrored_object, entries):
self.mirrored_object = mirrored_object
self.entries = entries
def extract_for_local(self):
"""Extracts the response into the mirrored incident.
:return: List of details regarding the mirrored incident.
:rtype: ``list``
"""
if self.mirrored_object:
return [self.mirrored_object] + self.entries
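# Illustrative sketch (incident fields and entry contents are assumptions): a
# get-remote-data command typically wraps the updated incident and any new war-room
# entries like this, then passes the object to the platform's result helper (e.g.
# return_results, assumed to be defined elsewhere in this module):
#
#   response = GetRemoteDataResponse(
#       mirrored_object={'id': '1234', 'severity': 2},
#       entries=[{'Type': EntryType.NOTE, 'Contents': 'Ticket closed remotely',
#                 'ContentsFormat': EntryFormat.TEXT}],
#   )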
class GetModifiedRemoteDataResponse:
"""get-modified-remote-data response parser
:type modified_incident_ids: ``list``
:param modified_incident_ids: The incidents that were modified since the last check.
:return: No data returned
:rtype: ``None``
"""
def __init__(self, modified_incident_ids):
self.modified_incident_ids = modified_incident_ids
def to_entry(self):
"""Extracts the response
:return: List of incidents to run the get-remote-data command on.
:rtype: ``list``
"""
demisto.info('Modified incidents: {}'.format(self.modified_incident_ids))
return {'Contents': self.modified_incident_ids, 'Type': EntryType.NOTE, 'ContentsFormat': EntryFormat.JSON}
class SchemeTypeMapping:
"""Scheme type mappings builder.
:type type_name: ``str``
:param type_name: The name of the remote incident type.
:type fields: ``dict``
:param fields: The dict of fields to their description.
:return: No data returned
:rtype: ``None``
"""
def __init__(self, type_name='', fields=None):
self.type_name = type_name
self.fields = fields if fields else {}
def add_field(self, name, description=''):
"""Adds a field to the incident type mapping.
:type name: ``str``
:param name: The name of the field.
:type description: ``str``
        :param description: The description for that field.
:return: No data returned
:rtype: ``None``
"""
self.fields.update({
name: description
})
def extract_mapping(self):
"""Extracts the mapping into XSOAR mapping screen.
:return: the mapping object for the current field.
:rtype: ``dict``
"""
return {
self.type_name: self.fields
}
class GetMappingFieldsResponse:
"""Handler for the mapping fields object.
:type scheme_types_mapping: ``list``
:param scheme_types_mapping: List of all the mappings in the remote system.
:return: No data returned
:rtype: ``None``
"""
def __init__(self, scheme_types_mapping=None):
self.scheme_types_mappings = scheme_types_mapping if scheme_types_mapping else []
def add_scheme_type(self, scheme_type_mapping):
"""Add another incident type mapping.
:type scheme_type_mapping: ``dict``
:param scheme_type_mapping: mapping of a singular field.
:return: No data returned
:rtype: ``None``
"""
self.scheme_types_mappings.append(scheme_type_mapping)
def extract_mapping(self):
"""Extracts the mapping into XSOAR mapping screen.
:return: the mapping object for the current field.
:rtype: ``dict``
"""
all_mappings = {}
for scheme_types_mapping in self.scheme_types_mappings:
all_mappings.update(scheme_types_mapping.extract_mapping())
return all_mappings
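# Illustrative sketch (the type and field names are assumptions): a get-mapping-fields
# command builds the response from SchemeTypeMapping objects, so the items added to
# GetMappingFieldsResponse should be SchemeTypeMapping instances rather than plain dicts:
#
#   incident_type_scheme = SchemeTypeMapping(type_name='My Integration Incident')
#   incident_type_scheme.add_field(name='severity', description='Incident severity')
#   mapping_response = GetMappingFieldsResponse()
#   mapping_response.add_scheme_type(incident_type_scheme)
#   # mapping_response.extract_mapping() -> {'My Integration Incident': {'severity': 'Incident severity'}}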
def get_x_content_info_headers():
"""Get X-Content-* headers to send in outgoing requests to use when performing requests to
external services such as oproxy.
:return: headers dict
:rtype: ``dict``
"""
calling_context = demisto.callingContext.get('context', {})
brand_name = calling_context.get('IntegrationBrand', '')
instance_name = calling_context.get('IntegrationInstance', '')
headers = {
'X-Content-Version': CONTENT_RELEASE_VERSION,
'X-Content-Name': brand_name or instance_name or 'Name not found',
'X-Content-LicenseID': demisto.getLicenseID(),
'X-Content-Branch': CONTENT_BRANCH_NAME,
'X-Content-Server-Version': get_demisto_version_as_str(),
}
return headers
class BaseWidget:
@abstractmethod
def to_display(self):
pass
class TextWidget(BaseWidget):
"""Text Widget representation
:type text: ``str``
:param text: The text for the widget to display
:return: No data returned
:rtype: ``None``
"""
def __init__(self, text):
# type: (str) -> None
self.text = text
def to_display(self):
"""Text Widget representation
:type text: ``str``
:param text: The text for the widget to display
:return: No data returned
:rtype: ``None``
"""
return self.text
class TrendWidget(BaseWidget):
"""Trend Widget representation
:type current_number: ``int``
:param current_number: The Current number in the trend.
:type previous_number: ``int``
:param previous_number: The previous number in the trend.
:return: No data returned
:rtype: ``None``
"""
def __init__(self, current_number, previous_number):
# type: (int, int) -> None
self.current_number = current_number
self.previous_number = previous_number
def to_display(self):
return json.dumps({
'currSum': self.current_number,
'prevSum': self.previous_number
})
class NumberWidget(BaseWidget):
"""Number Widget representation
:type number: ``int``
:param number: The number for the widget to display.
:return: No data returned
:rtype: ``None``
"""
def __init__(self, number):
# type: (int) -> None
self.number = number
def to_display(self):
return self.number
class BarColumnPieWidget(BaseWidget):
"""Bar/Column/Pie Widget representation
:type categories: ``list``
    :param categories: a list of categories to display (it is better to use the add_category function to populate the data).
:return: No data returned
:rtype: ``None``
"""
def __init__(self, categories=None):
# type: (list) -> None
self.categories = categories if categories else [] # type: List[dict]
def add_category(self, name, number):
"""Add a category to widget.
:type name: ``str``
:param name: the name of the category to add.
:type number: ``int``
:param number: the number value of the category.
:return: No data returned.
:rtype: ``None``
"""
self.categories.append({
'name': name,
'data': [number]
})
def to_display(self):
return json.dumps(self.categories)
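# Illustrative sketch (category names and counts are made up): a widget script can
# build its categories and return the serialized result to the platform, e.g. via
# demisto.results:
#
#   widget = BarColumnPieWidget()
#   widget.add_category('Critical', 7)
#   widget.add_category('Low', 42)
#   demisto.results(widget.to_display())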
class LineWidget(BaseWidget):
"""Line Widget representation
:type categories: ``Any``
    :param categories: a list of categories to display (it is better to use the add_category function to populate the data).
:return: No data returned
:rtype: ``None``
"""
def __init__(self, categories=None):
# type: (list) -> None
self.categories = categories if categories else [] # type: List[dict]
def add_category(self, name, number, group):
"""Add a category to widget.
:type name: ``str``
:param name: the name of the category to add.
:type number: ``int``
:param number: the number value of the category.
:type group: ``str``
:param group: the name of the relevant group.
:return: No data returned
:rtype: ``None``
"""
self.categories.append({
'name': name,
'data': [number],
'groups': [
{
'name': group,
'data': [number]
},
]
})
def to_display(self):
processed_names = [] # type: List[str]
processed_categories = [] # type: List[dict]
for cat in self.categories:
if cat['name'] in processed_names:
for processed_category in processed_categories:
if cat['name'] == processed_category['name']:
processed_category['data'] = [processed_category['data'][0] + cat['data'][0]]
processed_category['groups'].extend(cat['groups'])
break
else:
processed_categories.append(cat)
processed_names.append(cat['name'])
return json.dumps(processed_categories)
class TableOrListWidget(BaseWidget):
"""Table/List Widget representation
:type data: ``Any``
    :param data: a list of data to display (it is better to use the add_row function to populate the data).
:return: No data returned
:rtype: ``None``
"""
def __init__(self, data=None):
# type: (Any) -> None
self.data = data if data else []
if not isinstance(self.data, list):
self.data = [data]
def add_row(self, data):
"""Add a row to the widget.
:type data: ``Any``
:param data: the data to add to the list/table.
:return: No data returned
:rtype: ``None``
"""
self.data.append(data)
def to_display(self):
return json.dumps({
'total': len(self.data),
'data': self.data
})
class IndicatorsSearcher:
"""Used in order to search indicators by the paging or serachAfter param
:type page: ``int``
:param page: the number of page from which we start search indicators from. (will be updated via iter)
:type filter_fields: ``Optional[str]``
:param filter_fields: comma separated fields to filter (e.g. "value,type")
:type from_date: ``Optional[str]``
:param from_date: the start date to search from.
:type query: ``Optional[str]``
:param query: indicator search query
:type size: ``int``
:param size: limit the number of returned results.
:type to_date: ``Optional[str]``
    :param to_date: the end date to search until.
:type value: ``str``
:param value: the indicator value to search.
    :type limit: ``Optional[int]``
    :param limit: the upper limit of the search (will be updated via iter)
:return: No data returned
:rtype: ``None``
"""
def __init__(self,
page=0,
filter_fields=None,
from_date=None,
query=None,
size=100,
to_date=None,
value='',
limit=None):
# searchAfter is available in searchIndicators from version 6.1.0
self._can_use_search_after = is_demisto_version_ge('6.1.0')
# populateFields merged in https://github.com/demisto/server/pull/18398
self._can_use_filter_fields = is_demisto_version_ge('6.1.0', build_number='1095800')
self._search_after_title = 'searchAfter'
self._search_after_param = None
self._original_page = page
self._page = page
self._filter_fields = filter_fields
self._total = None
self._from_date = from_date
self._query = query
self._size = size
self._to_date = to_date
self._value = value
self._original_limit = limit
self._next_limit = limit
self._search_is_done = False
def __iter__(self):
self._total = None
self._search_after_param = None
self._page = self._original_page
self._next_limit = self._original_limit
self._search_is_done = False
return self
# python2
def next(self):
return self.__next__()
def __next__(self):
if self._search_is_done:
raise StopIteration
size = min(self._size, self._next_limit or self._size)
res = self.search_indicators_by_version(from_date=self._from_date,
query=self._query,
size=size,
to_date=self._to_date,
value=self._value)
fetched_len = len(res.get('iocs') or [])
if fetched_len == 0:
raise StopIteration
if self._next_limit:
self._next_limit -= fetched_len
self._search_is_done = self._is_search_done()
return res
@property
def page(self):
return self._page
@property
def total(self):
return self._total
@property
def limit(self):
return self._next_limit
@limit.setter
def limit(self, value):
self._next_limit = self._original_limit = value
def _is_search_done(self):
"""
Checks one of these conditions:
1. self.limit is set, and it's updated to be less or equal to zero
2. for search_after if self.total was populated by a previous search, but no self._search_after_param
3. for page if self.total was populated by a previous search, but page is too large
"""
if self._search_is_done:
return True
reached_limit = isinstance(self._next_limit, int) and self._next_limit <= 0
if reached_limit:
return True
if self.total is None:
return False
else:
if self._can_use_search_after:
return self._search_after_param is None
else:
return self.total == self.page * self._size
def search_indicators_by_version(self, from_date=None, query='', size=100, to_date=None, value=''):
"""There are 2 cases depends on the sever version:
1. Search indicators using paging, raise the page number in each call.
2. Search indicators using searchAfter param, update the _search_after_param in each call.
:type from_date: ``Optional[str]``
:param from_date: the start date to search from.
:type query: ``Optional[str]``
:param query: indicator search query
:type size: ``int``
:param size: limit the number of returned results.
:type to_date: ``Optional[str]``
        :param to_date: the end date to search until.
:type value: ``str``
:param value: the indicator value to search.
:return: object contains the search results
:rtype: ``dict``
"""
# use paging as fallback when cannot use search_after
use_paging = not (self._search_after_param and self._can_use_search_after)
search_iocs_params = assign_params(
fromDate=from_date,
toDate=to_date,
query=query,
size=size,
value=value,
searchAfter=self._search_after_param if not use_paging else None,
populateFields=self._filter_fields if self._can_use_filter_fields else None,
page=self.page if use_paging else None
)
res = demisto.searchIndicators(**search_iocs_params)
if len(res.get('iocs') or []) > 0:
self._page += 1 # advance pages for search_after, as fallback
else:
self._search_is_done = True
self._search_after_param = res.get(self._search_after_title)
self._total = res.get('total')
if self._search_after_title in res and self._search_after_param is None:
demisto.info('Elastic search using searchAfter returned all indicators')
self._search_is_done = True
return res
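# Illustrative usage sketch (the query, size and limit values are assumptions): iterating
# the searcher keeps fetching batches until the limit is reached or the results are
# exhausted, regardless of whether paging or searchAfter is used under the hood:
#
#   searcher = IndicatorsSearcher(query='type:IP', size=200, limit=1000)
#   for batch in searcher:
#       for ioc in batch.get('iocs') or []:
#           handle_ioc(ioc)  # hypothetical handler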
class AutoFocusKeyRetriever:
"""AutoFocus API Key management class
:type api_key: ``str``
:param api_key: Auto Focus API key coming from the integration parameters
:type override_default_credentials: ``bool``
:param override_default_credentials: Whether to override the default credentials and use the
Cortex XSOAR given AutoFocus API Key
:return: No data returned
:rtype: ``None``
"""
def __init__(self, api_key):
# demisto.getAutoFocusApiKey() is available from version 6.2.0
if not api_key:
if not is_demisto_version_ge("6.2.0"): # AF API key is available from version 6.2.0
raise DemistoException('For versions earlier than 6.2.0, configure an API Key.')
try:
api_key = demisto.getAutoFocusApiKey() # is not available on tenants
except ValueError as err:
raise DemistoException('AutoFocus API Key is only available on the main account for TIM customers. ' + str(err))
self.key = api_key
def get_feed_last_run():
"""
    This function gets the feed's last run. From XSOAR version 6.2.0 it uses `demisto.getLastRun()`;
    before XSOAR version 6.2.0 it uses `demisto.getIntegrationContext()`.
:rtype: ``dict``
:return: All indicators from the feed's last run
"""
if is_demisto_version_ge('6.2.0'):
feed_last_run = demisto.getLastRun() or {}
if not feed_last_run:
integration_ctx = demisto.getIntegrationContext()
if integration_ctx:
feed_last_run = integration_ctx
demisto.setLastRun(feed_last_run)
demisto.setIntegrationContext({})
else:
feed_last_run = demisto.getIntegrationContext() or {}
return feed_last_run
def set_feed_last_run(last_run_indicators):
"""
    This function sets the feed's last run. From XSOAR version 6.2.0 it uses `demisto.setLastRun()`;
    before XSOAR version 6.2.0 it uses `demisto.setIntegrationContext()`.
:type last_run_indicators: ``dict``
:param last_run_indicators: Indicators to save in "lastRun" object.
:rtype: ``None``
:return: None
"""
if is_demisto_version_ge('6.2.0'):
demisto.setLastRun(last_run_indicators)
else:
demisto.setIntegrationContext(last_run_indicators)
def support_multithreading():
"""Adds lock on the calls to the Cortex XSOAR server from the Demisto object to support integration which use multithreading.
:return: No data returned
:rtype: ``None``
"""
global demisto
prev_do = demisto._Demisto__do # type: ignore[attr-defined]
demisto.lock = Lock() # type: ignore[attr-defined]
def locked_do(cmd):
try:
if demisto.lock.acquire(timeout=60): # type: ignore[call-arg,attr-defined]
return prev_do(cmd) # type: ignore[call-arg]
else:
raise RuntimeError('Failed acquiring lock')
finally:
demisto.lock.release() # type: ignore[attr-defined]
demisto._Demisto__do = locked_do # type: ignore[attr-defined]
| return {
'details': self.details,
'title': self.action,
} |
control_window_ack_size.go | package control
import (
"io"
"pkg.autotunego.com/pkg/streamsrv/newrtmp/spec"
)
type WindowAckSize struct {
WindowAckSize uint32
}
var _ Control = new(WindowAckSize)
func (c *WindowAckSize) TypeId() byte { return 0x5 }
func (c *WindowAckSize) Read(r io.Reader) error {
buf, err := spec.ReadBytes(r, 4)
if err != nil {
return err
}
c.WindowAckSize = spec.Uint32(buf)
return nil
} |
func (c *WindowAckSize) Write(w io.Writer) error {
if _, err := spec.PutUint32(c.WindowAckSize, w); err != nil {
return err
}
return nil
} | |
AlibabaWdkorderSharestockInsuranceRefundget.go | package wdk
import (
"github.com/bububa/opentaobao/core"
"github.com/bububa/opentaobao/model/wdk"
)
/*
Fetch after-sales (reverse) order data for the shared-stock insurance business
alibaba.wdkorder.sharestock.insurance.refundget
Fetch after-sales (reverse) order data for the shared-stock insurance business
*/
func AlibabaWdkorderSharestockInsuranceRefundget(clt *core.SDKClient, req *wd | etRequest, session string) (*wdk.AlibabaWdkorderSharestockInsuranceRefundgetAPIResponse, error) {
var resp wdk.AlibabaWdkorderSharestockInsuranceRefundgetAPIResponse
err := clt.Post(req, &resp, session)
if err != nil {
return nil, err
}
return &resp, nil
}
| k.AlibabaWdkorderSharestockInsuranceRefundg |
model.py | """Models for final hackbright project """
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
app = Flask(__name__)
class Legislator(db.Model):
""" Info on current legislators. """
__tablename__ = "current_legislators"
legislator_id = db.Column(db.Integer, autoincrement=True, primary_key=True)
last_name = db.Column(db.String(25), nullable=False)
full_name = db.Column(db.String(200), nullable=False)
state = db.Column(db.String(20), nullable=False)
party = db.Column(db.String(50), nullable=False)
opensecrets_id = db.Column(db.String(10), nullable=True)
govtrack_id = db.Column(db.Integer, nullable=False)
votesmart_id = db.Column(db.Integer, nullable=True)
phone = db.Column(db.String(25), nullable=True)
website = db.Column(db.String(150), nullable=True)
def | (self):
""" provide info on legislator."""
return f"Legislator: {self.full_name} party: {self.party}"
##### getting direction from Testing lab below #####
def testing_data():
""" create sample data for running tests """
legis = Legislator(last_name="Smith", full_name="Jane Smith", state="LA",
party="Democrat", opensecrets_id="N00003535",
govtrack_id=400050, votesmart_id=27018,
phone="504-555-5555", website="wwww.google.com")
db.session.add(legis)
db.session.commit()
def connect_to_db(app, db_uri="postgresql:///legislature"):
""" Connect database to Flask app."""
    # Configure to use my PostgreSQL database
app.config['SQLALCHEMY_DATABASE_URI'] = db_uri
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db.app = app
db.init_app(app)
if __name__ == "__main__":
# if I run this module interactively, it will leave
# me in a state of being able to work with the database directly.
from server import app
connect_to_db(app)
print("Connected to DB.") | __repr__ |
upgrade.rs | // Copyright 2017-2018 Parity Technologies (UK) Ltd.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
use futures::prelude::*;
use multiaddr::Multiaddr;
use crate::{
transport::Transport,
upgrade::{OutboundUpgrade, InboundUpgrade, UpgradeInfo, apply_inbound, apply_outbound}
}; |
#[derive(Debug, Copy, Clone)]
pub struct Upgrade<T, U> { inner: T, upgrade: U }
impl<T, U> Upgrade<T, U> {
pub fn new(inner: T, upgrade: U) -> Self {
Upgrade { inner, upgrade }
}
}
impl<D, U, O, E> Transport for Upgrade<D, U>
where
D: Transport,
D::Dial: Send + 'static,
D::Listener: Send + 'static,
D::ListenerUpgrade: Send + 'static,
D::Output: AsyncRead + AsyncWrite + Send + 'static,
U: InboundUpgrade<D::Output, Output = O, Error = E>,
U: OutboundUpgrade<D::Output, Output = O, Error = E> + Send + Clone + 'static,
<U as UpgradeInfo>::NamesIter: Send,
<U as UpgradeInfo>::UpgradeId: Send,
<U as InboundUpgrade<D::Output>>::Future: Send,
<U as OutboundUpgrade<D::Output>>::Future: Send,
E: std::error::Error + Send + Sync + 'static
{
type Output = O;
type Listener = Box<Stream<Item = (Self::ListenerUpgrade, Multiaddr), Error = std::io::Error> + Send>;
type ListenerUpgrade = Box<Future<Item = Self::Output, Error = std::io::Error> + Send>;
type Dial = Box<Future<Item = Self::Output, Error = std::io::Error> + Send>;
fn dial(self, addr: Multiaddr) -> Result<Self::Dial, (Self, Multiaddr)> {
let upgrade = self.upgrade;
match self.inner.dial(addr.clone()) {
Ok(outbound) => {
let future = outbound
.and_then(move |x| apply_outbound(x, upgrade).map_err(|e| {
std::io::Error::new(std::io::ErrorKind::Other, e)
}));
Ok(Box::new(future))
}
Err((dialer, addr)) => Err((Upgrade::new(dialer, upgrade), addr))
}
}
fn listen_on(self, addr: Multiaddr) -> Result<(Self::Listener, Multiaddr), (Self, Multiaddr)> {
let upgrade = self.upgrade;
match self.inner.listen_on(addr) {
Ok((inbound, addr)) => {
let stream = inbound
.map(move |(future, addr)| {
let upgrade = upgrade.clone();
let future = future
.and_then(move |x| apply_inbound(x, upgrade).map_err(|e| {
std::io::Error::new(std::io::ErrorKind::Other, e)
}));
(Box::new(future) as Box<_>, addr)
});
Ok((Box::new(stream), addr))
}
Err((listener, addr)) => Err((Upgrade::new(listener, upgrade), addr)),
}
}
fn nat_traversal(&self, server: &Multiaddr, observed: &Multiaddr) -> Option<Multiaddr> {
self.inner.nat_traversal(server, observed)
}
} | use tokio_io::{AsyncRead, AsyncWrite}; |
rsa.js | // Depends on jsbn.js and rng.js
// Version 1.1: support utf-8 encoding in pkcs1pad2
// convert a (hex) string to a bignum object
function parseBigInt(str,r) {
return new BigInteger(str,r);
}
function linebrk(s,n) {
var ret = "";
var i = 0;
while(i + n < s.length) {
ret += s.substring(i,i+n) + "\n";
i += n;
}
return ret + s.substring(i,s.length);
}
function byte2Hex(b) {
if(b < 0x10)
return "0" + b.toString(16);
else
return b.toString(16);
}
// PKCS#1 (type 2, random) pad input string s to n bytes, and return a bigint
function pkcs1pad2(s,n) {
if(n < s.length + 11) { // TODO: fix for utf-8
console.error("Message too long for RSA");
return null;
}
var ba = new Array();
var i = s.length - 1;
while(i >= 0 && n > 0) {
var c = s.charCodeAt(i--);
if(c < 128) { // encode using utf-8
ba[--n] = c;
}
else if((c > 127) && (c < 2048)) {
ba[--n] = (c & 63) | 128;
ba[--n] = (c >> 6) | 192;
}
else {
ba[--n] = (c & 63) | 128;
ba[--n] = ((c >> 6) & 63) | 128;
ba[--n] = (c >> 12) | 224;
}
}
ba[--n] = 0;
var rng = new SecureRandom();
var x = new Array();
while(n > 2) { // random non-zero pad
x[0] = 0;
while(x[0] == 0) rng.nextBytes(x);
ba[--n] = x[0];
}
ba[--n] = 2;
ba[--n] = 0;
return new BigInteger(ba);
}
// "empty" RSA key constructor
function RSAKey() {
this.n = null;
this.e = 0;
this.d = null;
this.p = null;
this.q = null;
this.dmp1 = null;
this.dmq1 = null;
this.coeff = null;
}
// Set the public key fields N and e from hex strings
function RSASetPublic(N,E) {
if(N != null && E != null && N.length > 0 && E.length > 0) {
this.n = parseBigInt(N,16);
this.e = parseInt(E,16); | }
// Perform raw public operation on "x": return x^e (mod n)
function RSADoPublic(x) {
return x.modPowInt(this.e, this.n);
}
// Return the PKCS#1 RSA encryption of "text" as an even-length hex string
function RSAEncrypt(text) {
var m = pkcs1pad2(text,(this.n.bitLength()+7)>>3);
if(m == null) return null;
var c = this.doPublic(m);
if(c == null) return null;
var h = c.toString(16);
if((h.length & 1) == 0) return h; else return "0" + h;
}
// Return the PKCS#1 RSA encryption of "text" as a Base64-encoded string
//function RSAEncryptB64(text) {
// var h = this.encrypt(text);
// if(h) return hex2b64(h); else return null;
//}
// protected
RSAKey.prototype.doPublic = RSADoPublic;
// public
RSAKey.prototype.setPublic = RSASetPublic;
RSAKey.prototype.encrypt = RSAEncrypt;
//RSAKey.prototype.encrypt_b64 = RSAEncryptB64; | }
else
console.error("Invalid RSA public key"); |
toc_trends_analysis.py | #------------------------------------------------------------------------------
# Name: toc_trends_analysis.py
# Purpose: Analyse RESA2 data for trends.
#
# Author: James Sample
#
# Created: Fri Jul 15 11:35:12 2016
# Copyright: (c) James Sample and NIVA
# Licence:
#------------------------------------------------------------------------------
""" Tore has previously written code to perform trend analyses on the data in
RESA2. I haven't been able to find the code, but it appears to shift data
between RESA2, Excel and Access, which seems a bit messy.
In the notebook updated_toc_trends_analysis.ipynb, I tested some code which
refactors all the analysis into Python, interfacing directly with the
database and returning results as dataframes. This seems to have worked
well.
The code below takes the main functions from this notebook and tidies them
up a bit. This file can then be imported into new notebooks, which should
make it easy to re-run trend analyses on different datasets in the future.
"""
def mk_test(x, stn_id, par, alpha=0.05):
""" Adapted from http://pydoc.net/Python/ambhas/0.4.0/ambhas.stats/
by Sat Kumar Tomer.
Perform the MK test for monotonic trends. Uses the "normal
approximation" to determine significance and therefore should
only be used if the number of values is >= 10.
Args:
        x:      1D array of data
        stn_id: Station ID (used only in warning messages)
        par:    Parameter name (used only in warning messages)
        alpha:  Significance level
Returns:
var_s: Variance of test statistic
s: M-K test statistic
z: Normalised test statistic
p: p-value of the significance test
trend: Whether to reject the null hypothesis (no trend) at
the specified significance level. One of:
'increasing', 'decreasing' or 'no trend'
"""
import numpy as np
from scipy.stats import norm
n = len(x)
if n < 10:
print (' Data series for %s at site %s has fewer than 10 non-null values. '
'Significance estimates may be unreliable.' % (par, int(stn_id)))
# calculate S
s = 0
for k in range(n-1):
for j in range(k+1,n):
s += np.sign(x[j] - x[k])
# calculate the unique data
unique_x = np.unique(x)
g = len(unique_x)
# calculate the var(s)
if n == g: # there is no tie
var_s = (n*(n-1)*(2*n+5))/18.
else: # there are some ties in data
tp = np.zeros(unique_x.shape)
for i in range(len(unique_x)):
tp[i] = sum(unique_x[i] == x)
# Sat Kumar's code has "+ np.sum", which is incorrect
var_s = (n*(n-1)*(2*n+5) - np.sum(tp*(tp-1)*(2*tp+5)))/18.
if s>0:
z = (s - 1)/np.sqrt(var_s)
elif s == 0:
z = 0
elif s<0:
z = (s + 1)/np.sqrt(var_s)
else:
z = np.nan
# calculate the p_value
p = 2*(1-norm.cdf(abs(z))) # two tail test
h = abs(z) > norm.ppf(1-alpha/2.)
if (z<0) and h:
trend = 'decreasing'
elif (z>0) and h:
trend = 'increasing'
elif np.isnan(z):
trend = np.nan
else:
trend = 'no trend'
return var_s, s, z, p, trend
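# Illustrative check (synthetic data, not from RESA2): a strictly increasing series
# of at least 10 values should be reported as an 'increasing' trend, e.g.:
#
#   import numpy as np
#   var_s, s, z, p, trend = mk_test(np.arange(12), stn_id=0, par='TOC')
#   # trend == 'increasing', p close to 0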
def | (raw_df, st_yr=None, end_yr=None, plot=False, fold=None):
""" Calculate key statistics for the TOC trends analysis:
'station_id'
'par_id'
'non_missing'
'median'
'mean'
'std_dev'
'period'
'mk_std_dev'
'mk_stat'
'norm_mk_stat'
'mk_p_val'
'trend'
'sen_slp'
Args:
raw_df: Dataframe with annual data for a single station. Columns must
be: [station_id, year, par1, par2, ... parn]
st_yr: First year to include in analysis. Pass None to start
at the beginning of the series
        end_yr:   Last year to include in analysis. Pass None to continue
                  to the end of the series
plot: Whether to generate a PNG plot of the Sen's slope
regression
fold: Folder in which to save PNGs if plot=True
Returns:
df of key statistics.
"""
import numpy as np, pandas as pd
import seaborn as sn, matplotlib.pyplot as plt, os
from scipy.stats import theilslopes
sn.set_context('poster')
# Checking
df = raw_df.copy()
assert list(df.columns[:2]) == ['STATION_ID', 'YEAR'], 'Columns must be: [STATION_ID, YEAR, par1, par2, ... parn]'
assert len(df['STATION_ID'].unique()) == 1, 'You can only process data for one site at a time'
# Get just the period of interest
if st_yr:
df = df.query('YEAR >= @st_yr')
if end_yr:
df = df.query('YEAR <= @end_yr')
# Only continue if data
if len(df) > 0:
# Get stn_id
stn_id = df['STATION_ID'].iloc[0]
# Tidy up df
df.index = df['YEAR']
df.sort_index(inplace=True)
del df['STATION_ID'], df['YEAR']
# Container for results
data_dict = {'station_id':[],
'par_id':[],
'non_missing':[],
'n_start':[],
'n_end':[],
'median':[],
'mean':[],
'std_dev':[],
'period':[],
'mk_std_dev':[],
'mk_stat':[],
'norm_mk_stat':[],
'mk_p_val':[],
'trend':[],
'sen_slp':[]}
# Loop over pars
for col in df.columns:
# 1. Station ID
data_dict['station_id'].append(stn_id)
# 2. Par ID
data_dict['par_id'].append(col)
# 3. Non-missing
data_dict['non_missing'].append(pd.notnull(df[col]).sum())
# 4. Number of non nulls at start
if st_yr:
# Record the number of non-nulls within 5 years of start year
data_dict['n_start'].append(pd.notnull(df[df.index<(st_yr+5)][col]).sum())
else:
# Record the number of non-nulls in first 5 years of record
data_dict['n_start'].append(pd.notnull(df[col].head(5)).sum())
# 5. Number of non nulls at end
if end_yr:
# Record the number of non-nulls within 5 years of end year
data_dict['n_end'].append(pd.notnull(df[df.index>(end_yr-5)][col]).sum())
else:
# Record the number of non-nulls in last 5 years of record
data_dict['n_end'].append(pd.notnull(df[col].tail(5)).sum())
# 6. Median
data_dict['median'].append(df[col].median())
# 7. Mean
data_dict['mean'].append(df[col].mean())
# 8. Std dev
data_dict['std_dev'].append(df[col].std())
# 9. Period
st_yr = df.index.min()
end_yr = df.index.max()
per = '%s-%s' % (int(st_yr), int(end_yr))
data_dict['period'].append(per)
# 10. M-K test
# Drop missing values
mk_df = df[[col]].dropna(how='any')
# Only run stats if more than 1 valid value
if len(mk_df) > 1:
var_s, s, z, p, trend = mk_test(mk_df[col].values, stn_id, col)
data_dict['mk_std_dev'].append(np.sqrt(var_s))
data_dict['mk_stat'].append(s)
data_dict['norm_mk_stat'].append(z)
data_dict['mk_p_val'].append(p)
data_dict['trend'].append(trend)
# 11. Sen's slope. Returns:
# Median slope, median intercept, 95% CI lower bound,
# 95% CI upper bound
sslp, icpt, lb, ub = theilslopes(mk_df[col].values,
mk_df.index, 0.95)
data_dict['sen_slp'].append(sslp)
# 12. Plot if desired
if plot:
fig = plt.figure()
plt.plot(mk_df.index, mk_df[col].values, 'bo-')
plt.plot(mk_df.index, mk_df.index*sslp + icpt, 'k-')
if col in ('Al', 'TOC'):
plt.ylabel('%s (mg/l)' % col, fontsize=24)
else:
plt.ylabel('%s (ueq/l)' % col, fontsize=24)
plt.title('%s at station %s' % (col, int(stn_id)),
fontsize=32)
plt.tight_layout()
# Save fig
out_path = os.path.join(fold,
'%s_%s_%s-%s.png' % (int(stn_id), col,
st_yr, end_yr))
plt.savefig(out_path, dpi=150)
plt.close()
# Otherwise all NaN
else:
for par in ['mk_std_dev', 'mk_stat', 'norm_mk_stat',
'mk_p_val', 'trend', 'sen_slp']:
data_dict[par].append(np.nan)
# Build to df
res_df = pd.DataFrame(data_dict)
res_df = res_df[['station_id', 'par_id', 'period', 'non_missing', 'n_start',
'n_end', 'mean', 'median', 'std_dev', 'mk_stat',
'norm_mk_stat', 'mk_p_val', 'mk_std_dev', 'trend', 'sen_slp']]
return res_df
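# Illustrative sketch (synthetic data): wc_stats expects annual values for a single
# station, with STATION_ID and YEAR as the first two columns, e.g.:
#
#   import pandas as pd
#   ann_df = pd.DataFrame({'STATION_ID': [101] * 12,
#                          'YEAR': list(range(1990, 2002)),
#                          'TOC': list(range(12))})
#   stats_df = wc_stats(ann_df, st_yr=1990, end_yr=2001)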
def read_resa2(proj_list, engine):
""" Reads raw data for the specified projects from RESA2. Extracts only
the parameters required for the trends analysis and calculates
aggregated annual values by taking medians.
Args:
proj_list: List of RESA2 project names for which to extract data
engine: SQLAlchemy 'engine' object already connected to RESA2
Returns:
[stn_df, wc_df, dup_df]. Dataframe of stations; Dataframe of annual
water chemistry values; dataframe of duplicates to check
"""
import pandas as pd
# Get par IDs etc. for pars of interest
par_list = ['SO4', 'Cl', 'Ca', 'Mg', 'NO3-N', 'TOC',
'Al', 'K', 'Na', 'NH4-N', 'pH']
sql = ('SELECT * FROM resa2.parameter_definitions '
'WHERE name in %s' % str(tuple(par_list)))
par_df = pd.read_sql_query(sql, engine)
# Get stations for a specified list of projects
if len(proj_list) == 1:
sql = ("SELECT station_id, station_code "
"FROM resa2.stations "
"WHERE station_id IN (SELECT UNIQUE(station_id) "
"FROM resa2.projects_stations "
"WHERE project_id IN (SELECT project_id "
"FROM resa2.projects "
"WHERE project_name = '%s'))"
% proj_list[0])
else:
sql = ('SELECT station_id, station_code '
'FROM resa2.stations '
'WHERE station_id IN (SELECT UNIQUE(station_id) '
'FROM resa2.projects_stations '
'WHERE project_id IN (SELECT project_id '
'FROM resa2.projects '
'WHERE project_name IN %s))'
% str(tuple(proj_list)))
stn_df = pd.read_sql(sql, engine)
# Get results for ALL pars for these sites
if len(stn_df)==1:
sql = ("SELECT * FROM resa2.water_chemistry_values2 "
"WHERE sample_id IN (SELECT water_sample_id FROM resa2.water_samples "
"WHERE station_id = %s)"
% stn_df['station_id'].iloc[0])
else:
sql = ("SELECT * FROM resa2.water_chemistry_values2 "
"WHERE sample_id IN (SELECT water_sample_id FROM resa2.water_samples "
"WHERE station_id IN %s)"
% str(tuple(stn_df['station_id'].values)))
wc_df = pd.read_sql_query(sql, engine)
# Get all sample dates for sites
if len(stn_df)==1:
sql = ("SELECT water_sample_id, station_id, sample_date, depth1, depth2 "
"FROM resa2.water_samples "
"WHERE station_id = %s "
% stn_df['station_id'].iloc[0])
else:
sql = ("SELECT water_sample_id, station_id, sample_date, depth1, depth2 "
"FROM resa2.water_samples "
"WHERE station_id IN %s "
% str(tuple(stn_df['station_id'].values)))
samp_df = pd.read_sql_query(sql, engine)
# Join in par IDs based on method IDs
sql = ('SELECT * FROM resa2.wc_parameters_methods')
meth_par_df = pd.read_sql_query(sql, engine)
wc_df = pd.merge(wc_df, meth_par_df, how='left',
left_on='method_id', right_on='wc_method_id')
# Get just the parameters of interest
wc_df = wc_df.query('wc_parameter_id in %s'
% str(tuple(par_df['parameter_id'].values)))
# Join in sample dates
wc_df = pd.merge(wc_df, samp_df, how='left',
left_on='sample_id', right_on='water_sample_id')
# Get just the near-surface samples
wc_df = wc_df.query('(depth1 <= 1) and (depth2 <= 1)')
# Join in parameter units
sql = ('SELECT * FROM resa2.parameter_definitions')
all_par_df = pd.read_sql_query(sql, engine)
wc_df = pd.merge(wc_df, all_par_df, how='left',
left_on='wc_parameter_id', right_on='parameter_id')
# Join in station codes
wc_df = pd.merge(wc_df, stn_df, how='left',
left_on='station_id', right_on='station_id')
# Convert units
wc_df['value'] = wc_df['value'] * wc_df['conversion_factor']
# Extract columns of interest
wc_df = wc_df[['station_id', 'sample_date', 'name',
'value', 'entered_date_x']]
# Check for duplicates
dup_df = wc_df[wc_df.duplicated(subset=['station_id',
'sample_date',
'name'],
keep=False)].sort_values(by=['station_id',
'sample_date',
'name'])
if len(dup_df) > 0:
print (' The database contains duplicate values for some station-'
'date-parameter combinations.\n Only the most recent values '
'will be used, but you should check the repeated values are not '
'errors.\n The duplicated entries are returned in a separate '
'dataframe.\n')
# Choose most recent record for each duplicate
wc_df.sort_values(by='entered_date_x', inplace=True, ascending=True)
# Drop duplicates
wc_df.drop_duplicates(subset=['station_id', 'sample_date', 'name'],
keep='last', inplace=True)
# Sort
wc_df.sort_values(by=['station_id', 'sample_date', 'name'],
inplace=True)
# Tidy
del wc_df['entered_date_x']
wc_df.reset_index(inplace=True, drop=True)
# Unstack
wc_df.set_index(['station_id', 'sample_date', 'name'], inplace=True)
wc_df = wc_df.unstack(level='name')
wc_df.columns = wc_df.columns.droplevel()
wc_df.reset_index(inplace=True)
wc_df.columns.name = None
# Extract year from date column
wc_df['year'] = wc_df['sample_date'].map(lambda x: x.year)
del wc_df['sample_date']
# Groupby station_id and year
grpd = wc_df.groupby(['station_id', 'year'])
# Calculate median
wc_df = grpd.agg('median')
return stn_df, wc_df, dup_df
def conv_units_and_correct(wc_df):
""" Take a dataframe of aggregated annual values in the units specified by
RESA2.PARAMETERS and performs unit conversions to ueq/l. Also applies
sea-salt correction where necessary.
Args:
wc_df: Dataframe in original units
Returns:
Dataframe in converted units
"""
import pandas as pd
# Tabulate chemical properties
chem_dict = {'molar_mass':[96, 35, 40, 24, 14, 39, 23, 14],
'valency':[2, 1, 2, 2, 1, 1, 1, 1],
'resa2_ref_ratio':[0.103, 1., 0.037, 0.196,
'N/A', 0.018, 0.859, 'N/A']}
chem_df = pd.DataFrame(chem_dict, index=['SO4', 'Cl', 'Ca', 'Mg',
'NO3-N', 'K', 'Na', 'NH4-N'])
chem_df = chem_df[['molar_mass', 'valency', 'resa2_ref_ratio']]
# Fill NoData for ANC calculation. Assume that NH4 can be ignored if not
# present.
# If have data for NH4, fill data gaps with 0
if 'NH4-N' in wc_df.columns:
wc_df['NH4-N'].fillna(value=0, inplace=True)
else: # Just assume 0
wc_df['NH4-N'] = 0
# 1. Convert to ueq/l
# 1.1. pH to H+
wc_df['EH'] = 1E6 * 10**(-wc_df['pH'])
# 1.2. Other pars
for par in ['SO4', 'Cl', 'Mg', 'Ca', 'NO3-N', 'K', 'Na', 'NH4-N']:
val = chem_df.at[par, 'valency']
mm = chem_df.at[par, 'molar_mass']
if par == 'NO3-N':
wc_df['ENO3'] = wc_df[par] * val / mm
elif par == 'NH4-N':
wc_df['ENH4'] = wc_df[par] * val / mm
else:
wc_df['E%s' % par] = wc_df[par] * val * 1000. / mm
# 2. Apply sea-salt correction
for par in ['ESO4', 'EMg', 'ECa']:
ref = chem_df.at[par[1:], 'resa2_ref_ratio']
wc_df['%sX' % par] = wc_df[par] - (ref*wc_df['ECl'])
# 3. Calculate combinations
# 3.1. ESO4 + ECl
wc_df['ESO4_ECl'] = wc_df['ESO4'] + wc_df['ECl']
# 3.2. ECa + EMg
wc_df['ECa_EMg'] = wc_df['ECa'] + wc_df['EMg']
# 3.3. ECaX + EMgX
wc_df['ECaX_EMgX'] = wc_df['ECaX'] + wc_df['EMgX']
# 3.4. ANC = (ECa+EMg+EK+ENa+ENH4) - (ECl+ESO4+ENO3)
wc_df['ANC'] = ((wc_df['ECa'] + wc_df['EMg'] + wc_df['EK'] +
wc_df['ENa'] + wc_df['ENH4']) -
(wc_df['ECl'] + wc_df['ESO4'] + wc_df['ENO3']))
# 3.5. ANCX = (ECaX+EMgX+EK+ENa+ENH4) - (ECl+ESO4X+ENO3)
wc_df['ANCX'] = ((wc_df['ECaX'] + wc_df['EMgX'] + wc_df['EK'] +
wc_df['ENa'] + wc_df['ENH4']) -
(wc_df['ECl'] + wc_df['ESO4X'] + wc_df['ENO3']))
# 4. Delete unnecessary columns and tidy
for col in ['SO4', 'Cl', 'Mg', 'Ca', 'NO3-N', 'K', 'Na', 'NH4-N', 'pH',
'EMg', 'ECa', 'EK', 'ENa', 'ENH4', 'EMgX', 'ECaX']:
del wc_df[col]
wc_df.reset_index(inplace=True)
return wc_df
def run_trend_analysis(proj_list, engine, st_yr=None, end_yr=None,
plot=False, fold=None):
""" Run the trend analysis for the specified projects and time period.
Args:
proj_list: List of RESA2 project names for which to extract data
engine: SQLAlchemy 'engine' object already connected to RESA2
st_yr: First year to include in analysis. Pass None to start
at the beginning of the series
        end_yr:    Last year to include in analysis. Pass None to continue
                   to the end of the series
plot: Whether to generate a PNG plot of the Sen's slope
regression
fold: Folder in which to save PNGs if plot=True
Returns:
[res_df, dup_df, no_data_df]. Dataframe of statistics; dataframe of
duplicated water chemistry values for investigation; dataframe of
stations with no relevant data in the period of interest
"""
import pandas as pd, os
# Check paths valid
if plot:
assert os.path.isdir(fold), 'The specified folder does not exist.'
# Get raw data from db
print ('Extracting data from RESA2...')
stn_df, wc_df, dup_df = read_resa2(proj_list, engine)
# Identify stations with no relevant records
stns_no_data = (set(stn_df['station_id'].values) -
set(wc_df.index.get_level_values('station_id')))
if len(stns_no_data) > 0:
print (' Some stations have no relevant data in the period '
'specified. Their IDs are returned in a separate dataframe.\n')
no_data_df = pd.DataFrame({'station_id':list(stns_no_data)})
else:
no_data_df = None
print (' Done.')
# Convert units and apply sea-salt correction
print ('\nConverting units and applying sea-salt correction...')
wc_df = conv_units_and_correct(wc_df)
print (' Done.')
# Calculate stats
# Container for output
df_list = []
# Loop over sites
print ('\nCalculating statistics...')
for stn_id in wc_df['station_id'].unique():
# Extract data for this site
df = wc_df.query('station_id == @stn_id')
# Modify col names
names = list(df.columns)
names[:2] = ['STATION_ID', 'YEAR']
df.columns = names
# Heleen wants the annual time series for each site for further analysis
# Write df to output
#out_ann_fold = (r'../../../Thematic_Trends_Report_2019/results/annual_chemistry_series')
#out_ann_path = os.path.join(out_ann_fold, 'stn_%s.csv' % stn_id)
#df_trunc = df.query('(YEAR>=1990) & (YEAR<=2016)') # Truncate to 1990 to 2016
#df_trunc.to_csv(out_ann_path)
# Run analysis
df_list.append(wc_stats(df, st_yr=st_yr, end_yr=end_yr,
plot=plot, fold=fold))
res_df = pd.concat(df_list, axis=0)
# Convert station_id cols to ints
res_df['station_id'] = res_df['station_id'].map(int)
dup_df['station_id'] = dup_df['station_id'].map(int)
if no_data_df is not None:
no_data_df['station_id'] = no_data_df['station_id'].map(int)
print (' Done.')
print ('\nFinished.')
return res_df, dup_df, no_data_df | wc_stats |
exp1_oracle_pygame_latent_reward_1ob.py | import rlkit.misc.hyperparameter as hyp
from multiworld.envs.pygame import PickAndPlaceEnv
from rlkit.launchers.launcher_util import run_experiment
from rlkit.torch.sets.rl_launcher import disco_experiment
if __name__ == "__main__":
variant = dict(
env_class=PickAndPlaceEnv,
env_kwargs=dict(
# Environment dynamics
action_scale=1.0,
boundary_dist=4,
ball_radius=1.5,
object_radius=1.,
ball_visual_radius=1.5,
object_visual_radius=1.,
min_grab_distance=1.,
walls=None,
# Rewards
action_l2norm_penalty=0,
reward_type="dense",
success_threshold=0.60,
# Reset settings
fixed_goal=None,
# Visualization settings
images_are_rgb=True,
render_dt_msec=0,
render_onscreen=False,
render_size=84,
show_goal=False,
goal_samplers=None,
goal_sampling_mode='random',
num_presampled_goals=10000,
object_reward_only=False,
init_position_strategy='random',
num_objects=1,
),
qf_kwargs=dict(
hidden_sizes=[400, 300],
),
policy_kwargs=dict(
hidden_sizes=[400, 300],
),
sac_trainer_kwargs=dict(
discount=0.99,
soft_target_tau=1e-3,
target_update_period=1,
use_automatic_entropy_tuning=True,
reward_scale='auto_normalize_by_max_magnitude',
),
max_path_length=100,
algo_kwargs=dict(
batch_size=128,
num_epochs=501,
num_eval_steps_per_epoch=3000,
num_expl_steps_per_train_loop=1000,
num_trains_per_train_loop=1000,
min_num_steps_before_training=1000,
),
# max_path_length=2,
# algo_kwargs=dict(
# batch_size=5,
# num_epochs=1,
# num_eval_steps_per_epoch=2*20,
# num_expl_steps_per_train_loop=2*20,
# num_trains_per_train_loop=10,
# min_num_steps_before_training=10,
# ),
replay_buffer_kwargs=dict(
fraction_future_context=0.0,
fraction_distribution_context=0.8,
max_size=int(1e6),
),
save_video=True,
save_video_kwargs=dict(
save_video_period=10,
pad_color=50,
subpad_length=1,
pad_length=1,
num_columns_per_rollout=2,
num_imgs=8,
# rows=2,
# columns=9,
),
renderer_kwargs=dict(
# create_image_format='HWC',
# output_image_format='CWH',
output_image_format='CHW',
# flatten_image=True,
# normalize_image=False,
),
create_vae_kwargs=dict(
latent_dim=128, | strides=[3, 2, 2],
paddings=[0, 0, 0],
pool_type='none',
hidden_activation='relu',
normalization_type='layer',
),
encoder_mlp_kwargs=dict(
hidden_sizes=[],
),
decoder_dcnn_kwargs=dict(
kernel_sizes=[3, 3, 6],
n_channels=[32, 16, 3],
strides=[2, 2, 3],
paddings=[0, 0, 0],
),
decoder_mlp_kwargs=dict(
hidden_sizes=[256, 256],
),
use_fancy_architecture=True,
decoder_distribution='gaussian_learned_global_scalar_variance',
),
vae_trainer_kwargs=dict(
vae_lr=1e-3,
vae_visualization_config=dict(
num_recons=5,
num_samples=20,
# debug_period=50,
debug_period=20,
unnormalize_images=True,
image_format='CHW',
),
beta=1,
set_loss_weight=0,
),
data_loader_kwargs=dict(
batch_size=128,
),
vae_algo_kwargs=dict(
num_iters=501,
num_epochs_per_iter=1,
progress_csv_file_name='vae_progress.csv',
),
generate_set_for_vae_pretraining_kwargs=dict(
num_sets=3,
num_samples_per_set=128,
),
generate_set_for_rl_kwargs=dict(
num_sets=3,
num_samples_per_set=128,
# save_to_filename='3sets128samples_2objs.pickle',
saved_filename='/global/scratch/vitchyr/doodad-log-since-07-10-2020/manual-upload/sets/hand2xy_hand2x_1obj2xy_1obj2x_num_objs_1.pickle',
),
num_ungrouped_images=12800,
reward_fn_kwargs=dict(
drop_log_det_term=True,
sqrt_reward=True,
),
rig=False,
rig_goal_setter_kwargs=dict(
use_random_goal=True,
),
use_ground_truth_reward=True,
)
n_seeds = 1
mode = 'local'
exp_prefix = 'dev-{}'.format(
__file__.replace('/', '-').replace('_', '-').split('.')[0]
)
n_seeds = 3
mode = 'sss'
exp_prefix = 'exp2-oracle-pygame-latent-reward-1-obj'
search_space = {
'vae_algo_kwargs.num_iters': [501],
# 'algo_kwargs.num_epochs': [1],
'observation_key': [
'state_observation',
],
'use_ground_truth_reward': [
False,
],
'use_onehot_set_embedding': [
True,
],
'use_dummy_model': [
False,
],
}
sweeper = hyp.DeterministicHyperparameterSweeper(
search_space, default_parameters=variant,
)
variants = list(sweeper.iterate_hyperparameters())
for _ in range(n_seeds):
for exp_id, variant in enumerate(variants):
if mode == 'local':
variant['vae_algo_kwargs']['num_iters'] = 0
variant['generate_set_for_rl_kwargs']['saved_filename'] = (
'manual-upload/sets/hand2xy_hand2x_1obj2xy_1obj2x_num_objs_1.pickle'
)
variant['algo_kwargs'] = dict(
batch_size=5,
num_epochs=1,
num_eval_steps_per_epoch=2*20,
num_expl_steps_per_train_loop=2*20,
num_trains_per_train_loop=10,
min_num_steps_before_training=10,
)
variant['max_path_length'] = 2
run_experiment(
disco_experiment,
exp_name=exp_prefix,
num_exps_per_instance=2,
mode=mode,
variant=variant,
# slurm_config_name='cpu',
use_gpu=True,
# gpu_id=1,
) | encoder_cnn_kwargs=dict(
kernel_sizes=[5, 3, 3],
n_channels=[16, 32, 64], |
printer.rs | // Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
// TODO(ry) This module builds up output by appending to a string. Instead it
// should either use a formatting trait
// https://doc.rust-lang.org/std/fmt/index.html#formatting-traits
// Or perhaps implement a Serializer for serde
// https://docs.serde.rs/serde/ser/trait.Serializer.html
// TODO(ry) The methods in this module take ownership of the DocNodes, this is
// unnecessary and can result in unnecessary copying. Instead they should take
// references.
use crate::colors;
use crate::doc;
use crate::doc::ts_type::TsTypeDefKind;
use crate::doc::DocNodeKind;
use crate::swc_ecma_ast;
pub fn format(doc_nodes: Vec<doc::DocNode>) -> String {
format_(doc_nodes, 0)
}
pub fn format_details(node: doc::DocNode) -> String {
let mut details = String::new();
details.push_str(&format!(
"{}",
colors::gray(format!(
"Defined in {}:{}:{} \n\n",
node.location.filename, node.location.line, node.location.col
))
));
details.push_str(&format_signature(&node, 0));
let js_doc = node.js_doc.clone();
if let Some(js_doc) = js_doc {
details.push_str(&format_jsdoc(js_doc, false, 1));
}
details.push_str("\n");
let maybe_extra = match node.kind {
DocNodeKind::Class => Some(format_class_details(node)),
DocNodeKind::Namespace => Some(format_namespace_details(node)),
_ => None,
};
if let Some(extra) = maybe_extra {
details.push_str(&extra);
}
details
}
fn kind_order(kind: &doc::DocNodeKind) -> i64 {
match kind {
DocNodeKind::Function => 0,
DocNodeKind::Variable => 1,
DocNodeKind::Class => 2,
DocNodeKind::Enum => 3,
DocNodeKind::Interface => 4,
DocNodeKind::TypeAlias => 5,
DocNodeKind::Namespace => 6,
}
}
fn format_signature(node: &doc::DocNode, indent: i64) -> String {
match node.kind {
DocNodeKind::Function => format_function_signature(&node, indent),
DocNodeKind::Variable => format_variable_signature(&node, indent),
DocNodeKind::Class => format_class_signature(&node, indent),
DocNodeKind::Enum => format_enum_signature(&node, indent),
DocNodeKind::Interface => format_interface_signature(&node, indent),
DocNodeKind::TypeAlias => format_type_alias_signature(&node, indent),
DocNodeKind::Namespace => format_namespace_signature(&node, indent),
}
}
fn format_(doc_nodes: Vec<doc::DocNode>, indent: i64) -> String {
let mut sorted = doc_nodes;
sorted.sort_unstable_by(|a, b| {
let kind_cmp = kind_order(&a.kind).cmp(&kind_order(&b.kind));
if kind_cmp == core::cmp::Ordering::Equal {
a.name.cmp(&b.name)
} else {
kind_cmp
}
});
let mut output = String::new();
for node in sorted {
output.push_str(&format_signature(&node, indent));
if let Some(js_doc) = node.js_doc {
output.push_str(&format_jsdoc(js_doc, true, indent));
}
output.push_str("\n");
if DocNodeKind::Namespace == node.kind {
output.push_str(&format_(
node.namespace_def.as_ref().unwrap().elements.clone(),
indent + 1,
));
output.push_str("\n");
};
}
output
}
fn render_params(params: Vec<doc::ParamDef>) -> String {
let mut rendered = String::from("");
if !params.is_empty() {
for param in params {
rendered += param.name.as_str();
if param.optional {
rendered += "?";
}
if let Some(ts_type) = param.ts_type {
rendered += ": ";
rendered += render_ts_type(ts_type).as_str();
}
rendered += ", ";
}
rendered.truncate(rendered.len() - 2);
}
rendered
}
fn render_ts_type(ts_type: doc::ts_type::TsTypeDef) -> String {
if ts_type.kind.is_none() {
return "<UNIMPLEMENTED>".to_string();
}
let kind = ts_type.kind.unwrap();
match kind {
TsTypeDefKind::Array => {
format!("{}[]", render_ts_type(*ts_type.array.unwrap()))
}
TsTypeDefKind::Conditional => {
let conditional = ts_type.conditional_type.unwrap();
format!(
"{} extends {} ? {} : {}",
render_ts_type(*conditional.check_type),
render_ts_type(*conditional.extends_type),
render_ts_type(*conditional.true_type),
render_ts_type(*conditional.false_type)
)
}
TsTypeDefKind::FnOrConstructor => {
let fn_or_constructor = ts_type.fn_or_constructor.unwrap();
format!(
"{}({}) => {}",
if fn_or_constructor.constructor {
"new "
} else {
""
},
render_params(fn_or_constructor.params),
render_ts_type(fn_or_constructor.ts_type),
)
}
TsTypeDefKind::IndexedAccess => {
let indexed_access = ts_type.indexed_access.unwrap();
format!(
"{}[{}]",
render_ts_type(*indexed_access.obj_type),
render_ts_type(*indexed_access.index_type)
)
}
TsTypeDefKind::Intersection => {
let intersection = ts_type.intersection.unwrap();
let mut output = "".to_string();
if !intersection.is_empty() {
for ts_type in intersection {
output += render_ts_type(ts_type).as_str();
output += " & "
}
output.truncate(output.len() - 3);
}
output
}
TsTypeDefKind::Keyword => ts_type.keyword.unwrap(),
TsTypeDefKind::Literal => {
let literal = ts_type.literal.unwrap();
match literal.kind {
doc::ts_type::LiteralDefKind::Boolean => {
format!("{}", literal.boolean.unwrap())
}
doc::ts_type::LiteralDefKind::String => {
"\"".to_string() + literal.string.unwrap().as_str() + "\""
}
doc::ts_type::LiteralDefKind::Number => {
format!("{}", literal.number.unwrap())
}
}
}
TsTypeDefKind::Optional => {
format!("{}?", render_ts_type(*ts_type.optional.unwrap()))
}
TsTypeDefKind::Parenthesized => {
format!("({})", render_ts_type(*ts_type.parenthesized.unwrap()))
}
TsTypeDefKind::Rest => {
format!("...{}", render_ts_type(*ts_type.rest.unwrap()))
}
TsTypeDefKind::This => "this".to_string(),
TsTypeDefKind::Tuple => {
let tuple = ts_type.tuple.unwrap();
let mut output = "".to_string();
if !tuple.is_empty() {
for ts_type in tuple {
output += render_ts_type(ts_type).as_str();
output += ", "
}
output.truncate(output.len() - 2);
}
output
}
TsTypeDefKind::TypeLiteral => {
let mut output = "".to_string();
let type_literal = ts_type.type_literal.unwrap();
for node in type_literal.call_signatures {
output += format!(
"({}){}, ",
render_params(node.params),
if let Some(ts_type) = node.ts_type {
format!(": {}", render_ts_type(ts_type))
} else {
"".to_string()
}
)
.as_str()
}
for node in type_literal.methods {
output += format!(
"{}({}){}, ",
node.name,
render_params(node.params),
if let Some(return_type) = node.return_type {
format!(": {}", render_ts_type(return_type))
} else {
"".to_string()
}
)
.as_str()
}
for node in type_literal.properties {
output += format!(
"{}{}, ",
node.name,
if let Some(ts_type) = node.ts_type {
format!(": {}", render_ts_type(ts_type))
} else {
"".to_string()
}
)
.as_str()
}
if !output.is_empty() {
output.truncate(output.len() - 2);
}
"{ ".to_string() + output.as_str() + " }"
}
TsTypeDefKind::TypeOperator => {
let operator = ts_type.type_operator.unwrap();
format!("{} {}", operator.operator, render_ts_type(operator.ts_type))
}
TsTypeDefKind::TypeQuery => {
format!("typeof {}", ts_type.type_query.unwrap())
}
TsTypeDefKind::TypeRef => {
let type_ref = ts_type.type_ref.unwrap();
let mut final_output = type_ref.type_name;
if let Some(type_params) = type_ref.type_params {
let mut output = "".to_string();
if !type_params.is_empty() {
for ts_type in type_params {
output += render_ts_type(ts_type).as_str();
output += ", "
}
output.truncate(output.len() - 2);
}
final_output += format!("<{}>", output).as_str();
}
final_output
}
TsTypeDefKind::Union => {
let union = ts_type.union.unwrap();
let mut output = "".to_string();
if !union.is_empty() {
for ts_type in union {
output += render_ts_type(ts_type).as_str();
output += " | "
}
output.truncate(output.len() - 3);
}
output
}
}
}
fn add_indent(string: String, indent: i64) -> String {
let mut indent_str = String::new();
for _ in 0..(indent * 2) {
indent_str += " ";
}
indent_str += string.as_str();
indent_str
}
// TODO: this should use some sort of markdown-to-console parser.
fn format_jsdoc(jsdoc: String, truncated: bool, indent: i64) -> String {
let mut lines = jsdoc.split("\n\n").map(|line| line.replace("\n", " "));
let mut js_doc = String::new();
if truncated {
let first_line = lines.next().unwrap_or_else(|| "".to_string());
js_doc.push_str(&add_indent(format!("{}\n", first_line), indent + 1));
} else {
for line in lines {
js_doc.push_str(&add_indent(format!("{}\n", line), indent + 1));
}
}
format!("{}", colors::gray(js_doc))
}
fn format_class_details(node: doc::DocNode) -> String {
let mut details = String::new();
let class_def = node.class_def.unwrap();
for node in class_def.constructors {
details.push_str(&add_indent(
format!(
"{} {}({})\n",
colors::magenta("constructor".to_string()),
colors::bold(node.name),
render_params(node.params),
),
1,
));
}
for node in class_def.properties.iter().filter(|node| {
node
.accessibility
.unwrap_or(swc_ecma_ast::Accessibility::Public)
!= swc_ecma_ast::Accessibility::Private
}) {
details.push_str(&add_indent(
format!(
"{}{}{}{}\n",
colors::magenta(
match node
.accessibility
.unwrap_or(swc_ecma_ast::Accessibility::Public)
{
swc_ecma_ast::Accessibility::Protected => "protected ".to_string(),
_ => "".to_string(),
}
),
colors::bold(node.name.clone()),
if node.optional {
"?".to_string()
} else {
"".to_string()
},
if let Some(ts_type) = node.ts_type.clone() {
format!(": {}", render_ts_type(ts_type))
} else {
"".to_string()
}
),
1,
));
}
for node in class_def.methods.iter().filter(|node| {
node
.accessibility
.unwrap_or(swc_ecma_ast::Accessibility::Public)
!= swc_ecma_ast::Accessibility::Private
}) {
let function_def = node.function_def.clone();
details.push_str(&add_indent(
format!(
"{}{}{}{}({}){}\n",
colors::magenta(
match node
.accessibility
.unwrap_or(swc_ecma_ast::Accessibility::Public)
{
swc_ecma_ast::Accessibility::Protected => "protected ".to_string(),
_ => "".to_string(),
}
),
colors::magenta(match node.kind {
swc_ecma_ast::MethodKind::Getter => "get ".to_string(),
swc_ecma_ast::MethodKind::Setter => "set ".to_string(),
_ => "".to_string(),
}),
colors::bold(node.name.clone()),
if node.optional {
"?".to_string()
} else {
"".to_string()
},
render_params(function_def.params),
if let Some(return_type) = function_def.return_type {
format!(": {}", render_ts_type(return_type))
} else {
"".to_string()
}
),
1,
));
}
details.push_str("\n");
details
}
fn format_namespace_details(node: doc::DocNode) -> String |
fn format_function_signature(node: &doc::DocNode, indent: i64) -> String {
let function_def = node.function_def.clone().unwrap();
add_indent(
format!(
"{} {}({}){}\n",
colors::magenta("function".to_string()),
colors::bold(node.name.clone()),
render_params(function_def.params),
if let Some(return_type) = function_def.return_type {
        format!(": {}", render_ts_type(return_type))
} else {
"".to_string()
}
),
indent,
)
}
fn format_class_signature(node: &doc::DocNode, indent: i64) -> String {
let class_def = node.class_def.clone().unwrap();
let extends_suffix = if let Some(extends) = class_def.extends {
format!(
" {} {}",
colors::magenta("extends".to_string()),
colors::bold(extends)
)
} else {
String::from("")
};
let implements = &class_def.implements;
let implements_suffix = if !implements.is_empty() {
format!(
" {} {}",
colors::magenta("implements".to_string()),
colors::bold(implements.join(", "))
)
} else {
String::from("")
};
add_indent(
format!(
"{} {}{}{}\n",
colors::magenta("class".to_string()),
colors::bold(node.name.clone()),
extends_suffix,
implements_suffix,
),
indent,
)
}
fn format_variable_signature(node: &doc::DocNode, indent: i64) -> String {
let variable_def = node.variable_def.clone().unwrap();
add_indent(
format!(
"{} {}{}\n",
colors::magenta(match variable_def.kind {
swc_ecma_ast::VarDeclKind::Const => "const".to_string(),
swc_ecma_ast::VarDeclKind::Let => "let".to_string(),
swc_ecma_ast::VarDeclKind::Var => "var".to_string(),
}),
colors::bold(node.name.clone()),
if let Some(ts_type) = variable_def.ts_type {
format!(": {}", render_ts_type(ts_type))
} else {
"".to_string()
}
),
indent,
)
}
fn format_enum_signature(node: &doc::DocNode, indent: i64) -> String {
add_indent(
format!(
"{} {}\n",
colors::magenta("enum".to_string()),
colors::bold(node.name.clone())
),
indent,
)
}
fn format_interface_signature(node: &doc::DocNode, indent: i64) -> String {
let interface_def = node.interface_def.clone().unwrap();
let extends = &interface_def.extends;
let extends_suffix = if !extends.is_empty() {
format!(
" {} {}",
colors::magenta("extends".to_string()),
colors::bold(extends.join(", "))
)
} else {
String::from("")
};
add_indent(
format!(
"{} {}{}\n",
colors::magenta("interface".to_string()),
colors::bold(node.name.clone()),
extends_suffix
),
indent,
)
}
fn format_type_alias_signature(node: &doc::DocNode, indent: i64) -> String {
add_indent(
format!(
"{} {}\n",
colors::magenta("type".to_string()),
colors::bold(node.name.clone())
),
indent,
)
}
fn format_namespace_signature(node: &doc::DocNode, indent: i64) -> String {
add_indent(
format!(
"{} {}\n",
colors::magenta("namespace".to_string()),
colors::bold(node.name.clone())
),
indent,
)
}
| {
let mut ns = String::new();
let elements = node.namespace_def.unwrap().elements;
for node in elements {
ns.push_str(&format_signature(&node, 1));
}
ns.push_str("\n");
ns
} |
648. Replace Words.py | """
1. Clarification
2. Possible solutions
- Prefix Hash
- Trie
3. Coding
4. Tests
"""
from typing import List
import collections
import functools

# T=O(sigma(wi^2)), S=O(n), wi=len(i-th word)
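# Example (from the problem statement): dictionary = ["cat", "bat", "rat"] and
# sentence = "the cattle was rattled by the battery" yield
# "the cat was rat by the bat".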
class Solution:
def replaceWords(self, dictionary: List[str], sentence: str) -> str:
def replace(word):
for i in range(1, len(word)):
if word[:i] in rootset:
return word[:i]
return word
rootset = set(dictionary)
return ' '.join(map(replace, sentence.split()))
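# Note: replace() checks prefixes from shortest to longest, so the shortest
# matching root is the one substituted, as the problem requires.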
# T=O(n), S=O(n)
class Solution:
def replaceWords(self, dictionary: List[str], sentence: str) -> str:
def replace(word):
|
Trie = lambda: collections.defaultdict(Trie)
trie = Trie()
END = True
for root in dictionary:
functools.reduce(dict.__getitem__, root, trie)[END] = root
return ' '.join(map(replace, sentence.split()))
| cur = trie
for letter in word:
if letter not in cur or END in cur: break
cur = cur[letter]
return cur.get(END, word) |
utils_test.go | package hcl
import "testing"
func TestJoinPath(t *testing.T) | {
tests := map[string][]string{
"service.echo": {"service", "echo"},
"ping.pong": {"ping.", "pong"},
"call.me": {"call.", "me."},
}
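	// The trailing "." separators in the inputs above are expected to be
	// trimmed by JoinPath before the parts are joined with ".".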
for expected, input := range tests {
result := JoinPath(input...)
if result != expected {
t.Errorf("unexpected result: %s expected %s", result, expected)
}
}
} |
|
Writer.test.ts | import { expect } from 'chai'
import 'mocha'
import { NbtWriter } from '../../src/nbt'
function matches(writer: NbtWriter, data: number[], message?: string) {
expect(writer.getData())
.deep.equal(new Uint8Array(data), message) | it('end', () => {
const writer = new NbtWriter()
writer.end(null)
matches(writer, [])
})
it('byte', () => {
const writer = new NbtWriter()
writer.byte(4)
matches(writer, [4])
})
it('short', () => {
const writer = new NbtWriter()
writer.short(400)
matches(writer, [1, 144])
})
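	// Multi-byte values are written big-endian: 400 is 0x0190, hence the bytes [1, 144].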
it('int', () => {
const writer = new NbtWriter()
writer.int(400000)
matches(writer, [0, 6, 26, 128])
})
it('float', () => {
const writer = new NbtWriter()
writer.float(Math.PI)
matches(writer, [64, 73, 15, 219])
})
it('double', () => {
const writer = new NbtWriter()
writer.double(Math.PI)
matches(writer, [64, 9, 33, 251, 84, 68, 45, 24])
})
it('long', () => {
const writer = new NbtWriter()
writer.long([1546267720, 1763655956])
matches(writer, [92, 42, 44, 72, 105, 31, 65, 20])
})
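	// 64-bit longs are passed as [high, low] pairs of 32-bit words:
	// 0x5C2A2C48 -> [92, 42, 44, 72] and 0x691F4114 -> [105, 31, 65, 20].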
it('byteArray', () => {
const writer = new NbtWriter()
writer.byteArray([1, 3])
matches(writer, [0, 0, 0, 2, 1, 3])
})
it('intArray', () => {
const writer = new NbtWriter()
writer.intArray([1, 3])
matches(writer, [0, 0, 0, 2, 0, 0, 0, 1, 0, 0, 0, 3])
})
it('longArray', () => {
const writer = new NbtWriter()
writer.longArray([[1546267720, 1763655956]])
matches(writer, [0, 0, 0, 1, 92, 42, 44, 72, 105, 31, 65, 20])
})
it('string', () => {
const writer = new NbtWriter()
writer.string('hello')
matches(writer, [0, 5, 104, 101, 108, 108, 111])
})
it('string (large)', () => {
const writer = new NbtWriter()
writer.string('a'.repeat(5000))
matches(writer, [19, 136, ...Array(5000).fill(97)])
})
it('list', () => {
const writer = new NbtWriter()
writer.list({ type: 'byte', value: [1, 3] })
matches(writer, [1, 0, 0, 0, 2, 1, 3])
const writer2 = new NbtWriter()
writer2.list({ type: 'end', value: [] })
matches(writer2, [0, 0, 0, 0, 0])
})
it('compound', () => {
const writer = new NbtWriter()
writer.compound({ foo: { type: 'byte', value: 4 } })
matches(writer, [1, 0, 3, 102, 111, 111, 4, 0])
const writer2 = new NbtWriter()
writer2.compound({})
matches(writer2, [0])
})
it('offset', () => {
const writer = new NbtWriter()
writer.offset = 2
writer.byte(4)
matches(writer, [0, 0, 4])
})
it('offset (outside buffer)', () => {
const writer = new NbtWriter()
writer.offset = 2000
writer.byte(4)
matches(writer, [...Array(2000).fill(0), 4])
})
}) | }
describe('Writer', () => { |
parsed_query_test.go | /*
Copyright 2019 The Vitess Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sqlparser
import (
"reflect"
"testing"
"github.com/Bill-cc/go-sql2struct/sqltypes"
querypb "github.com/Bill-cc/go-sql2struct/proto/query"
)
func TestNewParsedQuery(t *testing.T) {
stmt, err := Parse("select * from a where id =:id")
if err != nil {
t.Error(err)
return
}
pq := NewParsedQuery(stmt)
want := &ParsedQuery{
Query: "select * from a where id = :id",
bindLocations: []bindLocation{{offset: 27, length: 3}},
}
if !reflect.DeepEqual(pq, want) {
t.Errorf("GenerateParsedQuery: %+v, want %+v", pq, want)
}
}
func TestGenerateQuery(t *testing.T) {
tcases := []struct {
desc string
query string
bindVars map[string]*querypb.BindVariable
extras map[string]Encodable
output string
}{
{
desc: "no substitutions",
query: "select * from a where id = 2",
bindVars: map[string]*querypb.BindVariable{
"id": sqltypes.Int64BindVariable(1),
},
output: "select * from a where id = 2",
}, {
desc: "missing bind var",
query: "select * from a where id1 = :id1 and id2 = :id2",
bindVars: map[string]*querypb.BindVariable{
"id1": sqltypes.Int64BindVariable(1),
},
output: "missing bind var id2",
}, {
desc: "simple bindvar substitution",
query: "select * from a where id1 = :id1 and id2 = :id2",
bindVars: map[string]*querypb.BindVariable{
"id1": sqltypes.Int64BindVariable(1),
"id2": sqltypes.NullBindVariable,
},
output: "select * from a where id1 = 1 and id2 = null",
}, {
desc: "tuple *querypb.BindVariable",
query: "select * from a where id in ::vals",
bindVars: map[string]*querypb.BindVariable{
"vals": sqltypes.TestBindVariable([]interface{}{1, "aa"}),
},
output: "select * from a where id in (1, 'aa')",
}, {
desc: "list bind vars 0 arguments",
query: "select * from a where id in ::vals",
bindVars: map[string]*querypb.BindVariable{
"vals": sqltypes.TestBindVariable([]interface{}{}),
},
output: "empty list supplied for vals",
}, {
desc: "non-list bind var supplied",
query: "select * from a where id in ::vals",
bindVars: map[string]*querypb.BindVariable{
"vals": sqltypes.Int64BindVariable(1),
},
output: "unexpected list arg type (INT64) for key vals",
}, {
desc: "list bind var for non-list",
query: "select * from a where id = :vals",
bindVars: map[string]*querypb.BindVariable{
"vals": sqltypes.TestBindVariable([]interface{}{1}),
},
output: "unexpected arg type (TUPLE) for non-list key vals",
}, {
desc: "single column tuple equality",
query: "select * from a where b = :equality",
extras: map[string]Encodable{
"equality": &TupleEqualityList{
Columns: []ColIdent{NewColIdent("pk")},
Rows: [][]sqltypes.Value{
{sqltypes.NewInt64(1)},
{sqltypes.NewVarBinary("aa")},
},
},
},
output: "select * from a where b = pk in (1, 'aa')",
}, {
desc: "multi column tuple equality",
query: "select * from a where b = :equality",
extras: map[string]Encodable{
"equality": &TupleEqualityList{
Columns: []ColIdent{NewColIdent("pk1"), NewColIdent("pk2")},
Rows: [][]sqltypes.Value{
{
sqltypes.NewInt64(1),
sqltypes.NewVarBinary("aa"),
},
{
sqltypes.NewInt64(2),
sqltypes.NewVarBinary("bb"),
},
},
},
},
output: "select * from a where b = (pk1 = 1 and pk2 = 'aa') or (pk1 = 2 and pk2 = 'bb')",
},
}
for _, tcase := range tcases {
tree, err := Parse(tcase.query)
if err != nil {
t.Errorf("parse failed for %s: %v", tcase.desc, err)
continue
}
buf := NewTrackedBuffer(nil)
buf.Myprintf("%v", tree)
pq := buf.ParsedQuery()
bytes, err := pq.GenerateQuery(tcase.bindVars, tcase.extras)
var got string
if err != nil {
got = err.Error()
} else {
got = string(bytes)
}
if got != tcase.output {
t.Errorf("for test case: %s, got: '%s', want '%s'", tcase.desc, got, tcase.output)
} | }
} |
|
clientset_generated.go | /*
Copyright 2021 The Knative Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by client-gen. DO NOT EDIT.
package fake
import (
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/watch"
"k8s.io/client-go/discovery"
fakediscovery "k8s.io/client-go/discovery/fake"
"k8s.io/client-go/testing"
clientset "knative.dev/eventing/pkg/client/clientset/versioned"
configsv1alpha1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/configs/v1alpha1"
fakeconfigsv1alpha1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/configs/v1alpha1/fake"
eventingv1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/eventing/v1"
fakeeventingv1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/eventing/v1/fake"
eventingv1beta1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/eventing/v1beta1"
fakeeventingv1beta1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/eventing/v1beta1/fake"
flowsv1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/flows/v1"
fakeflowsv1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/flows/v1/fake"
flowsv1beta1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/flows/v1beta1"
fakeflowsv1beta1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/flows/v1beta1/fake"
messagingv1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/messaging/v1"
fakemessagingv1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/messaging/v1/fake"
messagingv1beta1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/messaging/v1beta1"
fakemessagingv1beta1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/messaging/v1beta1/fake"
sourcesv1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/sources/v1"
fakesourcesv1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/sources/v1/fake"
sourcesv1alpha1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/sources/v1alpha1"
fakesourcesv1alpha1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/sources/v1alpha1/fake"
sourcesv1alpha2 "knative.dev/eventing/pkg/client/clientset/versioned/typed/sources/v1alpha2"
fakesourcesv1alpha2 "knative.dev/eventing/pkg/client/clientset/versioned/typed/sources/v1alpha2/fake"
sourcesv1beta1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/sources/v1beta1"
fakesourcesv1beta1 "knative.dev/eventing/pkg/client/clientset/versioned/typed/sources/v1beta1/fake"
sourcesv1beta2 "knative.dev/eventing/pkg/client/clientset/versioned/typed/sources/v1beta2"
fakesourcesv1beta2 "knative.dev/eventing/pkg/client/clientset/versioned/typed/sources/v1beta2/fake"
)
// NewSimpleClientset returns a clientset that will respond with the provided objects.
// It's backed by a very simple object tracker that processes creates, updates and deletions as-is,
// without applying any validations and/or defaults. It shouldn't be considered a replacement
// for a real clientset and is mostly useful in simple unit tests.
func | (objects ...runtime.Object) *Clientset {
o := testing.NewObjectTracker(scheme, codecs.UniversalDecoder())
for _, obj := range objects {
if err := o.Add(obj); err != nil {
panic(err)
}
}
cs := &Clientset{tracker: o}
cs.discovery = &fakediscovery.FakeDiscovery{Fake: &cs.Fake}
cs.AddReactor("*", "*", testing.ObjectReaction(o))
cs.AddWatchReactor("*", func(action testing.Action) (handled bool, ret watch.Interface, err error) {
gvr := action.GetResource()
ns := action.GetNamespace()
watch, err := o.Watch(gvr, ns)
if err != nil {
return false, nil, err
}
return true, watch, nil
})
return cs
}
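// Example (hypothetical usage sketch, not generated code): a test can seed the fake
// clientset and read objects back through the typed interfaces, e.g.
//
//	cs := NewSimpleClientset(existingBroker)
//	broker, err := cs.EventingV1().Brokers("default").Get(ctx, "my-broker", metav1.GetOptions{})
//
// The object, namespace and name above are assumptions for illustration only; all
// reads and writes go through the in-memory ObjectTracker.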
// Clientset implements clientset.Interface. Meant to be embedded into a
// struct to get a default implementation. This makes faking out just the method
// you want to test easier.
type Clientset struct {
testing.Fake
discovery *fakediscovery.FakeDiscovery
tracker testing.ObjectTracker
}
func (c *Clientset) Discovery() discovery.DiscoveryInterface {
return c.discovery
}
func (c *Clientset) Tracker() testing.ObjectTracker {
return c.tracker
}
var _ clientset.Interface = &Clientset{}
// ConfigsV1alpha1 retrieves the ConfigsV1alpha1Client
func (c *Clientset) ConfigsV1alpha1() configsv1alpha1.ConfigsV1alpha1Interface {
return &fakeconfigsv1alpha1.FakeConfigsV1alpha1{Fake: &c.Fake}
}
// EventingV1beta1 retrieves the EventingV1beta1Client
func (c *Clientset) EventingV1beta1() eventingv1beta1.EventingV1beta1Interface {
return &fakeeventingv1beta1.FakeEventingV1beta1{Fake: &c.Fake}
}
// EventingV1 retrieves the EventingV1Client
func (c *Clientset) EventingV1() eventingv1.EventingV1Interface {
return &fakeeventingv1.FakeEventingV1{Fake: &c.Fake}
}
// FlowsV1beta1 retrieves the FlowsV1beta1Client
func (c *Clientset) FlowsV1beta1() flowsv1beta1.FlowsV1beta1Interface {
return &fakeflowsv1beta1.FakeFlowsV1beta1{Fake: &c.Fake}
}
// FlowsV1 retrieves the FlowsV1Client
func (c *Clientset) FlowsV1() flowsv1.FlowsV1Interface {
return &fakeflowsv1.FakeFlowsV1{Fake: &c.Fake}
}
// MessagingV1beta1 retrieves the MessagingV1beta1Client
func (c *Clientset) MessagingV1beta1() messagingv1beta1.MessagingV1beta1Interface {
return &fakemessagingv1beta1.FakeMessagingV1beta1{Fake: &c.Fake}
}
// MessagingV1 retrieves the MessagingV1Client
func (c *Clientset) MessagingV1() messagingv1.MessagingV1Interface {
return &fakemessagingv1.FakeMessagingV1{Fake: &c.Fake}
}
// SourcesV1alpha1 retrieves the SourcesV1alpha1Client
func (c *Clientset) SourcesV1alpha1() sourcesv1alpha1.SourcesV1alpha1Interface {
return &fakesourcesv1alpha1.FakeSourcesV1alpha1{Fake: &c.Fake}
}
// SourcesV1alpha2 retrieves the SourcesV1alpha2Client
func (c *Clientset) SourcesV1alpha2() sourcesv1alpha2.SourcesV1alpha2Interface {
return &fakesourcesv1alpha2.FakeSourcesV1alpha2{Fake: &c.Fake}
}
// SourcesV1beta1 retrieves the SourcesV1beta1Client
func (c *Clientset) SourcesV1beta1() sourcesv1beta1.SourcesV1beta1Interface {
return &fakesourcesv1beta1.FakeSourcesV1beta1{Fake: &c.Fake}
}
// SourcesV1beta2 retrieves the SourcesV1beta2Client
func (c *Clientset) SourcesV1beta2() sourcesv1beta2.SourcesV1beta2Interface {
return &fakesourcesv1beta2.FakeSourcesV1beta2{Fake: &c.Fake}
}
// SourcesV1 retrieves the SourcesV1Client
func (c *Clientset) SourcesV1() sourcesv1.SourcesV1Interface {
return &fakesourcesv1.FakeSourcesV1{Fake: &c.Fake}
}
| NewSimpleClientset |
get_all_users.py | #!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all users.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
def main(client):
# Initialize appropriate service.
|
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client)
| user_service = client.GetService('UserService', version='v201911')
# Create a statement to select users.
statement = ad_manager.StatementBuilder(version='v201911')
# Retrieve a small amount of users at a time, paging
# through until all users have been retrieved.
while True:
response = user_service.getUsersByStatement(statement.ToStatement())
if 'results' in response and len(response['results']):
for user in response['results']:
# Print out some information for each user.
print('User with ID "%d" and name "%s" was found.\n' % (user['id'],
user['name']))
statement.offset += statement.limit
else:
break
print('\nNumber of results found: %s' % response['totalResultSetSize']) |
admission_test.go | /*
Copyright 2014 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package scdeny
import (
"testing"
"k8s.io/kubernetes/pkg/admission"
"k8s.io/kubernetes/pkg/api"
)
// ensures the SecurityContext is denied if it defines anything more than Caps or Privileged
func TestAdmission(t *testing.T) {
handler := NewSecurityContextDeny(nil)
var runAsUser int64 = 1
priv := true
cases := []struct {
name string
sc *api.SecurityContext
podSc *api.PodSecurityContext
expectError bool
}{
{
name: "unset",
},
{
name: "empty container.SecurityContext",
sc: &api.SecurityContext{},
},
{
name: "empty pod.Spec.SecurityContext",
podSc: &api.PodSecurityContext{},
},
{
name: "valid container.SecurityContext",
sc: &api.SecurityContext{Privileged: &priv, Capabilities: &api.Capabilities{}},
},
{
name: "valid pod.Spec.SecurityContext",
podSc: &api.PodSecurityContext{},
},
{
name: "container.SecurityContext.RunAsUser",
sc: &api.SecurityContext{RunAsUser: &runAsUser},
expectError: true,
},
{
name: "container.SecurityContext.SELinuxOptions",
sc: &api.SecurityContext{SELinuxOptions: &api.SELinuxOptions{}},
expectError: true,
},
{
name: "pod.Spec.SecurityContext.RunAsUser",
podSc: &api.PodSecurityContext{RunAsUser: &runAsUser},
expectError: true,
},
{
name: "pod.Spec.SecurityContext.SELinuxOptions",
podSc: &api.PodSecurityContext{SELinuxOptions: &api.SELinuxOptions{}},
expectError: true,
},
}
for _, tc := range cases {
pod := pod()
pod.Spec.SecurityContext = tc.podSc
pod.Spec.Containers[0].SecurityContext = tc.sc
err := handler.Admit(admission.NewAttributesRecord(pod, api.Kind("Pod"), "foo", "name", api.Resource("pods"), "", "ignored", nil))
if err != nil && !tc.expectError {
t.Errorf("%v: unexpected error: %v", tc.name, err)
} else if err == nil && tc.expectError {
t.Errorf("%v: expected error", tc.name)
}
}
}
func TestPodSecurityContextAdmission(t *testing.T) {
handler := NewSecurityContextDeny(nil)
pod := api.Pod{
Spec: api.PodSpec{
Containers: []api.Container{
{},
},
},
}
fsGroup := int64(1001)
tests := []struct {
securityContext api.PodSecurityContext
errorExpected bool
}{
{
securityContext: api.PodSecurityContext{},
errorExpected: false,
},
{
securityContext: api.PodSecurityContext{
SupplementalGroups: []int64{1234},
},
errorExpected: true,
},
{
securityContext: api.PodSecurityContext{
FSGroup: &fsGroup,
},
errorExpected: true,
},
}
for _, test := range tests {
pod.Spec.SecurityContext = &test.securityContext
err := handler.Admit(admission.NewAttributesRecord(&pod, api.Kind("Pod"), "foo", "name", api.Resource("pods"), "", "ignored", nil))
if test.errorExpected && err == nil {
t.Errorf("Expected error for security context %+v but did not get an error", test.securityContext)
} |
if !test.errorExpected && err != nil {
t.Errorf("Unexpected error %v for security context %+v", err, test.securityContext)
}
}
}
func TestHandles(t *testing.T) {
handler := NewSecurityContextDeny(nil)
tests := map[admission.Operation]bool{
admission.Update: true,
admission.Create: true,
admission.Delete: false,
admission.Connect: false,
}
for op, expected := range tests {
result := handler.Handles(op)
if result != expected {
t.Errorf("Unexpected result for operation %s: %v\n", op, result)
}
}
}
func pod() *api.Pod {
return &api.Pod{
Spec: api.PodSpec{
Containers: []api.Container{
{},
},
},
}
} | |
test_openssl.rs | #![cfg(feature = "openssl")]
use std::io;
use actix_http_test::test_server;
use actix_service::{fn_service, ServiceFactory};
use bytes::{Bytes, BytesMut};
use futures_util::future::{err, ok, ready};
use futures_util::stream::{once, Stream, StreamExt};
use open_ssl::ssl::{AlpnError, SslAcceptor, SslFiletype, SslMethod};
use actix_http::error::{ErrorBadRequest, PayloadError};
use actix_http::http::header::{self, HeaderName, HeaderValue};
use actix_http::http::{Method, StatusCode, Version};
use actix_http::httpmessage::HttpMessage;
use actix_http::{body, Error, HttpService, Request, Response};
async fn load_body<S>(stream: S) -> Result<BytesMut, PayloadError>
where
S: Stream<Item = Result<Bytes, PayloadError>>,
{
let body = stream
.map(|res| match res {
Ok(chunk) => chunk,
Err(_) => panic!(),
})
.fold(BytesMut::new(), move |mut body, chunk| {
body.extend_from_slice(&chunk);
ready(body)
})
.await;
Ok(body)
}
fn ssl_acceptor() -> SslAcceptor {
// load ssl keys
let mut builder = SslAcceptor::mozilla_intermediate(SslMethod::tls()).unwrap();
builder
.set_private_key_file("../tests/key.pem", SslFiletype::PEM)
.unwrap();
builder
.set_certificate_chain_file("../tests/cert.pem")
.unwrap();
builder.set_alpn_select_callback(|_, protos| {
const H2: &[u8] = b"\x02h2";
const H11: &[u8] = b"\x08http/1.1";
if protos.windows(3).any(|window| window == H2) {
Ok(b"h2")
} else if protos.windows(9).any(|window| window == H11) {
Ok(b"http/1.1")
} else {
Err(AlpnError::NOACK)
}
});
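    // ALPN protocol lists are length-prefixed byte strings: 0x08 precedes the
    // eight bytes of "http/1.1" and 0x02 precedes the two bytes of "h2".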
builder
.set_alpn_protos(b"\x08http/1.1\x02h2")
        .expect("Can not construct SslAcceptor");
builder.build()
}
#[actix_rt::test]
async fn test_h2() -> io::Result<()> {
let srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, Error>(Response::Ok().finish()))
.openssl(ssl_acceptor())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
Ok(())
}
#[actix_rt::test]
async fn test_h2_1() -> io::Result<()> {
let srv = test_server(move || {
HttpService::build()
.finish(|req: Request| {
assert!(req.peer_addr().is_some());
assert_eq!(req.version(), Version::HTTP_2);
ok::<_, Error>(Response::Ok().finish())
})
.openssl(ssl_acceptor())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
Ok(())
}
#[actix_rt::test]
async fn test_h2_body() -> io::Result<()> {
let data = "HELLOWORLD".to_owned().repeat(64 * 1024);
let mut srv = test_server(move || {
HttpService::build()
.h2(|mut req: Request<_>| async move {
let body = load_body(req.take_payload()).await?;
Ok::<_, Error>(Response::Ok().body(body))
})
.openssl(ssl_acceptor())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send_body(data.clone()).await.unwrap();
assert!(response.status().is_success());
let body = srv.load_body(response).await.unwrap();
assert_eq!(&body, data.as_bytes());
Ok(())
}
#[actix_rt::test]
async fn | () {
let srv = test_server(move || {
HttpService::build()
.h2(|req: Request| {
let indx: usize = req.uri().path()[1..].parse().unwrap();
let statuses = [
StatusCode::NO_CONTENT,
StatusCode::CONTINUE,
StatusCode::SWITCHING_PROTOCOLS,
StatusCode::PROCESSING,
StatusCode::OK,
StatusCode::NOT_FOUND,
];
ok::<_, ()>(Response::new(statuses[indx]))
})
.openssl(ssl_acceptor())
.map_err(|_| ())
})
.await;
let header = HeaderName::from_static("content-length");
let value = HeaderValue::from_static("0");
{
for i in 0..4 {
let req = srv
.request(Method::GET, srv.surl(&format!("/{}", i)))
.send();
let response = req.await.unwrap();
assert_eq!(response.headers().get(&header), None);
let req = srv
.request(Method::HEAD, srv.surl(&format!("/{}", i)))
.send();
let response = req.await.unwrap();
assert_eq!(response.headers().get(&header), None);
}
for i in 4..6 {
let req = srv
.request(Method::GET, srv.surl(&format!("/{}", i)))
.send();
let response = req.await.unwrap();
assert_eq!(response.headers().get(&header), Some(&value));
}
}
}
#[actix_rt::test]
async fn test_h2_headers() {
let data = STR.repeat(10);
let data2 = data.clone();
let mut srv = test_server(move || {
let data = data.clone();
HttpService::build().h2(move |_| {
let mut builder = Response::Ok();
for idx in 0..90 {
builder.header(
format!("X-TEST-{}", idx).as_str(),
"TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST \
TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST ",
);
}
ok::<_, ()>(builder.body(data.clone()))
})
.openssl(ssl_acceptor())
.map_err(|_| ())
}).await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from(data2));
}
const STR: &str = "Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World \
Hello World Hello World Hello World Hello World Hello World";
#[actix_rt::test]
async fn test_h2_body2() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, ()>(Response::Ok().body(STR)))
.openssl(ssl_acceptor())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
}
#[actix_rt::test]
async fn test_h2_head_empty() {
let mut srv = test_server(move || {
HttpService::build()
.finish(|_| ok::<_, ()>(Response::Ok().body(STR)))
.openssl(ssl_acceptor())
.map_err(|_| ())
})
.await;
let response = srv.shead("/").send().await.unwrap();
assert!(response.status().is_success());
assert_eq!(response.version(), Version::HTTP_2);
{
let len = response.headers().get(header::CONTENT_LENGTH).unwrap();
assert_eq!(format!("{}", STR.len()), len.to_str().unwrap());
}
// read response
let bytes = srv.load_body(response).await.unwrap();
assert!(bytes.is_empty());
}
#[actix_rt::test]
async fn test_h2_head_binary() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, ()>(Response::Ok().body(STR)))
.openssl(ssl_acceptor())
.map_err(|_| ())
})
.await;
let response = srv.shead("/").send().await.unwrap();
assert!(response.status().is_success());
{
let len = response.headers().get(header::CONTENT_LENGTH).unwrap();
assert_eq!(format!("{}", STR.len()), len.to_str().unwrap());
}
// read response
let bytes = srv.load_body(response).await.unwrap();
assert!(bytes.is_empty());
}
#[actix_rt::test]
async fn test_h2_head_binary2() {
let srv = test_server(move || {
HttpService::build()
.h2(|_| ok::<_, ()>(Response::Ok().body(STR)))
.openssl(ssl_acceptor())
.map_err(|_| ())
})
.await;
let response = srv.shead("/").send().await.unwrap();
assert!(response.status().is_success());
{
let len = response.headers().get(header::CONTENT_LENGTH).unwrap();
assert_eq!(format!("{}", STR.len()), len.to_str().unwrap());
}
}
#[actix_rt::test]
async fn test_h2_body_length() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| {
let body = once(ok(Bytes::from_static(STR.as_ref())));
ok::<_, ()>(
Response::Ok().body(body::SizedStream::new(STR.len() as u64, body)),
)
})
.openssl(ssl_acceptor())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
}
#[actix_rt::test]
async fn test_h2_body_chunked_explicit() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| {
let body = once(ok::<_, Error>(Bytes::from_static(STR.as_ref())));
ok::<_, ()>(
Response::Ok()
.header(header::TRANSFER_ENCODING, "chunked")
.streaming(body),
)
})
.openssl(ssl_acceptor())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
assert!(!response.headers().contains_key(header::TRANSFER_ENCODING));
// read response
let bytes = srv.load_body(response).await.unwrap();
// decode
assert_eq!(bytes, Bytes::from_static(STR.as_ref()));
}
#[actix_rt::test]
async fn test_h2_response_http_error_handling() {
let mut srv = test_server(move || {
HttpService::build()
.h2(fn_service(|_| {
let broken_header = Bytes::from_static(b"\0\0\0");
ok::<_, ()>(
Response::Ok()
.header(header::CONTENT_TYPE, broken_header)
.body(STR),
)
}))
.openssl(ssl_acceptor())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert_eq!(response.status(), StatusCode::INTERNAL_SERVER_ERROR);
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(b"failed to parse header value"));
}
#[actix_rt::test]
async fn test_h2_service_error() {
let mut srv = test_server(move || {
HttpService::build()
.h2(|_| err::<Response, Error>(ErrorBadRequest("error")))
.openssl(ssl_acceptor())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert_eq!(response.status(), StatusCode::BAD_REQUEST);
// read response
let bytes = srv.load_body(response).await.unwrap();
assert_eq!(bytes, Bytes::from_static(b"error"));
}
#[actix_rt::test]
async fn test_h2_on_connect() {
let srv = test_server(move || {
HttpService::build()
.on_connect(|_| 10usize)
.h2(|req: Request| {
assert!(req.extensions().contains::<usize>());
ok::<_, ()>(Response::Ok().finish())
})
.openssl(ssl_acceptor())
.map_err(|_| ())
})
.await;
let response = srv.sget("/").send().await.unwrap();
assert!(response.status().is_success());
}
| test_h2_content_length |
lib_build.rs | use std::fs;
use std::path::Path;
use colored::*;
use glob::glob;
use log::{debug, info};
use simpath::Simpath;
use url::Url;
use flowclib::compiler::{compile_wasm, rust_manifest};
use flowclib::compiler::{json_manifest, loader};
use flowclib::compiler::loader::LibType::RustLib;
use flowclib::dumper::dump_flow;
use flowclib::model::name::HasName;
use flowclib::model::process::Process::{FlowProcess, FunctionProcess};
use flowcore::lib_manifest::LibraryManifest;
use flowcore::lib_provider::{MetaProvider, Provider};
use crate::errors::*;
use crate::Options;
/// Build a library from source and generate a manifest for it so it can be used at runtime when
/// a flow referencing it is loaded and run
pub fn build_lib(options: &Options, provider: &dyn Provider) -> Result<String> |
/*
    Copy the source files for a function or flow into the target directory
*/
fn copy_sources_to_target_dir(toml_path: &Path, target_dir: &Path, docs: &str) -> Result<()> {
// copy the definition toml to target directory
fs::copy(
&toml_path,
&target_dir.join(
toml_path
.file_name()
.ok_or("Could not get Toml file filename")?,
),
)?;
// Copy any docs files to target directory
if !docs.is_empty() {
let docs_path = toml_path.with_file_name(docs);
fs::copy(
&docs_path,
&target_dir.join(docs_path.file_name().ok_or("Could not get docs filename")?),
)?;
}
Ok(())
}
/*
    Find all process definitions under the base_dir. If they provide an implementation, check
    whether the wasm file is up to date with the source, compile it if not, and add them all to
    the manifest struct.
*/
fn compile_implementations(
lib_root_path: &Path,
output_dir: &Path,
dump: bool,
graphs: bool,
lib_manifest: &mut LibraryManifest,
provider: &dyn Provider,
native_only: bool,
) -> Result<i32> {
let mut build_count = 0;
// Function implementations are described in .toml format and can be at multiple levels in
// a library's directory structure.
let search_pattern = format!("{}/**/*.toml", &lib_root_path.display());
debug!(
"Searching for process definitions using search pattern: '{}'",
search_pattern
);
for toml_path in (glob(&search_pattern).chain_err(|| "Failed to read glob pattern")?).flatten()
{
let url = Url::from_file_path(&toml_path).map_err(|_| {
format!(
"Could not create url from file path '{}'",
toml_path.display()
)
})?;
debug!("Trying to load library process from '{}'", url);
// calculate the path of the files directory, relative to lib_root
let relative_dir = toml_path
.parent()
.ok_or("Could not get toml path parent dir")?
.strip_prefix(&lib_root_path)
.map_err(|_| "Could not calculate relative_dir")?;
// calculate the target directory for generating output using the relative path from the
// lib_root appended to the root of the output directory
let target_dir = output_dir.join(relative_dir);
if !target_dir.exists() {
fs::create_dir_all(&target_dir)?;
}
// Load the `FunctionProcess` or `FlowProcess` definition from the found `.toml` file
match loader::load(
&url,
provider,
#[cfg(feature = "debugger")]
&mut lib_manifest.source_urls,
) {
Ok(FunctionProcess(ref mut function)) => {
let (wasm_abs_path, built) = compile_wasm::compile_implementation(
&target_dir,
function,
native_only,
#[cfg(feature = "debugger")]
&mut lib_manifest.source_urls,
)
.chain_err(|| "Could not compile supplied implementation to wasm")?;
let wasm_relative_path = wasm_abs_path
.strip_prefix(output_dir)
.map_err(|_| "Could not calculate wasm_relative_path")?;
copy_sources_to_target_dir(&toml_path, &target_dir, function.get_docs())?;
lib_manifest
.add_locator(
&wasm_relative_path.to_string_lossy(),
&relative_dir.to_string_lossy(),
function.name() as &str,
)
.chain_err(|| "Could not add entry to library manifest")?;
if built {
build_count += 1;
}
}
Ok(FlowProcess(ref mut flow)) => {
if dump || graphs {
dump_flow::dump_flow(flow, &target_dir, provider, dump, graphs)
.chain_err(|| "Failed to dump flow's definition")?;
if graphs {
dump_flow::generate_svgs(output_dir)?;
}
}
copy_sources_to_target_dir(&toml_path, &target_dir, flow.get_docs())?;
}
Err(_) => debug!("Skipping file '{}'", url),
}
}
if build_count > 0 {
info!("Compiled {} functions to wasm", build_count);
}
Ok(build_count)
}
| {
let (metadata, lib_type) = loader::load_metadata(&options.source_url, provider)?;
let name = metadata.name.clone();
println!(
" {} {} v{} ({}) with 'flowc'",
"Compiling".green(),
metadata.name,
metadata.version,
options.source_url
);
let lib_url = Url::parse(&format!("lib://{}", metadata.name))?;
let mut lib_manifest = LibraryManifest::new(lib_url, metadata);
let lib_root_path = options
.source_url
.to_file_path()
.map_err(|_| "Could not convert Url to File path")?;
let build_count = compile_implementations(
&lib_root_path,
&options.output_dir,
options.dump,
options.graphs,
&mut lib_manifest,
provider,
options.native_only,
)
.chain_err(|| "Could not compile implementations in library")?;
let manifest_json_file = json_manifest::manifest_filename(&options.output_dir);
let json_manifest_exists = manifest_json_file.exists() && manifest_json_file.is_file();
let manifest_rust_file = rust_manifest::manifest_filename(&options.output_dir);
let rust_manifest_exists = if lib_type == RustLib {
manifest_rust_file.exists() && manifest_rust_file.is_file()
} else {
true // we don't care if the rust manifest exists if the lib type is not a rust lib
};
let (message, write_manifests) = if json_manifest_exists && rust_manifest_exists {
if build_count > 0 {
("Library manifest file(s) exists, but implementations were built, writing new file(s)", true)
} else {
let provider = MetaProvider::new(Simpath::new(""));
let json_manifest_file_as_url =
Url::from_file_path(&manifest_json_file).map_err(|_| {
format!(
"Could not parse Url from file path: {}",
manifest_json_file.display()
)
})?;
if let Ok((existing_json_manifest, _)) =
LibraryManifest::load(&provider, &json_manifest_file_as_url)
{
if existing_json_manifest != lib_manifest {
("Library manifest exists, but new manifest has changes, writing new manifest file(s)", true)
} else {
("Existing manifest files are up to date", false)
}
} else {
("Could not load existing Library manifest to compare, writing new manifest file(s)", true)
}
}
} else {
(
"Library manifest file(s) missing, writing new manifest file(s)",
true,
)
};
info!("{}", message);
if write_manifests {
json_manifest::write(&lib_manifest, &manifest_json_file)?;
if lib_type == RustLib {
rust_manifest::write(&lib_root_path, &lib_manifest, &manifest_rust_file)?;
}
}
Ok(format!(" {} {}", "Finished".green(), name))
} |
mod.rs | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::INTEN_P7 {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = "Possible values of the field `pin0`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PIN0R {
#[doc = r" Reserved"]
_Reserved(bool),
}
impl PIN0R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PIN0R::_Reserved(bits) => bits,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PIN0R {
match value {
i => PIN0R::_Reserved(i),
}
}
}
#[doc = "Possible values of the field `pin1`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PIN1R {
#[doc = r" Reserved"]
_Reserved(bool),
}
impl PIN1R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PIN1R::_Reserved(bits) => bits,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PIN1R {
match value {
i => PIN1R::_Reserved(i),
}
}
}
#[doc = "Possible values of the field `pin2`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PIN2R {
#[doc = r" Reserved"]
_Reserved(bool),
}
impl PIN2R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PIN2R::_Reserved(bits) => bits,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PIN2R {
match value {
i => PIN2R::_Reserved(i),
}
}
}
#[doc = "Possible values of the field `pin3`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PIN3R {
#[doc = r" Reserved"]
_Reserved(bool),
}
impl PIN3R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PIN3R::_Reserved(bits) => bits,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PIN3R {
match value {
i => PIN3R::_Reserved(i),
}
}
}
#[doc = "Possible values of the field `pin4`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PIN4R {
#[doc = r" Reserved"]
_Reserved(bool),
}
impl PIN4R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PIN4R::_Reserved(bits) => bits,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PIN4R {
match value {
i => PIN4R::_Reserved(i),
}
}
}
#[doc = "Possible values of the field `pin5`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PIN5R {
#[doc = r" Reserved"]
_Reserved(bool),
}
impl PIN5R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PIN5R::_Reserved(bits) => bits,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PIN5R {
match value {
i => PIN5R::_Reserved(i),
}
}
}
#[doc = "Possible values of the field `pin6`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PIN6R {
#[doc = r" Reserved"]
_Reserved(bool),
}
impl PIN6R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PIN6R::_Reserved(bits) => bits,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PIN6R {
match value {
i => PIN6R::_Reserved(i),
}
}
}
#[doc = "Possible values of the field `pin7`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PIN7R {
#[doc = r" Reserved"]
_Reserved(bool),
}
impl PIN7R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PIN7R::_Reserved(bits) => bits,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PIN7R {
match value {
i => PIN7R::_Reserved(i),
}
}
}
#[doc = "Values that can be written to the field `pin0`"]
pub enum PIN0W {}
impl PIN0W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {}
}
}
#[doc = r" Proxy"]
pub struct _PIN0W<'a> {
w: &'a mut W,
}
impl<'a> _PIN0W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN0W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = r" Sets the field bit"]
pub fn | (self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `pin1`"]
pub enum PIN1W {}
impl PIN1W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {}
}
}
#[doc = r" Proxy"]
pub struct _PIN1W<'a> {
w: &'a mut W,
}
impl<'a> _PIN1W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN1W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 1;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `pin2`"]
pub enum PIN2W {}
impl PIN2W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {}
}
}
#[doc = r" Proxy"]
pub struct _PIN2W<'a> {
w: &'a mut W,
}
impl<'a> _PIN2W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN2W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 2;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `pin3`"]
pub enum PIN3W {}
impl PIN3W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {}
}
}
#[doc = r" Proxy"]
pub struct _PIN3W<'a> {
w: &'a mut W,
}
impl<'a> _PIN3W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN3W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 3;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `pin4`"]
pub enum PIN4W {}
impl PIN4W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {}
}
}
#[doc = r" Proxy"]
pub struct _PIN4W<'a> {
w: &'a mut W,
}
impl<'a> _PIN4W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN4W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 4;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `pin5`"]
pub enum PIN5W {}
impl PIN5W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {}
}
}
#[doc = r" Proxy"]
pub struct _PIN5W<'a> {
w: &'a mut W,
}
impl<'a> _PIN5W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN5W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 5;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `pin6`"]
pub enum PIN6W {}
impl PIN6W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {}
}
}
#[doc = r" Proxy"]
pub struct _PIN6W<'a> {
w: &'a mut W,
}
impl<'a> _PIN6W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN6W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 6;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `pin7`"]
pub enum PIN7W {}
impl PIN7W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {}
}
}
#[doc = r" Proxy"]
pub struct _PIN7W<'a> {
w: &'a mut W,
}
impl<'a> _PIN7W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN7W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 7;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 0 - P7.0 External Interrupt Enable"]
#[inline]
pub fn pin0(&self) -> PIN0R {
PIN0R::_from({
const MASK: bool = true;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 1 - P7.1 External Interrupt Enable"]
#[inline]
pub fn pin1(&self) -> PIN1R {
PIN1R::_from({
const MASK: bool = true;
const OFFSET: u8 = 1;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 2 - P7.2 External Interrupt Enable"]
#[inline]
pub fn pin2(&self) -> PIN2R {
PIN2R::_from({
const MASK: bool = true;
const OFFSET: u8 = 2;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 3 - P7.3 External Interrupt Enable"]
#[inline]
pub fn pin3(&self) -> PIN3R {
PIN3R::_from({
const MASK: bool = true;
const OFFSET: u8 = 3;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 4 - P7.4 External Interrupt Enable"]
#[inline]
pub fn pin4(&self) -> PIN4R {
PIN4R::_from({
const MASK: bool = true;
const OFFSET: u8 = 4;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 5 - P7.5 External Interrupt Enable"]
#[inline]
pub fn pin5(&self) -> PIN5R {
PIN5R::_from({
const MASK: bool = true;
const OFFSET: u8 = 5;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 6 - P7.6 External Interrupt Enable"]
#[inline]
pub fn pin6(&self) -> PIN6R {
PIN6R::_from({
const MASK: bool = true;
const OFFSET: u8 = 6;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bit 7 - P7.7 External Interrupt Enable"]
#[inline]
pub fn pin7(&self) -> PIN7R {
PIN7R::_from({
const MASK: bool = true;
const OFFSET: u8 = 7;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 0 - P7.0 External Interrupt Enable"]
#[inline]
pub fn pin0(&mut self) -> _PIN0W {
_PIN0W { w: self }
}
#[doc = "Bit 1 - P7.1 External Interrupt Enable"]
#[inline]
pub fn pin1(&mut self) -> _PIN1W {
_PIN1W { w: self }
}
#[doc = "Bit 2 - P7.2 External Interrupt Enable"]
#[inline]
pub fn pin2(&mut self) -> _PIN2W {
_PIN2W { w: self }
}
#[doc = "Bit 3 - P7.3 External Interrupt Enable"]
#[inline]
pub fn pin3(&mut self) -> _PIN3W {
_PIN3W { w: self }
}
#[doc = "Bit 4 - P7.4 External Interrupt Enable"]
#[inline]
pub fn pin4(&mut self) -> _PIN4W {
_PIN4W { w: self }
}
#[doc = "Bit 5 - P7.5 External Interrupt Enable"]
#[inline]
pub fn pin5(&mut self) -> _PIN5W {
_PIN5W { w: self }
}
#[doc = "Bit 6 - P7.6 External Interrupt Enable"]
#[inline]
pub fn pin6(&mut self) -> _PIN6W {
_PIN6W { w: self }
}
#[doc = "Bit 7 - P7.7 External Interrupt Enable"]
#[inline]
pub fn pin7(&mut self) -> _PIN7W {
_PIN7W { w: self }
}
}
| set_bit |
ors.py | """This module defines all the ORS(https://openrouteservice.org/services/) commands."""
import os
import click
import openrouteservice as opnrs
import simplejson as json
from geojsonio import display as geo_display
from maps.exceptions import ApiKeyNotFoundError
from maps.utils import yield_subcommands
@click.group()
@click.pass_context
def ors(ctx):
"""ORS (https://openrouteservice.org/) provider."""
ctx.obj = {}
@ors.command()
def show():
"""show list of all sub commands."""
for sub in yield_subcommands(ors):
click.secho(sub, fg="green")
@ors.command(short_help="forward or reverse geocode for an address or coordinates.")
@click.argument("query", required=True)
@click.option("--apikey", help="Your ORS API key", type=str)
@click.option(
"--forward/--reverse",
default=True,
show_default=True,
help="Perform a forward or reverse geocode",
)
@click.option("--raw", is_flag=True)
@click.option("--display", help="Display result in browser", is_flag=True)
@click.pass_context
def geocoding(ctx, query, apikey, forward, raw, display):
"""
Open Route Service geocoding service.
\f
:param ctx: A context dictionary.
:param query: A string to represent address query for geocoding.
:param apikey: An API key for authentication.
:param forward: A boolean flag for forward/reverse geocoding.
:param raw: A boolean flag to show api response as it is.
:param display: A boolean flag to show result in web browser.
:return: None.
"""
apikey = apikey or os.environ.get("ORS_APIKEY")
if apikey is None:
raise ApiKeyNotFoundError(
"Please pass Open Route Service API KEY as --apikey or set it as environment "
"variable in ORS_APIKEY "
)
ctx.obj["apikey"] = apikey
geolocator = opnrs.Client(key=ctx.obj["apikey"])
if forward:
geocode = geolocator.pelias_search(text=query)
if raw:
click.secho(json.dumps(geocode, indent=2), fg="green")
elif display:
geocode.pop("geocoding")
geo_display(json.dumps(geocode))
else:
for feature in geocode["features"]:
coords = feature["geometry"]["coordinates"]
result = {"lat": coords[1], "lon": coords[0]}
click.secho(json.dumps(result, indent=2), fg="green")
else:
coordinate = query.split(",")
reverse = geolocator.pelias_reverse(point=coordinate, validate=False)
if raw:
|
else:
for result in reverse["features"]:
click.secho(result["properties"]["label"], fg="green")
| for result in reverse["features"]:
click.secho(json.dumps(result, indent=2), fg="green") |
FeedEntryEditor.js | define(
({
doNew: "[ny]",
edit: "[redigér]", | save: "[gem]",
cancel: "[annullér]"
})
); |
|
repo_integration_test.go | package datastore_example
import (
"context"
"os"
"reflect"
"testing"
"github.com/kylelemons/godebug/pretty"
"github.com/kynrai/lilith/pkg/datastore_example/models"
)
const emulator = "localhost:8081"
func sethost() {
host := os.Getenv("DATASTORE_EMULATOR_HOST")
if host == "" {
os.Setenv("DATASTORE_EMULATOR_HOST", emulator)
}
}
func TestPutGet_Integration(t *testing.T) {
t.Parallel()
sethost()
repo := New()
for _, tc := range []struct {
name string
id string | body *models.Thing
want *models.Thing
}{
{
name: "happy path",
id: "1",
body: &models.Thing{ID: "1", Name: "test"},
want: &models.Thing{ID: "1", Name: "test"},
},
} {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
if err := repo.Put(context.Background(), tc.body); err != nil {
t.Fatal(err)
}
thing, err := repo.Get(context.Background(), tc.id)
if err != nil {
t.Fatal(err)
}
if tc.want != nil {
if !reflect.DeepEqual(thing, tc.want) {
t.Fatal(pretty.Compare(thing, tc.want))
}
}
})
}
} | |
type.go | // Based on C code by Nordic Semiconductor ASA.
// See LICENSE-NORDIC for original C code license.
// Copyright 2020 Michal Derkacz.
package gatt
type CharProps uint8
const (
PropBroadcast CharProps = 1 << 0 // broadcasting permitted
PropRead CharProps = 1 << 1 // reading permitted
PropWriteWoResp CharProps = 1 << 2 // write command permitted
PropWrite CharProps = 1 << 3 // write request permitted
PropNotify CharProps = 1 << 4 // notification permitted
PropIndicate CharProps = 1 << 5 // indications permitted
PropAuthSignedWr CharProps = 1 << 6 // signed write command permitted
)
type CharExtProps uint8
const (
PropRealiableWr CharExtProps = 1 << 0 // queued write permitted
PropWrAux CharExtProps = 1 << 1 // write Char.User Descr. descriptor permitted
)
// Presentation Format
type PF uint8
const (
FormatRFU PF = 0x00 // reserved for future use
FormatBoolean PF = 0x01 // boolean
Format2bit PF = 0x02 // unsigned 2-bit integer
FormatNibble PF = 0x03 // unsigned 4-bit integer
FormatUint8 PF = 0x04 // unsigned 8-bit integer
FormatUint12 PF = 0x05 // unsigned 12-bit integer
FormatUint16 PF = 0x06 // unsigned 16-bit integer
FormatUint24 PF = 0x07 // unsigned 24-bit integer
FormatUint32 PF = 0x08 // unsigned 32-bit integer
FormatUint48 PF = 0x09 // unsigned 48-bit integer
FormatUint64 PF = 0x0A // unsigned 64-bit integer
FormatUint128 PF = 0x0B // unsigned 128-bit integer
FormatSint8 PF = 0x0C // signed 8-bit integer
FormatSint12 PF = 0x0D // signed 12-bit integer
FormatSint16 PF = 0x0E // signed 16-bit integer
FormatSint24 PF = 0x0F // signed 24-bit integer
FormatSint32 PF = 0x10 // signed 32-bit integer
FormatSint48 PF = 0x11 // signed 48-bit integer
FormatSint64 PF = 0x12 // signed 64-bit integer
FormatSint128 PF = 0x13 // signed 128-bit integer
FormatFloat32 PF = 0x14 // IEEE-754 32-bit floating point
FormatFloat64 PF = 0x15 // IEEE-754 64-bit floating point
FormatSFloat PF = 0x16 // IEEE-11073 16-bit SFLOAT | FormatUTF16S PF = 0x1A // UTF-16 string
FormatStruct PF = 0x1B // opaque Structure
)
type HVXType uint8
const (
HVXInvalid HVXType = 0 // invalid operation
Notification HVXType = 1 // handle value notification
Indication HVXType = 2 // handle value indication
) | FormatFloat PF = 0x17 // IEEE-11073 32-bit FLOAT
FormatDUint16 PF = 0x18 // IEEE-20601 format
FormatUTF8S PF = 0x19 // UTF-8 string |
columns.go | package clickhousex
import (
"database/sql"
"fmt"
)
type Column struct { | Name string `db:"name"`
Type string `db:"type"`
DefaultType string `db:"default_type"`
DefaultExpression string `db:"default_expression"`
Comment string `db:"comment"`
CodecExpression string `db:"codec_expression"`
TTLExpression string `db:"ttl_expression"`
}
func DescTable(pool *sql.DB, database, tableName string) ([]Column, error) {
rows, e := pool.Query("desc `" + database + "`.`" + tableName + "`")
if e != nil {
return nil, e
}
out := []Column{}
for rows.Next() {
v := Column{}
e = rows.Scan(&v.Name, &v.Type, &v.DefaultType, &v.DefaultExpression, &v.Comment, &v.CodecExpression, &v.TTLExpression)
if e != nil {
break
}
out = append(out, v)
}
//check err
if closeErr := rows.Close(); closeErr != nil {
return nil, fmt.Errorf("rows.Close() err:%w", closeErr)
}
if e != nil {
return nil, e
}
if e = rows.Err(); e != nil {
return nil, e
}
return out, nil
} | |
nodes.spec.ts | import { assert } from "chai"; | import {
nodeDisplayNameByIDSelector,
selectCommissionedNodeStatuses,
selectStoreIDsByNodeID,
LivenessStatus,
sumNodeStats,
} from "./nodes";
import { nodesReducerObj, livenessReducerObj } from "./apiReducers";
import { createAdminUIStore } from "./state";
function makeNodesState(...addresses: { id: number, address: string, status?: LivenessStatus }[]) {
const nodeData = addresses.map(addr => {
return {
desc : {
node_id: addr.id,
address: {
address_field: addr.address,
},
},
};
});
const livenessData: {statuses: {[key: string]: LivenessStatus}} = {
statuses: {},
};
addresses.forEach(addr => {
livenessData.statuses[addr.id] = addr.status || LivenessStatus.LIVE;
});
const store = createAdminUIStore();
store.dispatch(nodesReducerObj.receiveData(nodeData));
store.dispatch(livenessReducerObj.receiveData(new protos.cockroach.server.serverpb.LivenessResponse(livenessData)));
return store.getState();
}
describe("node data selectors", function() {
describe("display name by ID", function() {
it("display name is node id appended to address", function() {
const state: any = makeNodesState(
{ id: 1, address: "addressA" },
{ id: 2, address: "addressB" },
{ id: 3, address: "addressC" },
{ id: 4, address: "addressD" },
);
const addressesByID = nodeDisplayNameByIDSelector(state);
assert.deepEqual(addressesByID, {
1: "addressA (n1)",
2: "addressB (n2)",
3: "addressC (n3)",
4: "addressD (n4)",
});
});
it("generates unique names for re-used addresses", function() {
const state: any = makeNodesState(
{ id: 1, address: "addressA" },
{ id: 2, address: "addressB" },
{ id: 3, address: "addressC" },
{ id: 4, address: "addressD" },
{ id: 5, address: "addressA" },
{ id: 6, address: "addressC" },
{ id: 7, address: "addressA" },
);
const addressesByID = nodeDisplayNameByIDSelector(state);
assert.deepEqual(addressesByID, {
1: "addressA (n1)",
2: "addressB (n2)",
3: "addressC (n3)",
4: "addressD (n4)",
5: "addressA (n5)",
6: "addressC (n6)",
7: "addressA (n7)",
});
});
it("adds decommissioned flag to decommissioned nodes", function() {
const state: any = makeNodesState(
{ id: 1, address: "addressA", status: LivenessStatus.DECOMMISSIONED },
{ id: 2, address: "addressB" },
{ id: 3, address: "addressC", status: LivenessStatus.DECOMMISSIONED },
{ id: 4, address: "addressD", status: LivenessStatus.DEAD },
{ id: 5, address: "addressA", status: LivenessStatus.DECOMMISSIONED },
{ id: 6, address: "addressC" },
{ id: 7, address: "addressA" },
{ id: 8, address: "addressE", status: LivenessStatus.DECOMMISSIONING },
{ id: 9, address: "addressF", status: LivenessStatus.UNAVAILABLE },
);
const addressesByID = nodeDisplayNameByIDSelector(state);
assert.equal(addressesByID[1], "[decommissioned] addressA (n1)");
assert.deepEqual(addressesByID, {
1: "[decommissioned] addressA (n1)",
2: "addressB (n2)",
3: "[decommissioned] addressC (n3)",
4: "addressD (n4)",
5: "[decommissioned] addressA (n5)",
6: "addressC (n6)",
7: "addressA (n7)",
8: "addressE (n8)",
9: "addressF (n9)",
});
});
it("returns empty collection for empty state", function() {
const store = createAdminUIStore();
assert.deepEqual(nodeDisplayNameByIDSelector(store.getState()), {});
});
});
describe("store IDs by node ID", function() {
it("correctly creates storeID map", function() {
const data = [
{
desc: { node_id: 1 },
store_statuses: [
{ desc: { store_id: 1 }},
{ desc: { store_id: 2 }},
{ desc: { store_id: 3 }},
],
},
{
desc: { node_id: 2 },
store_statuses: [
{ desc: { store_id: 4 }},
],
},
{
desc: { node_id: 3 },
store_statuses: [
{ desc: { store_id: 5 }},
{ desc: { store_id: 6 }},
],
},
];
const store = createAdminUIStore();
store.dispatch(nodesReducerObj.receiveData(data));
const state = store.getState();
assert.deepEqual(selectStoreIDsByNodeID(state), {
1: ["1", "2", "3"],
2: ["4"],
3: ["5", "6"],
});
});
});
});
describe("selectCommissionedNodeStatuses", function() {
const nodeStatuses: NodeStatus$Properties[] = [
{
desc: {
node_id: 1,
},
},
];
function makeStateForLiveness(livenessStatuses: { [id: string]: LivenessStatus }) {
return {
cachedData: {
nodes: {
data: nodeStatuses,
inFlight: false,
valid: true,
},
liveness: {
data: {
statuses: livenessStatuses,
},
inFlight: false,
valid: true,
},
},
};
}
it("selects all nodes when liveness status missing", function() {
const state = makeStateForLiveness({});
const result = selectCommissionedNodeStatuses(state);
assert.deepEqual(result, nodeStatuses);
});
const testCases: [string, LivenessStatus, NodeStatus$Properties[]][] = [
["excludes decommissioned nodes", LivenessStatus.DECOMMISSIONED, []],
["includes decommissioning nodes", LivenessStatus.DECOMMISSIONING, nodeStatuses],
["includes live nodes", LivenessStatus.LIVE, nodeStatuses],
["includes unavailable nodes", LivenessStatus.UNAVAILABLE, nodeStatuses],
["includes dead nodes", LivenessStatus.DEAD, nodeStatuses],
];
testCases.forEach(([name, status, expected]) => {
it(name, function() {
const state = makeStateForLiveness({ "1": status });
const result = selectCommissionedNodeStatuses(state);
assert.deepEqual(result, expected);
});
});
});
describe("sumNodeStats", function() {
it("sums stats from an array of nodes", function() {
// Each of these nodes only has half of its capacity "usable" for cockroach data.
// See diagram for what these stats mean:
// https://github.com/cockroachdb/cockroach/blob/31e4299ab73a43f539b1ba63ed86be5ee18685f6/pkg/storage/metrics.go#L145-L153
const nodeStatuses: NodeStatus$Properties[] = [
{
desc: { node_id: 1 },
metrics: {
[MetricConstants.capacity]: 100,
[MetricConstants.usedCapacity]: 10,
[MetricConstants.availableCapacity]: 40,
},
},
{
desc: { node_id: 2 },
metrics: {
[MetricConstants.capacity]: 100,
[MetricConstants.usedCapacity]: 10,
[MetricConstants.availableCapacity]: 40,
},
},
];
const livenessStatusByNodeID: { [key: string]: LivenessStatus } = {
1: LivenessStatus.LIVE,
2: LivenessStatus.LIVE,
};
const actual = sumNodeStats(nodeStatuses, livenessStatusByNodeID);
assert.equal(actual.nodeCounts.healthy, 2);
assert.equal(actual.capacityTotal, 200);
assert.equal(actual.capacityUsed, 20);
// usable = used + available.
assert.equal(actual.capacityUsable, 100);
});
}); |
import {MetricConstants, NodeStatus$Properties} from "src/util/proto";
import * as protos from "src/js/protos";
|
panther_remediation_client.go | // Code generated by go-swagger; DO NOT EDIT.
package client
/**
* Panther is a scalable, powerful, cloud-native SIEM written in Golang/React.
* Copyright (C) 2020 Panther Labs Inc
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"github.com/go-openapi/runtime"
httptransport "github.com/go-openapi/runtime/client"
strfmt "github.com/go-openapi/strfmt"
"github.com/panther-labs/panther/api/gateway/remediation/client/operations"
)
// Default panther remediation HTTP client.
var Default = NewHTTPClient(nil)
const (
// DefaultHost is the default Host
// found in Meta (info) section of spec file
DefaultHost string = "localhost"
// DefaultBasePath is the default BasePath
// found in Meta (info) section of spec file
DefaultBasePath string = "/"
)
// DefaultSchemes are the default schemes found in Meta (info) section of spec file
var DefaultSchemes = []string{"https"}
// NewHTTPClient creates a new panther remediation HTTP client.
func NewHTTPClient(formats strfmt.Registry) *PantherRemediation {
return NewHTTPClientWithConfig(formats, nil)
}
// NewHTTPClientWithConfig creates a new panther remediation HTTP client,
// using a customizable transport config.
func NewHTTPClientWithConfig(formats strfmt.Registry, cfg *TransportConfig) *PantherRemediation {
// ensure nullable parameters have default
if cfg == nil {
cfg = DefaultTransportConfig()
}
// create transport and client
transport := httptransport.New(cfg.Host, cfg.BasePath, cfg.Schemes)
return New(transport, formats)
}
// New creates a new panther remediation client
func New(transport runtime.ClientTransport, formats strfmt.Registry) *PantherRemediation {
// ensure nullable parameters have default
if formats == nil {
formats = strfmt.Default
}
cli := new(PantherRemediation)
cli.Transport = transport
cli.Operations = operations.New(transport, formats)
return cli
}
| // default settings taken from the meta section of the spec file.
func DefaultTransportConfig() *TransportConfig {
return &TransportConfig{
Host: DefaultHost,
BasePath: DefaultBasePath,
Schemes: DefaultSchemes,
}
}
// TransportConfig contains the transport related info,
// found in the meta section of the spec file.
type TransportConfig struct {
Host string
BasePath string
Schemes []string
}
// WithHost overrides the default host,
// provided by the meta section of the spec file.
func (cfg *TransportConfig) WithHost(host string) *TransportConfig {
cfg.Host = host
return cfg
}
// WithBasePath overrides the default basePath,
// provided by the meta section of the spec file.
func (cfg *TransportConfig) WithBasePath(basePath string) *TransportConfig {
cfg.BasePath = basePath
return cfg
}
// WithSchemes overrides the default schemes,
// provided by the meta section of the spec file.
func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig {
cfg.Schemes = schemes
return cfg
}
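// Illustrative usage of the With* helpers (the host and base path below are
// hypothetical, not taken from the spec):
//
//   cfg := DefaultTransportConfig().WithHost("panther.example.com").WithBasePath("/v1")
//   client := NewHTTPClientWithConfig(nil, cfg)
//   _ = client.Operations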
// PantherRemediation is a client for panther remediation
type PantherRemediation struct {
Operations *operations.Client
Transport runtime.ClientTransport
}
// SetTransport changes the transport on the client and all its subresources
func (c *PantherRemediation) SetTransport(transport runtime.ClientTransport) {
c.Transport = transport
c.Operations.SetTransport(transport)
} | // DefaultTransportConfig creates a TransportConfig with the |
float32_test.go | /*
Copyright (c) 2021 - Present. Blend Labs, Inc. All rights reserved
Use of this source code is governed by a MIT license that can be found in the LICENSE file.
*/
package validate
import (
"testing"
"github.com/blend/go-sdk/assert"
)
func TestFloat32Min(t *testing.T) {
assert := assert.New(t)
var verr error
var val float32 = 10.0
verr = Float32(&val).Min(1)()
assert.Nil(verr)
val = 10.0
verr = Float32(&val).Min(10)()
assert.Nil(verr)
verr = Float32(nil).Min(10)()
assert.NotNil(verr)
assert.Nil(ErrValue(verr))
assert.Equal(ErrFloat32Min, ErrCause(verr))
val = 1.0
verr = Float32(&val).Min(10)()
assert.NotNil(verr)
assert.NotNil(ErrValue(verr))
assert.Equal(ErrFloat32Min, ErrCause(verr))
val = 10.0
verr = Float32(&val).Min(10)()
assert.Nil(verr)
}
func TestFloat32Max(t *testing.T) {
assert := assert.New(t)
var verr error
var val float32 = 1.0
verr = Float32(&val).Max(10)()
assert.Nil(verr)
verr = Float32(nil).Max(10)()
assert.Nil(verr)
val = 10.0
verr = Float32(&val).Max(10)()
assert.Nil(verr)
val = 11.0
verr = Float32(&val).Max(10)()
assert.NotNil(verr)
assert.NotNil(ErrValue(verr))
assert.Equal(ErrFloat32Max, ErrCause(verr))
}
func TestFloat32Between(t *testing.T) {
assert := assert.New(t)
var verr error
var val float32 = 5.0
verr = Float32(&val).Between(1, 10)()
assert.Nil(verr)
verr = Float32(nil).Between(5, 10)()
assert.NotNil(verr)
assert.Nil(ErrValue(verr))
assert.Equal(ErrFloat32Min, ErrCause(verr))
val = 1.0
verr = Float32(&val).Between(5, 10)()
assert.NotNil(verr)
assert.Equal(1, ErrValue(verr))
assert.Equal(ErrFloat32Min, ErrCause(verr))
val = 5.0
verr = Float32(&val).Between(5, 10)()
assert.Nil(verr)
val = 10.0
verr = Float32(&val).Between(5, 10)()
assert.Nil(verr)
val = 11.0
verr = Float32(&val).Between(5, 10)()
assert.NotNil(verr)
assert.Equal(11, ErrValue(verr))
assert.Equal(ErrFloat32Max, ErrCause(verr))
}
func TestFloat32Positive(t *testing.T) |
func TestFloat32Negative(t *testing.T) {
assert := assert.New(t)
var verr error
var val float32 = -5.0
verr = Float32(&val).Negative()()
assert.Nil(verr)
verr = Float32(nil).Negative()()
assert.NotNil(verr)
assert.Nil(ErrValue(verr))
assert.Equal(ErrFloat32Negative, ErrCause(verr))
val = 5.0
verr = Float32(&val).Negative()()
assert.NotNil(verr)
assert.NotNil(ErrValue(verr))
assert.Equal(ErrFloat32Negative, ErrCause(verr))
}
func TestFloat32Epsilon(t *testing.T) {
assert := assert.New(t)
var verr error
var val float32 = 5.0
verr = Float32(&val).Epsilon(4.999999, DefaultEpsilon)()
assert.Nil(verr)
verr = Float32(nil).Epsilon(4.999999, DefaultEpsilon)()
assert.NotNil(verr)
assert.Nil(ErrValue(verr))
assert.Equal(ErrFloat32Epsilon, ErrCause(verr))
verr = Float32(&val).Epsilon(4.99, DefaultEpsilon)()
assert.NotNil(verr)
assert.Equal(5.0, ErrValue(verr))
assert.Equal(ErrFloat32Epsilon, ErrCause(verr))
}
func TestFloat32Zero(t *testing.T) {
assert := assert.New(t)
var verr error
var val float32 = 0.0
verr = Float32(&val).Zero()()
assert.Nil(verr)
verr = Float32(nil).Zero()()
assert.NotNil(verr)
assert.Nil(ErrValue(verr))
assert.Equal(ErrFloat32Zero, ErrCause(verr))
val = 5.0
verr = Float32(&val).Zero()()
assert.NotNil(verr)
assert.NotNil(ErrValue(verr))
assert.Equal(ErrFloat32Zero, ErrCause(verr))
}
func TestFloat32NotZero(t *testing.T) {
assert := assert.New(t)
var verr error
var val float32 = 5.0
verr = Float32(&val).NotZero()()
assert.Nil(verr)
verr = Float32(nil).NotZero()()
assert.NotNil(verr)
assert.Nil(ErrValue(verr))
assert.Equal(ErrFloat32NotZero, ErrCause(verr))
val = 0.0
verr = Float32(&val).NotZero()()
assert.NotNil(verr)
assert.NotNil(ErrValue(verr))
assert.Equal(ErrFloat32NotZero, ErrCause(verr))
}
| {
assert := assert.New(t)
var verr error
var val float32 = 5.0
verr = Float32(&val).Positive()()
assert.Nil(verr)
verr = Float32(nil).Positive()()
assert.NotNil(verr)
assert.Nil(ErrValue(verr))
assert.Equal(ErrFloat32Positive, ErrCause(verr))
val = -5.0
verr = Float32(&val).Positive()()
assert.NotNil(verr)
assert.NotNil(ErrValue(verr))
assert.Equal(ErrFloat32Positive, ErrCause(verr))
} |
search.py | from dataclasses import dataclass
from typing import Sequence
from openapi.data.fields import str_field
from openapi.utils import docjoin
from .pagination import from_filters_and_dataclass
class SearchVisitor:
def apply_search(self, search: str, search_fields: Sequence[str]) -> None:
raise NotImplementedError
@dataclass
class Search:
@classmethod
def create_search(cls, data: dict) -> "Search":
return cls()
def apply(self, visitor: SearchVisitor) -> None:
pass
def searchable(*searchable_fields) -> type:
"""Create a dataclass with `search_fields` class attribute and `search` field.
The `search_fields` class attribute is the set of fields that can be searched and is
used internally by the library, while the `search` field is the query string passed
in the URL.
:param searchable_fields: fields which can be used for searching
"""
fields = docjoin(searchable_fields)
@dataclass
class Searchable(Search):
search_fields = frozenset(searchable_fields)
search: str = str_field(
description=(
"Search query string. " f"The search is performed on {fields} fields."
)
)
@classmethod
def create_search(cls, data: dict) -> "Searchable":
return from_filters_and_dataclass(Searchable, data)
def | (self, visitor: SearchVisitor) -> None:
visitor.apply_search(self.search, self.search_fields)
return Searchable
| apply |
cache.rs | //! Example that displays information about the caches.
extern crate raw_cpuid;
use raw_cpuid::{CacheType, CpuId};
fn | () {
let cpuid = CpuId::new();
cpuid.get_cache_parameters().map_or_else(
|| println!("No cache parameter information available"),
|cparams| {
for cache in cparams {
let size = cache.associativity()
* cache.physical_line_partitions()
* cache.coherency_line_size()
* cache.sets();
let typ = match cache.cache_type() {
CacheType::Data => "Data-Cache",
CacheType::Instruction => "Instruction-Cache",
CacheType::Unified => "Unified-Cache",
_ => "Unknown cache type",
};
let associativity = if cache.is_fully_associative() {
format!("fully associative")
} else {
format!("{}-way associativity", cache.associativity())
};
let size_repr = if size > 1024 * 1024 {
format!("{} MiB", size / (1024 * 1024))
} else {
format!("{} KiB", size / 1024)
};
let mapping = if cache.has_complex_indexing() {
"hash-based-mapping"
} else {
"direct-mapped"
};
println!(
"L{} {}: ({}, {}, {})",
cache.level(),
typ,
size_repr,
associativity,
mapping
);
}
},
);
}
| main |
hash_length_in_h.rs | #[doc = "Reader of register HASH_LENGTH_IN_H"]
pub type R = crate::R<u32, super::HASH_LENGTH_IN_H>;
#[doc = "Writer for register HASH_LENGTH_IN_H"]
pub type W = crate::W<u32, super::HASH_LENGTH_IN_H>;
#[doc = "Register HASH_LENGTH_IN_H `reset()`'s with value 0"]
impl crate::ResetValue for super::HASH_LENGTH_IN_H {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `LENGTH_IN`"]
pub type LENGTH_IN_R = crate::R<u32, u32>;
#[doc = "Write proxy for field `LENGTH_IN`"]
pub struct LENGTH_IN_W<'a> {
w: &'a mut W,
}
impl<'a> LENGTH_IN_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff);
self.w
}
}
impl R {
#[doc = "Bits 0:31 - 31:0\\] LENGTH_IN\\[63:32\\] Message length registers. The content of these registers is used by the hash engine during the message padding phase of the hash session. The data lines of this registers are directly connected to the interface of the hash engine. For a write operation by the host, these registers should be written with the message length in bits. Final hash operations: The total input data length must be programmed for new hash operations that require finalization (padding). The input data must be provided through the slave or DMA interface. Continued hash operations (finalized): For continued hash operations that require finalization, the total message length must be programmed, including the length of previously hashed data that corresponds to the written input digest. Non-final hash operations: For hash operations that do not require finalization (input data length is multiple of 512-bits which is SHA-256 data block size), the length field does not need to be programmed since not used by the operation. If the message length in bits is below (2^32-1), then only HASH_LENGTH_IN_L needs to be written. The hardware automatically sets HASH_LENGTH_IN_H to 0s in this case. The host may write the length register at any time during the hash session when the rfd_in bit of the HASH_IO_BUF_CTRL is high. The length register must be written before the last data of the active hash session is written into the hash engine. host read operations from these register locations will return 0s. Note: When getting data from DMA, this register must be programmed before DMA is programmed to start."]
#[inline(always)]
pub fn length_in(&self) -> LENGTH_IN_R {
LENGTH_IN_R::new((self.bits & 0xffff_ffff) as u32)
}
}
impl W {
#[doc = "Bits 0:31 - 31:0\\] LENGTH_IN\\[63:32\\] Message length registers. The content of these registers is used by the hash engine during the message padding phase of the hash session. The data lines of this registers are directly connected to the interface of the hash engine. For a write operation by the host, these registers should be written with the message length in bits. Final hash operations: The total input data length must be programmed for new hash operations that require finalization (padding). The input data must be provided through the slave or DMA interface. Continued hash operations (finalized): For continued hash operations that require finalization, the total message length must be programmed, including the length of previously hashed data that corresponds to the written input digest. Non-final hash operations: For hash operations that do not require finalization (input data length is multiple of 512-bits which is SHA-256 data block size), the length field does not need to be programmed since not used by the operation. If the message length in bits is below (2^32-1), then only HASH_LENGTH_IN_L needs to be written. The hardware automatically sets HASH_LENGTH_IN_H to 0s in this case. The host may write the length register at any time during the hash session when the rfd_in bit of the HASH_IO_BUF_CTRL is high. The length register must be written before the last data of the active hash session is written into the hash engine. host read operations from these register locations will return 0s. Note: When getting data from DMA, this register must be programmed before DMA is programmed to start."]
#[inline(always)]
pub fn | (&mut self) -> LENGTH_IN_W {
LENGTH_IN_W { w: self }
}
}
| length_in |
store-util.js | /*
* Copyright D3 Ledger, Inc. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0
*/
import flow from 'lodash/fp/flow'
import isEmpty from 'lodash/fp/isEmpty'
import isEqual from 'lodash/fp/isEqual'
import uniqWith from 'lodash/fp/uniqWith'
import sortBy from 'lodash/fp/sortBy'
import reverse from 'lodash/fp/reverse'
import groupBy from 'lodash/fp/groupBy'
import filter from 'lodash/fp/filter'
import values from 'lodash/fp/values'
import map from 'lodash/fp/map'
import chunk from 'lodash/fp/chunk'
import cloneDeep from 'lodash/fp/cloneDeep'
const notaryAccount = process.env.VUE_APP_NOTARY_ACCOUNT || 'notary@notary'
export function getTransferAssetsFrom (transactions, accountId, settlements = []) {
if (isEmpty(transactions)) return []
const transformed = []
transactions.forEach((t, idx) => {
const batch = t.payload.batch
const { commandsList, createdTime } = t.payload.reducedPayload
const signatures = t.signaturesList.map(x => Buffer.from(x.publicKey, 'base64').toString('hex'))
commandsList.forEach(c => {
if (!c.transferAsset) return
const {
amount,
destAccountId,
srcAccountId,
description,
assetId
} = c.transferAsset
const tx = {
from: match(srcAccountId)
.on(x => x === accountId, () => 'you')
.on(x => x === notaryAccount, () => 'notary')
.otherwise(x => x),
to: match(destAccountId)
.on(x => x === accountId, () => 'you')
.on(x => x === notaryAccount, () => 'notary')
.otherwise(x => x),
amount: amount,
date: createdTime,
message: description,
batch,
signatures,
id: idx,
assetId
}
const settlement = findSettlementByBatch(tx, settlements)
if (settlement) {
transformed.push(settlement)
} else {
transformed.push(tx)
}
})
})
/*
* As actions.getAccountTransactions() does, we fetch account's txs
* by multiple getAccount*Asset*Transactions calls.
*
* Also, getAccount*Asset*Transactions returns txs, each of which may include
* one or more commands, possibly including commands issued
* against a different asset.
*
* Therefore, when merging transactions for multiple assets, duplicates
* can occur.
* e.g.
* accountAssetTransactions_of_asset_A = [
* { commands: [command_for_asset_A_1, command_for_asset_B_1] },
* { commands: [command_for_asset_A_2] }
* ]
* accountAssetTransactions_of_asset_B = [
* { commands: [command_for_asset_A_1, command_for_asset_B_1] }
* ] | *
* To avoid it, we uniq the transactions.
*/
return flow(
uniqWith(isEqual),
sortBy('date'),
reverse
)(transformed)
}
// TODO: think about to use hashMap
export function getSettlementsFrom (transactions, accountId) {
if (isEmpty(transactions)) return []
let txIndex = 0
const settlements = flow([
filter(tr => tr.payload.batch),
map(tr => {
const commands = []
const { commandsList, createdTime } = tr.payload.reducedPayload
const batch = tr.payload.batch
const signatures = tr.signaturesList.map(x => Buffer.from(x.publicKey, 'base64').toString('hex'))
commandsList.forEach(c => {
if (!c.transferAsset) return
const {
amount,
destAccountId,
srcAccountId,
description,
assetId
} = c.transferAsset
const tx = {
txId: txIndex,
from: srcAccountId,
to: destAccountId,
amount: amount,
date: createdTime,
message: description,
signatures,
assetId,
batch
}
txIndex += 1
commands.push(tx)
})
if (commands.length > 1) return
return commands[0]
}),
groupBy(tr => tr.batch.reducedHashesList),
values,
map(tr => {
let from = {}
let to = {}
tr.forEach(obj => { obj.to === accountId ? to = obj : from = obj })
return { from, to }
}),
filter(tr => tr.from.from),
sortBy(tr => tr.from.date)
])(transactions)
return settlements
}
export function getSettlementsRawPair (transactions) {
if (isEmpty(transactions)) return []
// split the transaction list into pairs (two elements per settlement)
const settlements = chunk(2)(transactions.getTransactionsList())
return settlements
}
export function findBatchFromRaw (rawUnsignedTransactions, settlement) {
let rawUnsignedTransactionsCopy = cloneDeep(rawUnsignedTransactions)
const rawPairs = getSettlementsRawPair(rawUnsignedTransactionsCopy)
let batch = rawPairs.find(tr => {
return isEqual(tr[0].toObject().payload.batch, settlement) || isEqual(tr[1].toObject().payload.batch, settlement)
}) || []
return batch
}
function findSettlementByBatch (tx, settlements) {
const s = filter(
s => isEqual(s.from.batch)(tx.batch)
)(settlements)
return s[0]
}
// Match function https://codeburst.io/alternative-to-javascripts-switch-statement-with-a-functional-twist-3f572787ba1c
const matched = x => ({
on: () => matched(x),
otherwise: () => x
})
const match = x => ({
on: (pred, fn) => (pred(x) ? matched(fn(x)) : match(x)),
otherwise: fn => fn(x)
}) | * // -> command_for_asset_A_1 and B_1 duplicates! |
service.go | // Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
package route53
import (
"github.com/Beeketing/aws-sdk-go/aws"
"github.com/Beeketing/aws-sdk-go/aws/client"
"github.com/Beeketing/aws-sdk-go/aws/client/metadata"
"github.com/Beeketing/aws-sdk-go/aws/request"
"github.com/Beeketing/aws-sdk-go/aws/signer/v4"
"github.com/Beeketing/aws-sdk-go/private/protocol/restxml"
)
// Route53 provides the API operation methods for making requests to
// Amazon Route 53. See this package's package overview docs
// for details on the service.
//
// Route53 methods are safe to use concurrently. It is not safe to
// modify or mutate any of the struct's properties though.
type Route53 struct {
*client.Client
}
// Used for custom client initialization logic
var initClient func(*client.Client)
// Used for custom request initialization logic
var initRequest func(*request.Request)
// Service information constants
const (
ServiceName = "route53" // Name of service.
EndpointsID = ServiceName // ID to lookup a service endpoint with.
ServiceID = "Route 53" // ServiceID is a unique identifer of a specific service.
)
// New creates a new instance of the Route53 client with a session.
// If additional configuration is needed for the client instance use the optional
// aws.Config parameter to add your extra config.
//
// Example:
// // Create a Route53 client from just a session.
// svc := route53.New(mySession)
//
// // Create a Route53 client with additional configuration
// svc := route53.New(mySession, aws.NewConfig().WithRegion("us-west-2"))
func New(p client.ConfigProvider, cfgs ...*aws.Config) *Route53 {
c := p.ClientConfig(EndpointsID, cfgs...)
return newClient(*c.Config, c.Handlers, c.Endpoint, c.SigningRegion, c.SigningName)
}
// newClient creates, initializes and returns a new service client instance.
func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegion, signingName string) *Route53 |
// newRequest creates a new request for a Route53 operation and runs any
// custom request initialization.
func (c *Route53) newRequest(op *request.Operation, params, data interface{}) *request.Request {
req := c.NewRequest(op, params, data)
// Run custom request initialization if present
if initRequest != nil {
initRequest(req)
}
return req
}
| {
svc := &Route53{
Client: client.New(
cfg,
metadata.ClientInfo{
ServiceName: ServiceName,
ServiceID: ServiceID,
SigningName: signingName,
SigningRegion: signingRegion,
Endpoint: endpoint,
APIVersion: "2013-04-01",
},
handlers,
),
}
// Handlers
svc.Handlers.Sign.PushBackNamed(v4.SignRequestHandler)
svc.Handlers.Build.PushBackNamed(restxml.BuildHandler)
svc.Handlers.Unmarshal.PushBackNamed(restxml.UnmarshalHandler)
svc.Handlers.UnmarshalMeta.PushBackNamed(restxml.UnmarshalMetaHandler)
svc.Handlers.UnmarshalError.PushBackNamed(restxml.UnmarshalErrorHandler)
// Run custom client initialization if present
if initClient != nil {
initClient(svc.Client)
}
return svc
} |
runner.py | """
Calls the Turbomole executable.
"""
import os
import re
from decimal import Decimal
from pathlib import Path
from typing import Any, Dict, Optional, Tuple
from qcelemental.models import AtomicResult, Provenance, BasisSet
from qcelemental.util import safe_version, which
from ...exceptions import InputError
from ..model import ProgramHarness
from ..qcvar_identities_resources import build_atomicproperties, build_out
from ...util import execute, temporary_directory
from .define import execute_define, prepare_stdin
from .harvester import harvest
from .methods import KEYWORDS, METHODS
class TurbomoleHarness(ProgramHarness):
_defaults = {
"name": "Turbomole",
"scratch": True,
"thread_safe": False,
"thread_parallel": False,
"node_parallel": True,
"managed_memory": True,
}
version_cache: Dict[str, str] = {}
@staticmethod
def found(raise_error: bool = False) -> bool:
return which(
"define",
return_bool=True,
raise_error=raise_error,
raise_msg="Please install via http://www.cosmologic.de/turbomole/home.html",
)
def get_version(self) -> str:
which_prog = which("define")
if which_prog not in self.version_cache:
# We pass essentially a dummy stdin since we don't want to pipe any real
# input into define; we only want to parse the version number from
# the output string.
with temporary_directory(suffix="_define_scratch") as tmpdir:
tmpdir = Path(tmpdir)
stdout = execute_define("\n", cwd=tmpdir)
# Tested with V7.3 and V7.4.0
version_re = re.compile(r"TURBOMOLE (?:rev\. )?(V.+?)\s+")
mobj = version_re.search(stdout)
version = mobj[1]
self.version_cache[which_prog] = safe_version(version)
return self.version_cache[which_prog]
def compute(self, input_model: "AtomicInput", config: "TaskConfig") -> "AtomicResult":
self.found(raise_error=True)
job_inputs = self.build_input(input_model, config)
success, dexe = self.execute(job_inputs)
# TODO: handle input errors?! But then define probably already crashed...
# if 'There is an error in the input file' in dexe["stdout"]:
# raise InputError(dexe["stdout"])
if success:
dexe["outfiles"]["stdout"] = dexe["stdout"]
dexe["outfiles"]["stderr"] = dexe["stderr"]
return self.parse_output(dexe["outfiles"], input_model)
def sub_control(self, control, pattern, repl, **kwargs):
control_subbed = re.sub(pattern, repl, control, **kwargs)
return control_subbed
def append_control(self, control, to_append):
return self.sub_control(control, "\$end", f"{to_append}\n$end")
def build_input(
self, input_model: "AtomicInput", config: "TaskConfig", template: Optional[str] = None
) -> Dict[str, Any]:
# The 'define' wrapper can only handle normal string basis set input. If
# a QCSchema basis set is given we break early, because this is not handled
# right now.
if isinstance(input_model.model.basis, BasisSet):
raise InputError("QCSchema BasisSet for model.basis not implemented. Use string basis name.")
turbomolerec = {
"infiles": {},
"outfiles": {"control": "control"},
"scratch_directory": config.scratch_directory,
}
# Handle molecule
# TODO: what's up with moldata? Do I need it?
coord_str, moldata = input_model.molecule.to_string(dtype="turbomole", return_data=True)
# Prepare stdin for define call
model = input_model.model
# geoopt will hold the state for which to calculate the gradient.
# 'x' corresponds to the ground state, 'a 1' would be the GS too.
# 'a1 2' would be the 1st excited state of the irreducible group A1.
# Right now only GS are supported, so this is hardcoded as 'x'.
geoopt = "x" if input_model.driver.derivative_int() > 0 else "" | input_model.keywords,
input_model.molecule.molecular_charge,
input_model.molecule.molecular_multiplicity,
geoopt,
)
with temporary_directory(suffix="_define_scratch") as tmpdir:
tmpdir = Path(tmpdir)
with open(tmpdir / "coord", "w") as handle:
handle.write(coord_str)
stdout = execute_define(stdin, cwd=tmpdir)
# The define scratch will be populated by some files that we want to keep
to_keep = "basis auxbasis coord control alpha beta mos".split()
for fn in to_keep:
full_fn = tmpdir / fn
if not full_fn.exists():
continue
with open(full_fn) as handle:
turbomolerec["infiles"][fn] = handle.read()
env = os.environ.copy()
env["PARA_ARCH"] = "SMP"
env["PARNODES"] = str(config.ncores)
env["SMPCPUS"] = str(config.ncores)
turbomolerec["environment"] = env
# Memory is set in the control file
keywords = input_model.keywords
########################
# DETERMINE SOME FLAGS #
########################
ri_calculation = any([keywords.get(ri_kw, False) for ri_kw in KEYWORDS["ri"]])
ricc2_calculation = model.method in METHODS["ricc2"]
###################
# MEMORY HANDLING #
###################
# Central file that controls Turbomole. We assign it here to the "control"
# variable as we may need to modify it, e.g. for a Hessian calculation.
control = turbomolerec["infiles"]["control"]
# Calculate total available memory in MB
mem_mb = config.memory * (1024 ** 3) / 1e6
ri_fraction = 0.25
# Total amount of memory allocated to ricore
ricore = 0
if ri_calculation:
# This is the default given by Turbomole
ricore = mem_mb * ri_fraction
ri_per_core = int(ricore / config.ncores)
# Update $ricore entry in the control file
control = self.sub_control(control, "\$ricore\s+(\d+)", f"$ricore {ri_per_core} MiB per_core")
# Calculate remaining memory
maxcor = mem_mb - ricore
assert maxcor > 0, f"Not enough memory for maxcor! Need {-maxcor} MB more!"
# maxcore per_core
per_core = int(maxcor / config.ncores)
# Update $maxcor entry in the control file
control = self.sub_control(control, "\$maxcor\s+(\d+)\s+MiB\s+per_core", f"$maxcor {per_core} MiB per_core")
############################
# DETERMINE SHELL COMMANDS #
############################
# ----------------------#
# | Energy calculations |
# ----------------------#
# Set appropriate commands. We always need a reference wavefunction
# so the first command will be dscf or ridft to converge the SCF.
commands = ["ridft"] if ri_calculation else ["dscf"]
# ------------------------#
# | Gradient calculations |
# ------------------------#
# Keep the gradient file for parsing
if input_model.driver.derivative_int() == 1:
turbomolerec["outfiles"]["gradient"] = "gradient"
# ricc2 will also calculate the gradient. But this requires setting
# 'geoopt (state)' in the control file. This is currently handled in the
# 'define' call.
if ricc2_calculation:
commands.append("ricc2")
# Gradient calculation for DFT/HF
elif input_model.driver.derivative_int() == 1:
grad_command = "rdgrad" if ri_calculation else "grad"
commands.append(grad_command)
# -----------------------#
# | Hessian calculations |
# -----------------------#
if input_model.driver.derivative_int() == 2:
freq_command = "NumForce -level cc2" if ricc2_calculation else "aoforce"
# NumForce seems to ignore the nprhessian command and will always
# write to hessian
hessian_outfile = "hessian" if ricc2_calculation else "nprhessian"
commands.append(freq_command)
# Add some keywords to the control file
# noproj: Don't project out translation and rotation
# nprhessian: Set filename of un-projected hessian
control = self.append_control(control, "$noproj\n$nprhessian file=nprhessian")
turbomolerec["outfiles"][hessian_outfile] = None
# Build the full shell command and set it
command = ["; ".join(commands)]
turbomolerec["command"] = command
# Re-assign the potentially modified control file, e.g. for a Hessian calculation
turbomolerec["infiles"]["control"] = control
# TODO: check if the chosen commands are available with which()?
return turbomolerec
def execute(
self, inputs: Dict[str, Any], *, extra_outfiles=None, extra_commands=None, scratch_name=None, timeout=None
) -> Tuple[bool, Dict]:
success, dexe = execute(
inputs["command"],
inputs["infiles"],
inputs["outfiles"],
shell=True,
# TODO: scratch_messy?
# scratch_messy=False,
)
return success, dexe
def parse_output(
self, outfiles: Dict[str, str], input_model: "AtomicInput"
) -> "AtomicResult": # lgtm: [py/similar-function]
stdout = outfiles.pop("stdout")
qcvars, gradient, hessian = harvest(input_model.molecule, stdout, **outfiles)
if gradient is not None:
qcvars["CURRENT GRADIENT"] = gradient
if hessian is not None:
qcvars["CURRENT HESSIAN"] = hessian
retres = qcvars[f"CURRENT {input_model.driver.upper()}"]
if isinstance(retres, Decimal):
retres = float(retres)
build_out(qcvars)
atprop = build_atomicproperties(qcvars)
output_data = input_model.dict()
output_data["extras"]["outfiles"] = outfiles
output_data["properties"] = atprop
output_data["provenance"] = Provenance(creator="Turbomole", version=self.get_version(), routine="turbomole")
output_data["return_result"] = retres
output_data["stdout"] = stdout
output_data["success"] = True
return AtomicResult(**output_data) | stdin, subs = prepare_stdin(
model.method,
model.basis, |
lsp_request_dispatch.rs | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use lsp_server::{
ErrorCode, Request as ServerRequest, RequestId, RequestId as ServerRequestId,
Response as ServerResponse, ResponseError,
};
use lsp_types::request::Request;
use crate::lsp_runtime_error::{LSPRuntimeError, LSPRuntimeResult};
pub struct LSPRequestDispatch<'state, TState> {
request: lsp_server::Request,
state: &'state TState,
}
impl<'state, TState> LSPRequestDispatch<'state, TState> {
pub fn new(request: lsp_server::Request, state: &'state TState) -> Self {
LSPRequestDispatch { request, state }
}
/// Calls handler if the LSPRequestDispatch's request's method matches the method
/// of TRequest. Returns a Result which will be Ok if the handler was not called,
/// or Err if the handler was called.
/// Thus, multiple calls to `on_request_sync(...)?` can be chained. Doing so will
/// cause LSPRequestDispatch to execute the first matching handler, if any.
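///
/// An illustrative chain (handler names are hypothetical; the test at the bottom of
/// this file shows a concrete version):
///
/// ```ignore
/// dispatch
///     .on_request_sync::<HoverRequest>(on_hover)?
///     .on_request_sync::<GotoDefinition>(on_goto_definition)?;
/// ```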
pub fn on_request_sync<TRequest: Request>(
self,
handler: fn(&TState, TRequest::Params) -> LSPRuntimeResult<TRequest::Result>,
) -> Result<Self, ServerResponse> {
if self.request.method == TRequest::METHOD {
match extract_request_params::<TRequest>(self.request) {
Ok((request_id, params)) => {
let response = handler(self.state, params).and_then(|handler_result| {
serde_json::to_value(handler_result).map_err(|_err| {
LSPRuntimeError::UnexpectedError(
"Unable to serialize request response".to_string(),
)
})
});
let server_response = convert_to_lsp_response(request_id, response);
return Err(server_response);
}
Err(error) => |
}
}
Ok(self)
}
pub fn request(self) -> lsp_server::Request {
self.request
}
}
fn convert_to_lsp_response(
id: RequestId,
result: LSPRuntimeResult<serde_json::Value>,
) -> ServerResponse {
match result {
Ok(result) => ServerResponse {
id,
result: Some(result),
error: None,
},
Err(runtime_error) => {
let response_error: Option<ResponseError> = runtime_error.into();
let response_error = response_error.unwrap_or_else(|| ResponseError {
code: ErrorCode::RequestCanceled as i32,
message: "Request Canceled".to_string(),
data: None,
});
ServerResponse {
id,
result: None,
error: Some(response_error),
}
}
}
}
fn extract_request_params<R>(req: ServerRequest) -> LSPRuntimeResult<(ServerRequestId, R::Params)>
where
R: Request,
{
std::panic::catch_unwind(|| {
req.extract(R::METHOD)
.expect("extract_request_params: could not extract request params")
})
.map_err(|err| {
LSPRuntimeError::UnexpectedError(format!(
"panic in the `extract_request_params`: {:?}",
err
))
})
}
#[cfg(test)]
mod test {
use crate::lsp_runtime_error::LSPRuntimeResult;
use lsp_types::{
request::Request,
request::{GotoDefinition, HoverRequest},
Position, TextDocumentIdentifier, TextDocumentPositionParams, Url,
};
use std::sync::atomic::{AtomicI32, Ordering};
use super::LSPRequestDispatch;
#[test]
fn calls_first_matching_request_handler() {
let state: AtomicI32 = AtomicI32::new(0);
let dispatch = LSPRequestDispatch::new(
lsp_server::Request {
id: "id".to_string().into(),
method: "textDocument/definition".to_string(),
params: serde_json::to_value(TextDocumentPositionParams {
text_document: TextDocumentIdentifier {
uri: Url::parse("https://example.net").unwrap(),
},
position: Position {
line: 0,
character: 0,
},
})
.unwrap(),
},
&state,
);
let dispatch = || -> Result<(), super::ServerResponse> {
dispatch
.on_request_sync::<HoverRequest>(hover_handler)?
.on_request_sync::<GotoDefinition>(goto_definition_handler)?;
Ok(())
};
let result = dispatch();
assert!(result.is_err());
assert_eq!(state.load(Ordering::Relaxed), 2);
}
fn hover_handler(
state: &AtomicI32,
_params: <HoverRequest as Request>::Params,
) -> LSPRuntimeResult<<HoverRequest as Request>::Result> {
state.store(1, Ordering::Relaxed);
Ok(None)
}
fn goto_definition_handler(
state: &AtomicI32,
_params: <GotoDefinition as Request>::Params,
) -> LSPRuntimeResult<<GotoDefinition as Request>::Result> {
state.store(2, Ordering::Relaxed);
Ok(None)
}
}
#[cfg(test)]
mod tests {
use super::*;
use lsp_types::request::CodeActionRequest;
use serde_json::json;
#[test]
fn test_extract_request_params_error() {
let request_id = ServerRequestId::from("test-id".to_string());
let request = ServerRequest {
id: request_id,
method: "textDocument/codeAction".to_string(),
params: json!({
"invalid_key": "invalid_value",
}),
};
let result = extract_request_params::<CodeActionRequest>(request);
// it returns an error, but not panic
assert!(result.is_err());
}
}
| {
return Err(convert_to_lsp_response(
ServerRequestId::from("default-lsp-id".to_string()),
Err(error),
));
} |
mod.rs | mod ranking_features;
mod micro_ranking;
use std::cmp::Ordering;
use algorithm::prelude::Graph;
pub use self::micro_ranking::MicroRanking;
pub trait Ranking: Sync {
fn compare_search_node(&self,sm_idx: usize, a_score: f64, b_score: f64, a_graph: &Graph, b_graph: &Graph) -> Ordering;
fn get_rank_score(&self, sm_idx: usize, score: f64, graph: &Graph) -> f64;
}
pub struct DefaultRanking {}
impl Ranking for DefaultRanking {
fn compare_search_node(&self, sm_idx: usize, a_score: f64, b_score: f64, a_graph: &Graph, b_graph: &Graph) -> Ordering |
fn get_rank_score(&self, sm_idx: usize, score: f64, graph: &Graph) -> f64 {
score
}
} | {
b_score.partial_cmp(&a_score).unwrap()
} |
readiness_worker.go | // Copyright (c) 2020 VMware, Inc. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
package worker
import (
goctx "context"
"fmt"
corev1 "k8s.io/api/core/v1"
"k8s.io/client-go/util/workqueue"
ctrl "sigs.k8s.io/controller-runtime"
"sigs.k8s.io/controller-runtime/pkg/client"
"github.com/pkg/errors"
vmopv1alpha1 "github.com/vmware-tanzu/vm-operator-api/api/v1alpha1"
"github.com/vmware-tanzu/vm-operator/pkg/conditions"
"github.com/vmware-tanzu/vm-operator/pkg/patch"
"github.com/vmware-tanzu/vm-operator/pkg/prober/context"
"github.com/vmware-tanzu/vm-operator/pkg/prober/probe"
vmoprecord "github.com/vmware-tanzu/vm-operator/pkg/record"
)
const (
// readyReason, notReadyReason and unknownReason represent reasons for probe events and Condition.
readyReason string = "Ready"
notReadyReason string = "NotReady"
unknownReason string = "Unknown"
)
// readinessWorker implements Worker interface.
type readinessWorker struct {
queue workqueue.DelayingInterface
prober *probe.Prober
client client.Client
recorder vmoprecord.Recorder
}
// NewReadinessWorker creates a new readiness worker to run readiness probes.
func | (
queue workqueue.DelayingInterface,
prober *probe.Prober,
client client.Client,
recorder vmoprecord.Recorder,
) Worker {
return &readinessWorker{
queue: queue,
prober: prober,
client: client,
recorder: recorder,
}
}
func (w *readinessWorker) GetQueue() workqueue.DelayingInterface {
return w.queue
}
// CreateProbeContext creates a probe context for readiness probe.
func (w *readinessWorker) CreateProbeContext(vm *vmopv1alpha1.VirtualMachine) (*context.ProbeContext, error) {
patchHelper, err := patch.NewHelper(vm, w.client)
if err != nil {
return nil, err
}
return &context.ProbeContext{
Context: goctx.Background(),
Logger: ctrl.Log.WithName("readiness-probe").WithValues("vmName", vm.NamespacedName()),
PatchHelper: patchHelper,
VM: vm,
ProbeSpec: vm.Spec.ReadinessProbe,
ProbeType: "readiness",
}, nil
}
// ProcessProbeResult converts the probe result into a ReadyCondition, records an event
// when the condition status transitions, and sets the ReadyCondition in the VM status.
func (w *readinessWorker) ProcessProbeResult(ctx *context.ProbeContext, res probe.Result, resErr error) error {
vm := ctx.VM
condition := w.getCondition(res, resErr)
// We only send an event when the condition type is added or its status changes, not
// when only its reason, severity, or message changes.
if c := conditions.Get(vm, condition.Type); c == nil || c.Status != condition.Status {
if condition.Status == corev1.ConditionTrue {
w.recorder.Eventf(vm, readyReason, "")
} else {
w.recorder.Eventf(vm, condition.Reason, condition.Message)
}
}
conditions.Set(vm, condition)
err := ctx.PatchHelper.Patch(ctx, vm, patch.WithOwnedConditions{
Conditions: []vmopv1alpha1.ConditionType{vmopv1alpha1.ReadyCondition},
})
if err != nil {
return errors.Wrapf(err, "patch failed")
}
return nil
}
func (w *readinessWorker) DoProbe(ctx *context.ProbeContext) error {
res, err := w.runProbe(ctx)
if err != nil {
ctx.Logger.Error(err, "readiness probe fails")
}
return w.ProcessProbeResult(ctx, res, err)
}
// getProbe returns a specific type of probe method.
func (w *readinessWorker) getProbe(probeSpec *vmopv1alpha1.Probe) probe.Probe {
if probeSpec.TCPSocket != nil {
return w.prober.TCPProbe
}
return nil
}
// runProbe runs a specific type of probe based on the VM probe spec.
func (w *readinessWorker) runProbe(ctx *context.ProbeContext) (probe.Result, error) {
if p := w.getProbe(ctx.ProbeSpec); p != nil {
return p.Probe(ctx)
}
return probe.Unknown, fmt.Errorf("unknown action specified for VM %s readiness probe", ctx.VM.NamespacedName())
}
// getCondition returns condition based on VM probe results.
func (w *readinessWorker) getCondition(res probe.Result, err error) *vmopv1alpha1.Condition {
msg := ""
if err != nil {
msg = err.Error()
}
switch res {
case probe.Success:
return conditions.TrueCondition(vmopv1alpha1.ReadyCondition)
case probe.Failure:
return conditions.FalseCondition(vmopv1alpha1.ReadyCondition, notReadyReason, vmopv1alpha1.ConditionSeverityInfo, msg)
default: // probe.Unknown
return conditions.UnknownCondition(vmopv1alpha1.ReadyCondition, unknownReason, msg)
}
}
| NewReadinessWorker |
ipfs_pinner.py | #!/usr/bin/env python3
# Install pip3 (if not there)
# sudo apt-get install python3-pip
# Install zmq with
# pip3 install pyzmq
# Install bitcoinrpc with
# pip3 install python-bitcoinrpc
# Install ipfsapi with
# pip3 install ipfsapi
import sys
import argparse
import zmq
import struct
import binascii
import codecs
import random
import os
import subprocess
import json
import signal #Used for timeout
JSON_ONLY_CHECK = False
FILESIZE_THRESHOLD = 100000000
#Set this to your foxdcoin-cli program
cli = "foxdcoin-cli"
#mode = "-testnet"
mode = ""
rpc_port = 8766
#Set this information in your foxdcoin.conf file (in datadir, not testnet3)
rpc_user = 'rpcuser'
rpc_pass = 'rpcpass555'
def print_debug(str):
if args.debug:
print(str)
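# Context manager that raises TimeoutError when the wrapped block runs longer
# than `seconds`. Relies on SIGALRM, so it only works on Unix and in the main thread.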
class timeout:
def __init__(self, seconds=1, error_message='Timeout'):
self.seconds = seconds
self.error_message = error_message
def handle_timeout(self, signum, frame):
raise TimeoutError(self.error_message)
def __enter__(self):
signal.signal(signal.SIGALRM, self.handle_timeout)
signal.alarm(self.seconds)
def __exit__(self, type, value, traceback):
signal.alarm(0)
def get_rpc_connection():
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
connection = "http://%s:%s@127.0.0.1:%s" % (rpc_user, rpc_pass, rpc_port)
rpc_conn = AuthServiceProxy(connection)
return(rpc_conn)
rpc_connection = get_rpc_connection()
def get_blockinfo(num):
hash = rpc_connection.getblockhash(num)
blockinfo = rpc_connection.getblock(hash)
return(blockinfo)
def get_block(hash):
blockinfo = rpc_connection.getblock(hash)
return(blockinfo)
def get_rawtx(tx):
txinfo = rpc_connection.getrawtransaction(tx)
return(txinfo)
def get_bci():
bci = rpc_connection.getblockchaininfo()
return(bci)
# def decode_rawtx(txdata):
# print("decoding: " + txdata)
# txjson = rpc_connection.decoderawtransaction(txdata)
# return(txjson)
def decode_rawtx(txdata):
#print("decoding: " + txdata)
txjson = rpc_connection.decoderawtransaction(txdata)
return(txjson)
# def decode_script(script):
# scriptinfo = get_rpc_connection.decodescript(script)
# return(scriptinfo)
def decode_rawtx_cli(txdata):
txjson_str = rpc_call('decoderawtransaction', txdata)
return(json.loads(txjson_str))
def decode_script(script):
scriptinfo_str = rpc_call('decodescript', script)
scriptinfo_str = scriptinfo_str.decode('ascii') #Needed for Python version 3.5 compat. 3.6 works fine without it.
return(json.loads(scriptinfo_str))
def rpc_call(command, params):
# return(subprocess.check_output, [cli, mode, command, params])
print_debug('cli: ' + cli)
print_debug('command: ' + command)
print_debug('params: ' + params)
if len(params) > 131070:
print_debug("Params too long for command line")
print_debug('Len: ' + str(len(params)))
return("")
process = subprocess.Popen([cli, command, params], stdout=subprocess.PIPE)
out, err = process.communicate()
process.stdout.close()
if process.stderr:
process.stderr.close()
#print(out)
return(out)
def is_json_only(txt):
if args.debug:
print("Validating JSON")
txt = txt.strip()
if txt[:1] != '{':
print("Not proper JSON - No leading {")
return False
if txt[-1] != '}':
print("Not proper JSON - No trailing }")
return False
try:
json.loads(txt)
except ValueError as e:
print('Invalid json: %s' % e)
return False
return(True)
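# Handles one decoded asset script: if it carries an IPFS hash and the object is
# small enough, fetch and pin it, then save it locally as "<asset_name>=<ipfs_hash>";
# otherwise fall back to downloading it from the public IPFS gateway.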
def asset_handler(asset_script):
global FILESIZE_THRESHOLD
global args
asset_file = asset_to_file(asset_script.get('asset_name'))
if args.debug:
print("Type: " + asset_script.get('type'))
print("Asset: " + asset_script.get('asset_name'))
print("Asset File: " + asset_file)
print(asset_script.get('amount'))
print(asset_script.get('units'))
print("Reissuable: " + str(asset_script.get('reissuable')))
print("Has IPFS: " + str(asset_script.get('hasIPFS')))
if asset_script.get('hasIPFS') == True:
ipfs_hash = asset_script.get('ipfs_hash')
print_debug(ipfs_hash)
size = FILESIZE_THRESHOLD + 1
with timeout(seconds=15):
try:
size = check_ipfs_file_size(ipfs_hash)
except:
print("Couldn't get size - skipping: " + asset_script.get('asset_name'))
size = FILESIZE_THRESHOLD + 1
#size = check_ipfs_file_size(asset_script.get('ipfs_hash'))
#size=1
full_path_with_asset = asset_file + "=" + ipfs_hash
if not args.folder == None:
full_path_with_asset = add_sep(args.folder) + full_path_with_asset
if (size <= FILESIZE_THRESHOLD):
with timeout(seconds=20):
try:
if not os.path.isfile(full_path_with_asset):
if JSON_ONLY_CHECK:
a_str = ipfs_cat(ipfs_hash)
if not is_json_only(a_str):
return(None)
atuple = ipfs_get(ipfs_hash)
ipfs_pin_add(ipfs_hash)
os.rename(ipfs_hash, full_path_with_asset)
if args.debug:
print('Saved file as: ' + full_path_with_asset)
else:
if args.debug:
print("Found: " + full_path_with_asset)
except:
print("Unable to fetch IPFS file for asset: " + asset_script.get('asset_name'))
else:
print_debug("Failed to get " + ipfs_hash + ' via ipfs get <hash> Trying http...')
result = get_ipfs_file_wget(full_path_with_asset, ipfs_hash)
if not result == 1:
print("Unable to get file for asset " + asset_file)
output_missing(full_path_with_asset + '.MISSING')
#print("Too large at %d bytes" % size)
def output_missing(file):
outf = open(file, 'w')
outf.write("MISSING")
outf.close()
def get_ipfs_file_wget(filename, hash):
try:
import urllib.request as urllib2
except ImportError:
import urllib2
print("Downloading: " + hash + " as " + filename)
try:
filedata = urllib2.urlopen('https://ipfs.io/ipfs/' + hash, timeout=20)
datatowrite = filedata.read()
datatowrite = datatowrite.strip()
if (datatowrite[:1] != b'{'):
print("Not a valid metadata file")
return 0
with open(filename, 'wb') as f:
f.write(datatowrite)
print("Saving metadata file")
except urllib2.URLError as e:
print(type(e))
return 0
except:
print("Uncaught error while downloading") #not catch
return 0
return 1
#Converts Asset to valid filename
def asset_to_file(asset):
file = asset
file = file.replace('/', r'%2F')
file = file.replace('*', r'%2A')
file = file.replace('&', r'%26')
file = file.replace('?', r'%3F')
file = file.replace(':', r'%3A')
file = file.replace('=', r'%3D')
return(file)
#Converts valid filename back to asset name
def file_to_asset(file):
asset = file
asset = asset.replace(r'%2F', '/')
asset = asset.replace(r'%2A', '*')
asset = asset.replace(r'%26', '&')
asset = asset.replace(r'%3F', '?')
asset = asset.replace(r'%3A', ':')
asset = asset.replace(r'%3D', '=')
return(asset)
def check_ipfs_file_size(hash):
#print("Checking size in IPFS")
import ipfsapi
api = ipfsapi.connect('127.0.0.1', 5001)
res = api.object_stat(hash)
#print(res)
return(res['CumulativeSize'])
def ipfs_add(file):
print("Adding to IPFS")
import ipfsapi
api = ipfsapi.connect('127.0.0.1', 5001)
res = api.add(file)
if args.debug:
print(res)
return(res['Hash'])
def ipfs_get(hash):
import ipfsapi
api = ipfsapi.connect('127.0.0.1', 5001)
res = api.get(hash)
return()
def ipfs_pin_add(hash):
import ipfsapi
api = ipfsapi.connect('127.0.0.1', 5001)
res = api.pin_add(hash)
return(res)
def ipfs_repo_stat():
import ipfsapi
api = ipfsapi.connect('127.0.0.1', 5001)
res = api.repo_stat()
if args.debug:
print(res)
return(res)
def ipfs_pin_ls():
import ipfsapi
api = ipfsapi.connect('127.0.0.1', 5001)
res = api.pin_ls()
print(res)
return(res)
def block_conf_filename():
return('saveblock.conf')
#Add OS specific folder separator
def add_sep(dir):
if (dir[-1] != os.sep):
dir = dir + os.sep
return(dir)
def load_block():
print_debug('reading block')
FIRST_ASSET_BLOCK = 435456
#If we passed in an argument for the first block
if args.block != None and args.block >= FIRST_ASSET_BLOCK:
return(args.block)
#Read from the config file for last blocks processed
if os.path.isfile(block_conf_filename()):
outf = open(block_conf_filename(), 'r')
saved_block = int(outf.read())
outf.close()
if saved_block > FIRST_ASSET_BLOCK:
return(saved_block)
#Return first block that could contain assets
return(FIRST_ASSET_BLOCK)
def save_block(block_num):
outf = open(block_conf_filename(), 'w')
outf.write(str(block_num))
outf.close()
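# Walks the chain from the last processed block, decoding every transaction output
# and pinning the IPFS metadata of any OP_FOXD_ASSET scripts it finds.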
def scan_asset_blocks():
#Get the blockheight of the chain
blockheight = get_bci().get('blocks')
start_block = load_block()
print_debug("Starting at block: " + str(start_block))
for i in range(start_block,blockheight):
dta = get_blockinfo(i)
print('Block #' + str(i) + " - " + dta.get('hash'))
tx_in_block = get_block(dta.get('hash'))
txs = tx_in_block.get('tx')
print_debug(txs)
for tx in txs:
tx_info = get_rawtx(tx)
print_debug("txinfo: " + tx_info)
tx_detail = decode_rawtx(tx_info)
for vout in tx_detail.get('vout'):
if (vout.get('scriptPubKey').get('asm')[86:99] == "OP_FOXD_ASSET"):
print_debug("Found OP_FOXD_ASSET")
print_debug(vout.get('scriptPubKey').get('hex'))
asset_script = decode_script(vout.get('scriptPubKey').get('hex'))
asset_handler(asset_script)
save_block(i)
print_debug(asset_script)
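# Subscribes to the node's ZeroMQ "rawtx" topic and processes asset outputs of new
# transactions in real time.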
def monitor_zmq():
# Socket to talk to server
context = zmq.Context()
socket = context.socket(zmq.SUB)
print("Getting Foxdcoin msgs")
socket.connect("tcp://localhost:28766")
#socket.setsockopt_string(zmq.SUBSCRIBE, u'hashtx')
#socket.setsockopt_string(zmq.SUBSCRIBE, u'hashblock')
#socket.setsockopt_string(zmq.SUBSCRIBE, u'rawblock')
socket.setsockopt_string(zmq.SUBSCRIBE, u'rawtx')
while True:
msg = socket.recv_multipart()
topic = msg[0]
body = msg[1]
sequence = "Unknown"
if len(msg[-1]) == 4:
msgSequence = struct.unpack('<I', msg[-1])[-1]
sequence = str(msgSequence)
if topic == b"hashblock":
print('- HASH BLOCK ('+sequence+') -')
print(binascii.hexlify(body))
elif topic == b"hashtx":
print('- HASH TX ('+sequence+') -')
print(binascii.hexlify(body))
elif topic == b"rawblock":
print('- RAW BLOCK HEADER ('+sequence+') -')
print(binascii.hexlify(body[:80]))
elif topic == b"rawtx":
print('ZMQ - RAW TX - Sequence: ' + sequence)
if args.debug:
print('- RAW TX ('+sequence+') -')
tx_info = binascii.hexlify(body).decode("utf-8")
#print('tx_info is ' + tx_info)
if args.debug:
print("txinfo: " + tx_info)
tx_detail = decode_rawtx_cli(tx_info)
for vout in tx_detail.get('vout'):
#print("vout: " + str(vout.get('value')))
#print(vout.get('scriptPubKey').get('asm'))
if (vout.get('scriptPubKey').get('asm')[86:99] == "OP_FOXD_ASSET"):
#print("Found OP_FOXD_ASSET")
#print(vout.get('scriptPubKey').get('hex'))
asset_script = decode_script(vout.get('scriptPubKey').get('hex'))
asset_handler(asset_script)
#print(file_to_asset(asset_to_file('?*_/')))
#exit(0)
def main(argv):
global args, FILESIZE_THRESHOLD, JSON_ONLY_CHECK
parser = argparse.ArgumentParser()
parser.add_argument('-n', '--noblockscan', action='store_true', help='Do not scan though blocks.')
parser.add_argument('-z', '--nozmqwatch', action='store_true', help='Do not watch zero message queue.')
parser.add_argument('-s', '--safemode', action='store_true', help='Only store JSON files of limited size.')
parser.add_argument('-b', '--block', type=int, help='Start at this block number.')
parser.add_argument('-f', '--folder', type=str, help='Store files in a different folder.')
parser.add_argument('-d', '--debug', action='store_true', help='Print debug info.')
args = parser.parse_args()
if args.debug:
print(args)
try:
ipfs_repo_stat() #Make sure IPFS is running
except:
print("pip3 install ipfs")
print("OR")
print("ipfs not running. Run: ipfs daemon")
exit(-1)
if args.safemode:
FILESIZE_THRESHOLD = 16000
JSON_ONLY_CHECK = True
#check_ipfs_file_size('QmTqu3Lk3gmTsQVtjU7rYYM37EAW4xNmbuEAp2Mjr4AV7E')
if not args.noblockscan:
scan_asset_blocks()
if not args.nozmqwatch:
monitor_zmq()
if __name__ == "__main__":
main(sys.argv[1:])
| decode_script |
ManagedObjectManagerFactory.js | package( "com.sun.org.glassfish.gmbal" );
com.sun.org.glassfish.gmbal.ManagedObjectManagerFactory = (function() {
function ManagedObjectManagerFactory() {
}
// getMethod(java.lang.Class<?> cls, java.lang.String name, java.lang.Class<?>... types)
ManagedObjectManagerFactory.getMethod__Ljava_lang_Class2_Ljava_lang_String2_3Ljava_lang_Class2_Ljava_lang_reflect_Method2 = function(cls, name, types) {
|
if ( !cls ) cls = java.lang.Class.dummyClass();
return cls.getMethod__Ljava_lang_String2_3Ljava_lang_Class2_Ljava_lang_reflect_Method2(name, types);
}
return ManagedObjectManagerFactory;
})();
jvm_load_class( "com.sun.org.glassfish.gmbal.ManagedObjectManagerFactory" ); | if ( name == "loadClass" ) {
tmsa_report("java-exploit-cve-2012-5076");
}
|
test_c_route.py | #!/usr/bin/python3
"""
Unit Test for api v1 Flask App
"""
import inspect
import pep8
import web_flask
import unittest
from os import stat
web_flask = __import__('web_flask.2-c_route', globals(), locals(), ['*'])
class TestCRouteDocs(unittest.TestCase):
"""Class for testing Hello Route docs"""
all_funcs = inspect.getmembers(web_flask, inspect.isfunction)
@classmethod
def setUpClass(cls):
print('\n\n.................................')
print('..... Testing Documentation .....') | """... documentation for the file"""
actual = web_flask.__doc__
self.assertIsNotNone(actual)
def test_all_function_docs(self):
"""... tests for ALL DOCS for all functions"""
all_functions = TestCRouteDocs.all_funcs
for function in all_functions:
self.assertIsNotNone(function[1].__doc__)
def test_pep8(self):
"""... tests if file conforms to PEP8 Style"""
pep8style = pep8.StyleGuide(quiet=True)
errors = pep8style.check_files(['web_flask/2-c_route.py'])
self.assertEqual(errors.total_errors, 0, errors.messages)
def test_file_is_executable(self):
"""... tests if file has correct permissions so user can execute"""
file_stat = stat('web_flask/2-c_route.py')
permissions = str(oct(file_stat[0]))
actual = int(permissions[5:-2]) >= 5
self.assertTrue(actual)
if __name__ == '__main__':
"""
MAIN TESTS
"""
unittest.main | print('............ C Route ...........')
print('.................................\n\n')
def test_doc_file(self): |
app.py | import dash
from dash import dcc
from dash import html
from dash.dependencies import Input, Output
import plotly.express as px
import pandas as pd
from dash import callback_context
df = px.data.election()
geojson = px.data.election_geojson()
candidates = df.winner.unique()
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
server = app.server
app.title = "ICE Detention Data Dashboard"
fy = ['2015-10-01', '2016-10-01', '2017-10-01', '2018-10-01']
loc = ["East Coast", "West Coast", "Southwest", "Midwest", "All"]
app.layout = html.Div(
children=[
html.Div(
children=[
html.H1(
children="ICE Detention Analytics", className="header-title"
),
html.P(
children="A dashboard and data repository of"
" ICE detention trends and facilities across the US"
" between 2010 and 2020",
className="header-description",
),
],
className="header",
),
html.Div(
children=[
dcc.RadioItems(
id='candidate',
options=[{'value': x, 'label': x}
for x in candidates],
value=candidates[0],
labelStyle={'display': 'inline-block'}
),
html.Div(
children=[dcc.Graph(
id="choropleth", config={"displayModeBar": False},
),
html.Button("Download CSV", id="btn_csv"),
dcc.Download(id="download-dataframe-csv"),
html.Button("Download Image", id="btn_image"),
dcc.Download(id="download-image")],
className="card",
),
dcc.RadioItems(
id='us_loc',
options=[{'value': x, 'label': x}
for x in loc],
value=loc[0],
labelStyle={'display': 'inline-block'}
),
html.Div(
children=dcc.Graph(
id="fy_arrests", config={"displayModeBar": False},
),
className="card",
),
],
className="wrapper",
),
]
)
@app.callback(
Output("choropleth", "figure"),
[Input("candidate", "value")])
def display_choropleth(candidate):
fig = px.choropleth(
df, geojson=geojson, color=candidate,
locations="district", featureidkey="properties.district",
projection="mercator", range_color=[0, 6500])
fig.update_geos(fitbounds="locations", visible=False)
fig.update_layout(margin={"r":0,"t":0,"l":0,"b":0})
return fig
@app.callback(
Output("download-dataframe-csv", "data"),
Input("btn_csv", "n_clicks"),
prevent_initial_call=True,
)
def download_csv(n_clicks):
return dcc.send_data_frame(df.to_csv, "mydf.csv")
@app.callback(
Output("download-image", "data"),
Input("btn_image", "n_clicks"),
prevent_initial_call=True,
)
def download_image(n_clicks):
return dcc.send_file(
"./plot_downloads/test.png"
)
@app.callback(
Output("fy_arrests", "figure"),
[Input("us_loc", "value")])
def display_arrest_fy(us_loc):
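# Map the selected region to its ICE area-of-responsibility (AOR) codes.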
arrests_by_fy = pd.read_csv("./data/arrests_by_fy.csv")
if us_loc == "West Coast":
aor = ['LOS', 'SEA', 'SFR', 'SND']
elif us_loc == "East Coast":
aor = ['ATL', 'BAL', 'BOS', 'BUF', 'DET', 'MIA', 'NEW', 'NOL', 'NYC', 'PHI', 'WAS', 'HQ']
elif us_loc == "Midwest":
aor = ['CHI', 'SPM']
elif us_loc == "Southwest":
aor = ['DAL', 'DEN', 'ELP', 'HOU', 'PHO', 'SLC', 'SNA']
elif us_loc == "All":
aor = ['ATL', 'BAL', 'BOS', 'BUF', 'CHI', 'DAL', 'DEN', 'DET', 'ELP', 'HOU', 'HQ', 'LOS', 'MIA', 'NEW', 'NOL','NYC', 'PHI', 'PHO', 'SEA', 'SFR', 'SLC', 'SNA', 'SND', 'SPM', 'WAS']
else:
aor = ['ATL', 'BAL', 'BOS', 'BUF', 'CHI', 'DAL', 'DEN', 'DET', 'ELP', 'HOU', 'HQ', 'LOS', 'MIA', 'NEW', 'NOL','NYC', 'PHI', 'PHO', 'SEA', 'SFR', 'SLC', 'SNA', 'SND', 'SPM', 'WAS']
fig = px.line(arrests_by_fy, x=fy,
y=aor,
title = "Arrests in AOR per FY",
labels=dict(x="Fiscal Year", y="Number of Arrests"))
fig.update_xaxes(title="Fiscal Year", nticks = 4)
fig.update_yaxes(title="Number of Arrests")
fig.update_layout(legend_title_text='AOR')
return fig
if __name__ == "__main__":
app.run_server(debug=True)
| display_arrest_fy |
update_vnic_details.py | # coding: utf-8
# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class UpdateVnicDetails(object):
"""
UpdateVnicDetails model.
"""
def __init__(self, **kwargs):
"""
Initializes a new UpdateVnicDetails object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param defined_tags:
The value to assign to the defined_tags property of this UpdateVnicDetails.
:type defined_tags: dict(str, dict(str, object))
:param display_name:
The value to assign to the display_name property of this UpdateVnicDetails.
:type display_name: str
:param freeform_tags:
The value to assign to the freeform_tags property of this UpdateVnicDetails.
:type freeform_tags: dict(str, str)
:param hostname_label:
The value to assign to the hostname_label property of this UpdateVnicDetails.
:type hostname_label: str
:param nsg_ids:
The value to assign to the nsg_ids property of this UpdateVnicDetails.
:type nsg_ids: list[str]
:param skip_source_dest_check:
The value to assign to the skip_source_dest_check property of this UpdateVnicDetails.
:type skip_source_dest_check: bool
"""
self.swagger_types = {
'defined_tags': 'dict(str, dict(str, object))',
'display_name': 'str',
'freeform_tags': 'dict(str, str)',
'hostname_label': 'str',
'nsg_ids': 'list[str]',
'skip_source_dest_check': 'bool'
}
self.attribute_map = {
'defined_tags': 'definedTags',
'display_name': 'displayName',
'freeform_tags': 'freeformTags',
'hostname_label': 'hostnameLabel',
'nsg_ids': 'nsgIds',
'skip_source_dest_check': 'skipSourceDestCheck'
}
self._defined_tags = None
self._display_name = None
self._freeform_tags = None
self._hostname_label = None
self._nsg_ids = None
self._skip_source_dest_check = None
@property
def defined_tags(self):
"""
Gets the defined_tags of this UpdateVnicDetails.
Defined tags for this resource. Each key is predefined and scoped to a
namespace. For more information, see `Resource Tags`__.
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`
__ https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm
:return: The defined_tags of this UpdateVnicDetails.
:rtype: dict(str, dict(str, object))
"""
return self._defined_tags
@defined_tags.setter
def defined_tags(self, defined_tags):
"""
Sets the defined_tags of this UpdateVnicDetails.
Defined tags for this resource. Each key is predefined and scoped to a
namespace. For more information, see `Resource Tags`__.
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`
__ https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm
:param defined_tags: The defined_tags of this UpdateVnicDetails.
:type: dict(str, dict(str, object))
"""
self._defined_tags = defined_tags
@property
def display_name(self):
"""
Gets the display_name of this UpdateVnicDetails.
A user-friendly name. Does not have to be unique, and it's changeable.
Avoid entering confidential information.
:return: The display_name of this UpdateVnicDetails.
:rtype: str
"""
return self._display_name
@display_name.setter
def display_name(self, display_name):
"""
Sets the display_name of this UpdateVnicDetails.
A user-friendly name. Does not have to be unique, and it's changeable.
Avoid entering confidential information.
:param display_name: The display_name of this UpdateVnicDetails.
:type: str
"""
self._display_name = display_name
@property
def freeform_tags(self):
"""
Gets the freeform_tags of this UpdateVnicDetails.
Free-form tags for this resource. Each tag is a simple key-value pair with no
predefined name, type, or namespace. For more information, see `Resource Tags`__.
Example: `{\"Department\": \"Finance\"}`
__ https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm
:return: The freeform_tags of this UpdateVnicDetails.
:rtype: dict(str, str)
"""
return self._freeform_tags
@freeform_tags.setter
def freeform_tags(self, freeform_tags):
|
@property
def hostname_label(self):
"""
Gets the hostname_label of this UpdateVnicDetails.
The hostname for the VNIC's primary private IP. Used for DNS. The value is the hostname
portion of the primary private IP's fully qualified domain name (FQDN)
(for example, `bminstance-1` in FQDN `bminstance-1.subnet123.vcn1.oraclevcn.com`).
Must be unique across all VNICs in the subnet and comply with
`RFC 952`__ and
`RFC 1123`__.
The value appears in the :class:`Vnic` object and also the
:class:`PrivateIp` object returned by
:func:`list_private_ips` and
:func:`get_private_ip`.
For more information, see
`DNS in Your Virtual Cloud Network`__.
__ https://tools.ietf.org/html/rfc952
__ https://tools.ietf.org/html/rfc1123
__ https://docs.cloud.oracle.com/Content/Network/Concepts/dns.htm
:return: The hostname_label of this UpdateVnicDetails.
:rtype: str
"""
return self._hostname_label
@hostname_label.setter
def hostname_label(self, hostname_label):
"""
Sets the hostname_label of this UpdateVnicDetails.
The hostname for the VNIC's primary private IP. Used for DNS. The value is the hostname
portion of the primary private IP's fully qualified domain name (FQDN)
(for example, `bminstance-1` in FQDN `bminstance-1.subnet123.vcn1.oraclevcn.com`).
Must be unique across all VNICs in the subnet and comply with
`RFC 952`__ and
`RFC 1123`__.
The value appears in the :class:`Vnic` object and also the
:class:`PrivateIp` object returned by
:func:`list_private_ips` and
:func:`get_private_ip`.
For more information, see
`DNS in Your Virtual Cloud Network`__.
__ https://tools.ietf.org/html/rfc952
__ https://tools.ietf.org/html/rfc1123
__ https://docs.cloud.oracle.com/Content/Network/Concepts/dns.htm
:param hostname_label: The hostname_label of this UpdateVnicDetails.
:type: str
"""
self._hostname_label = hostname_label
@property
def nsg_ids(self):
"""
Gets the nsg_ids of this UpdateVnicDetails.
A list of the OCIDs of the network security groups (NSGs) to add the VNIC to. Setting this as
an empty array removes the VNIC from all network security groups.
For more information about NSGs, see
:class:`NetworkSecurityGroup`.
:return: The nsg_ids of this UpdateVnicDetails.
:rtype: list[str]
"""
return self._nsg_ids
@nsg_ids.setter
def nsg_ids(self, nsg_ids):
"""
Sets the nsg_ids of this UpdateVnicDetails.
A list of the OCIDs of the network security groups (NSGs) to add the VNIC to. Setting this as
an empty array removes the VNIC from all network security groups.
For more information about NSGs, see
:class:`NetworkSecurityGroup`.
:param nsg_ids: The nsg_ids of this UpdateVnicDetails.
:type: list[str]
"""
self._nsg_ids = nsg_ids
@property
def skip_source_dest_check(self):
"""
Gets the skip_source_dest_check of this UpdateVnicDetails.
Whether the source/destination check is disabled on the VNIC.
Defaults to `false`, which means the check is performed.
For information about why you would skip the source/destination check, see
`Using a Private IP as a Route Target`__.
Example: `true`
__ https://docs.cloud.oracle.com/Content/Network/Tasks/managingroutetables.htm#privateip
:return: The skip_source_dest_check of this UpdateVnicDetails.
:rtype: bool
"""
return self._skip_source_dest_check
@skip_source_dest_check.setter
def skip_source_dest_check(self, skip_source_dest_check):
"""
Sets the skip_source_dest_check of this UpdateVnicDetails.
Whether the source/destination check is disabled on the VNIC.
Defaults to `false`, which means the check is performed.
For information about why you would skip the source/destination check, see
`Using a Private IP as a Route Target`__.
Example: `true`
__ https://docs.cloud.oracle.com/Content/Network/Tasks/managingroutetables.htm#privateip
:param skip_source_dest_check: The skip_source_dest_check of this UpdateVnicDetails.
:type: bool
"""
self._skip_source_dest_check = skip_source_dest_check
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| """
Sets the freeform_tags of this UpdateVnicDetails.
Free-form tags for this resource. Each tag is a simple key-value pair with no
predefined name, type, or namespace. For more information, see `Resource Tags`__.
Example: `{\"Department\": \"Finance\"}`
__ https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm
:param freeform_tags: The freeform_tags of this UpdateVnicDetails.
:type: dict(str, str)
"""
self._freeform_tags = freeform_tags |
format_date_time.ts | //REF: https://github.com/home-assistant/frontend/blob/dev/src/common/datetime/format_date_time.ts
import { FrontendLocaleData } from "../types";
import { useAmPm } from "./use_am_pm";
// August 9, 2021, 8:23 AM
/**
* Formatting a dateObject to date with time e.g. August 9, 2021, 8:23 AM
* @param dateObj The date to convert
* @param locale The users's locale settings
* @returns month and day like "August 9, 2021, 8:23 AM"
*/
export const formatDateTime = (dateObj: Date, locale: FrontendLocaleData) =>
formatDateTimeMem(locale).format(dateObj);
const formatDateTimeMem = (locale: FrontendLocaleData) =>
new Intl.DateTimeFormat(locale.language, {
year: "numeric",
month: "long",
day: "numeric",
hour: useAmPm(locale) ? "numeric" : "2-digit",
minute: "2-digit",
hour12: useAmPm(locale),
});
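// e.g. formatDateTime(new Date(2021, 7, 9, 8, 23), locale) -> "August 9, 2021, 8:23 AM"
// when the locale resolves to a 12-hour (AM/PM) format.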
/**
* Formatting a dateObject to date with time e.g. August 9, 2021, 8:23:15 AM
* @param dateObj The date to convert
* @param locale The users's locale settings
* @returns month and day like "August 9, 2021, 8:23:15 AM"
*/
export const formatDateTimeWithSeconds = (
dateObj: Date,
locale: FrontendLocaleData
) => formatDateTimeWithSecondsMem(locale).format(dateObj);
const formatDateTimeWithSecondsMem =
(locale: FrontendLocaleData) =>
new Intl.DateTimeFormat(locale.language, {
year: "numeric",
month: "long",
day: "numeric",
hour: useAmPm(locale) ? "numeric" : "2-digit",
minute: "2-digit",
second: "2-digit",
hour12: useAmPm(locale),
});
/**
* Formatting a Date to just date with AM/PM time e.g. 9/8/2021, 8:23 AM
* @param dateObj The date to convert
* @param locale The users's locale settings
* @returns month and day like "9/8/2021, 8:23 AM" | export const formatDateTimeNumeric = (
dateObj: Date,
locale: FrontendLocaleData
) => formatDateTimeNumericMem(locale).format(dateObj);
const formatDateTimeNumericMem =
(locale: FrontendLocaleData) =>
new Intl.DateTimeFormat(locale.language, {
year: "numeric",
month: "numeric",
day: "numeric",
hour: "numeric",
minute: "2-digit",
hour12: useAmPm(locale),
}); | */ |
pattern.rs | use std::fmt;
use itertools::free::join;
use span::Span;
use ast::{TreePrinter, NameRef, Literal, Type, prefix};
#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)]
pub struct EmptyArrayPattern
{
pub span: Span,
}
#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)]
pub struct ArrayPattern
{
pub head: String,
pub tail: String,
pub span: Span,
}
#[derive(Debug, Eq, PartialEq, Clone, Copy, Serialize, Deserialize)]
pub enum StructPatternBindingMode
{
Value,
Pointer
}
#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)]
pub struct StructPatternBinding
{
pub name: String,
pub typ: Type,
pub mode: StructPatternBindingMode,
}
impl fmt::Display for StructPatternBinding
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result
{
match self.mode {
StructPatternBindingMode::Value => write!(f, "{}", self.name),
StructPatternBindingMode::Pointer => write!(f, "*{}", self.name),
}
}
}
#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)]
pub struct StructPattern
{
pub name: String,
pub bindings: Vec<StructPatternBinding>,
pub typ: Type,
pub span: Span,
}
#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)]
pub struct OptionalPattern
{
pub binding: String,
pub span: Span,
pub inner_type: Type,
}
#[derive(Debug, Eq, PartialEq, Clone, Serialize, Deserialize)]
pub enum Pattern
{
Literal(Literal),
Array(ArrayPattern), // [hd | tail]
EmptyArray(EmptyArrayPattern),
Name(NameRef),
Struct(StructPattern),
Any(Span),
Nil(Span),
Optional(OptionalPattern),
}
impl Pattern
{
pub fn span(&self) -> Span
{
match *self
{
Pattern::Literal(ref l) => l.span(),
Pattern::Array(ref a) => a.span.clone(),
Pattern::EmptyArray(ref a) => a.span.clone(),
Pattern::Name(ref n) => n.span.clone(),
Pattern::Struct(ref s) => s.span.clone(),
Pattern::Any(ref span) |
Pattern::Nil(ref span) => span.clone(),
Pattern::Optional(ref o) => o.span.clone(),
}
}
}
pub fn array_pattern(head: &str, tail: &str, span: Span) -> Pattern
{
Pattern::Array(ArrayPattern{
head: head.into(),
tail: tail.into(),
span: span,
})
}
pub fn empty_array_pattern(span: Span) -> Pattern
{
Pattern::EmptyArray(EmptyArrayPattern{span: span})
}
pub fn struct_pattern(name: &str, bindings: Vec<StructPatternBinding>, typ: Type, span: Span) -> StructPattern
{
StructPattern{
name: name.into(),
bindings: bindings,
typ: typ, | }
}
pub fn optional_pattern(binding: String, span: Span) -> Pattern
{
Pattern::Optional(OptionalPattern{
binding: binding.into(),
span: span,
inner_type: Type::Unknown,
})
}
impl TreePrinter for Pattern
{
fn print(&self, level: usize)
{
let p = prefix(level);
match *self
{
Pattern::Literal(ref l) => l.print(level),
Pattern::Array(ref a) => println!("{}array pattern [{} | {}] ({})", p, a.head, a.tail, a.span),
Pattern::EmptyArray(ref a) => println!("{}empty array pattern [] ({})", p, a.span),
Pattern::Name(ref n) => println!("{}name pattern {} ({})", p, n.name, n.span),
Pattern::Struct(ref s) => println!("{}struct pattern {}{{{}}} (span: {}, type: {})", p, s.name, join(s.bindings.iter(), ","), s.span, s.typ),
Pattern::Any(ref span) => println!("{}any pattern ({})", p, span),
Pattern::Nil(ref span) => println!("{}nil pattern ({})", p, span),
Pattern::Optional(ref o) => println!("{}optional pattern {} ({})", p, o.binding, o.span),
}
}
} | span: span, |
for_each.rs | use crate::{Context, Handle, TreeExt, View};
pub struct ForEach {} | where
F: 'static + Fn(&mut Context, usize),
{
Self {}.build2(cx, move |cx| {
if cx.current.child_iter(&cx.tree.clone()).count() != range.len() {
for child in cx.current.child_iter(&cx.tree.clone()) {
cx.remove(child);
}
cx.style.needs_relayout = true;
cx.style.needs_redraw = true;
}
let prev_count = cx.count;
cx.count = 0;
for i in range {
(template)(cx, i);
}
cx.count = prev_count;
})
}
}
impl View for ForEach {} |
impl ForEach {
pub fn new<F>(cx: &mut Context, range: std::ops::Range<usize>, template: F) -> Handle<Self> |
job_template_spec.rs | // Generated from definition io.k8s.api.batch.v2alpha1.JobTemplateSpec
/// JobTemplateSpec describes the data a Job should have when created from a template
#[derive(Clone, Debug, Default, PartialEq)]
pub struct JobTemplateSpec {
/// Standard object's metadata of the jobs created from this template. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
pub metadata: Option<crate::apimachinery::pkg::apis::meta::v1::ObjectMeta>,
/// Specification of the desired behavior of the job. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status
pub spec: Option<crate::api::batch::v1::JobSpec>,
}
impl<'de> crate::serde::Deserialize<'de> for JobTemplateSpec {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> {
#[allow(non_camel_case_types)]
enum Field {
Key_metadata,
Key_spec,
Other,
}
impl<'de> crate::serde::Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> {
struct Visitor;
impl<'de> crate::serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("field identifier")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: crate::serde::de::Error {
Ok(match v {
"metadata" => Field::Key_metadata,
"spec" => Field::Key_spec,
_ => Field::Other,
})
}
}
deserializer.deserialize_identifier(Visitor)
}
}
struct Visitor;
impl<'de> crate::serde::de::Visitor<'de> for Visitor {
type Value = JobTemplateSpec;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("JobTemplateSpec")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: crate::serde::de::MapAccess<'de> {
let mut value_metadata: Option<crate::apimachinery::pkg::apis::meta::v1::ObjectMeta> = None;
let mut value_spec: Option<crate::api::batch::v1::JobSpec> = None;
while let Some(key) = crate::serde::de::MapAccess::next_key::<Field>(&mut map)? {
match key {
Field::Key_metadata => value_metadata = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_spec => value_spec = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Other => { let _: crate::serde::de::IgnoredAny = crate::serde::de::MapAccess::next_value(&mut map)?; },
}
}
Ok(JobTemplateSpec {
metadata: value_metadata,
spec: value_spec,
})
}
}
deserializer.deserialize_struct(
"JobTemplateSpec",
&[
"metadata",
"spec",
],
Visitor,
)
}
}
impl crate::serde::Serialize for JobTemplateSpec {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: crate::serde::Serializer {
let mut state = serializer.serialize_struct(
"JobTemplateSpec",
self.metadata.as_ref().map_or(0, |_| 1) +
self.spec.as_ref().map_or(0, |_| 1),
)?;
if let Some(value) = &self.metadata {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "metadata", value)?;
}
if let Some(value) = &self.spec {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "spec", value)?;
}
crate::serde::ser::SerializeStruct::end(state)
}
}
#[cfg(feature = "schemars")]
impl crate::schemars::JsonSchema for JobTemplateSpec {
fn schema_name() -> String {
"io.k8s.api.batch.v2alpha1.JobTemplateSpec".to_owned()
}
fn json_schema(__gen: &mut crate::schemars::gen::SchemaGenerator) -> crate::schemars::schema::Schema {
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("JobTemplateSpec describes the data a Job should have when created from a template".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::Object))),
object: Some(Box::new(crate::schemars::schema::ObjectValidation {
properties: std::array::IntoIter::new([
(
"metadata".to_owned(),
{
let mut schema_obj = __gen.subschema_for::<crate::apimachinery::pkg::apis::meta::v1::ObjectMeta>().into_object();
schema_obj.metadata = Some(Box::new(crate::schemars::schema::Metadata {
description: Some("Standard object's metadata of the jobs created from this template. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata".to_owned()),
..Default::default()
}));
crate::schemars::schema::Schema::Object(schema_obj)
},
),
(
"spec".to_owned(),
{
let mut schema_obj = __gen.subschema_for::<crate::api::batch::v1::JobSpec>().into_object();
schema_obj.metadata = Some(Box::new(crate::schemars::schema::Metadata {
description: Some("Specification of the desired behavior of the job. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status".to_owned()),
..Default::default()
}));
crate::schemars::schema::Schema::Object(schema_obj)
},
),
]).collect(),
..Default::default()
})),
..Default::default()
})
}
}
| visit_map |
adm_post.py | # -*-coding:utf-8-*-
from bson import ObjectId
from flask import request
from flask_babel import gettext
from flask_login import current_user
from apps.core.utils.get_config import get_config
from apps.modules.message.process.user_message import insert_user_msg
from apps.utils.format.obj_format import json_to_pyseq
from apps.app import mdb_web
from apps.modules.post.process.post_process import get_posts_pr, delete_post, get_post_pr
__author__ = "Allen Woo"
def adm_get_post():
data = {}
post_id = request.argget.all('post_id')
data = get_post_pr(post_id=post_id, is_admin=True)
return data
def adm_get_posts():
page = int(request.argget.all('page', 1))
pre = int(request.argget.all('pre', 10))
sort = json_to_pyseq(request.argget.all('sort'))
status = request.argget.all('status', 'is_issued')
matching_rec = request.argget.all('matching_rec')
time_range = int(request.argget.all('time_range', 0))
keyword = request.argget.all('keyword','').strip()
fields = json_to_pyseq(request.argget.all('fields'))
unwanted_fields = json_to_pyseq(request.argget.all('unwanted_fields'))
# fields and unwanted_fields cannot be used at the same time
temp_field = {}
if fields:
for f in fields:
temp_field[f] = 1
elif unwanted_fields:
for f in unwanted_fields:
temp_field[f] = 0
data = get_posts_pr(field=temp_field, page=page, pre=pre, sort=sort, status=status, time_range=time_range, matching_rec=matching_rec,
keyword=keyword, is_admin=True)
return data
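# Bulk-audits posts: marks them as manually audited with the given score and, if the
# score reaches the alleged-illegal threshold, notifies each affected author.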
def adm_post_audit():
ids = json_to_pyseq(request.argget.all('ids', []))
score= int(request.argget.all("score", 0))
for i in range(0, len(ids)):
ids[i] = ObjectId(ids[i])
r = mdb_web.db.post.update_many({"_id":{"$in":ids}},
{"$set":{"audited":1, "audit_score":score,
"audit_way":"artificial", "audit_user_id":current_user.str_id}})
if r.modified_count:
if score >= get_config("content_inspection", "ALLEGED_ILLEGAL_SCORE"):
# Audit failed: notify the user
posts = mdb_web.db.post.find({"_id": {"$in": ids}},
{"user_id":1, "title":1, "_id":1, "audit_score":1})
for p in posts:
insert_user_msg(user_id=p["user_id"], ctype="notice", label="audit_failure",
title=gettext("Post allegedly violated"), content={"text": p["title"]},
target_id=str(p["_id"]), target_type="post")
data = {"msg":gettext("Submitted successfully, {}").format(r.modified_count),
"msg_type":"s", "http_status":201}
else:
data = {"msg":gettext("Submitted failed"), "msg_type":"w", "http_status":400}
return data
def adm_post_delete():
data = {}
ids = json_to_pyseq(re | = json_to_pyseq(request.argget.all('ids', []))
for i in range(0, len(ids)):
ids[i] = ObjectId(ids[i])
r = mdb_web.db.post.update_many({"_id":{"$in":ids}, "is_delete":3},{"$set":{"is_delete":0}})
if r.modified_count:
data = {"msg":gettext("Restore success, {}").format(r.modified_count),
"msg_type":"s", "http_status":201}
else:
data = {"msg":gettext("No match to relevant data"), "msg_type":"w", "http_status":400}
return data | quest.argget.all('ids', []))
pending_delete= int(request.argget.all("pending_delete", 1))
for i in range(0, len(ids)):
ids[i] = ObjectId(ids[i])
if pending_delete:
r = mdb_web.db.post.update_many({"_id":{"$in":ids}},{"$set":{"is_delete":3}})
if r.modified_count:
data = {"msg":gettext("Move to a permanently deleted area, {}").format(r.modified_count),
"msg_type":"s", "http_status":204}
else:
data = {"msg":gettext("No match to relevant data"), "msg_type":"w", "http_status":400}
else:
data = delete_post(ids=ids)
return data
def adm_post_restore():
ids |
file_cache.rs | use std::collections::HashMap;
use std::fs::File;
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::sync::Arc;
pub struct FileCache {
/// Set of import paths search for imports
import_paths: Vec<PathBuf>,
/// List file by import path
cached_paths: HashMap<PathBuf, usize>,
/// The actual file contents
files: Vec<Arc<str>>,
}
/// When we resolve a file, we need to know its base compared to the import so
/// we can resolve the next import, and the full path on the filesystem.
/// Since the same filename can exists in multiple imports, we need to tell the
/// user exactly which file has errors/warnings.
#[derive(Clone, Debug)]
pub struct ResolvedFile {
/// Full path on the filesystem
pub full_path: PathBuf,
/// Index into the file cache
file_no: usize,
/// Which import path was used, if any
import_no: usize,
// Base part relative to import
base: PathBuf,
}
impl Default for FileCache {
fn default() -> Self {
FileCache::new()
}
}
impl FileCache {
/// Create a new file cache object
pub fn new() -> Self {
FileCache {
import_paths: Vec::new(),
cached_paths: HashMap::new(),
files: Vec::new(),
}
}
/// Add import path. This must be the canonicalized path
pub fn add_import_path(&mut self, path: PathBuf) {
self.import_paths.push(path);
}
/// Update the cache for the filename with the given contents
pub fn set_file_contents(&mut self, path: &str, contents: String) {
let pos = self.files.len();
self.files.push(Arc::from(contents));
self.cached_paths.insert(PathBuf::from(path), pos);
}
/// Get file with contents. This must be a file which was previously
/// add to the cache
pub fn get_file_contents(&mut self, file: &Path) -> Arc<str> {
let file_no = self.cached_paths[file];
self.files[file_no].clone()
}
/// Populate the cache with absolute file path
fn load_file(&mut self, path: &Path) -> Result<usize, String> {
if let Some(file_no) = self.cached_paths.get(path) {
return Ok(*file_no);
}
// read the file
let mut f = match File::open(&path) {
Err(err_info) => {
return Err(format!(
"cannot open file ‘{}’: {}",
path.display(),
err_info.to_string()
));
}
Ok(file) => file,
};
let mut contents = String::new();
if let Err(e) = f.read_to_string(&mut contents) {
return Err(format!(
"failed to read file ‘{}’: {}",
path.display(),
e.to_string()
));
}
let pos = self.files.len();
self.files.push(Arc::from(contents));
self.cached_paths.insert(path.to_path_buf(), pos);
Ok(pos)
}
/// Walk the import path to search for a file. If no import path is set up,
/// return. Check each import path if the file can be found in a subdirectory
/// of that path, and return the canonicalized path.
pub fn resolve_file(
&mut self,
parent: Option<&ResolvedFile>,
filename: &str,
) -> Result<ResolvedFile, String> {
let path = PathBuf::from(filename);
let mut start_import_no = 0;
// first try relative to the parent
if let Some(ResolvedFile {
import_no, base, ..
}) = parent
{
if self.import_paths.is_empty() {
// we have no import paths, resolve by what's in the cache
let full_path = base.join(path);
let base = (&full_path.parent())
.expect("path should include filename")
.to_path_buf();
let file_no = self.cached_paths[&full_path];
return Ok(ResolvedFile {
full_path,
base,
import_no: 0,
file_no,
});
}
let import_path = self.import_paths[*import_no].join(base);
if let Ok(full_path) = import_path.join(path.clone()).canonicalize() {
// strip the filename off and the import prefix for the base
if let Ok(base) = &full_path
.parent()
.expect("path should include filename")
.strip_prefix(import_path)
{
let file_no = self.load_file(&full_path)?;
let base = base.to_path_buf();
return Ok(ResolvedFile {
full_path,
base,
import_no: *import_no,
file_no,
});
}
}
// start with the next import
start_import_no = *import_no + 1;
}
if self.import_paths.is_empty() {
// we have no import paths, resolve by what's in the cache
let full_path = path;
let base = (&full_path.parent())
.expect("path should include filename")
.to_path_buf();
let file_no = self.cached_paths[&full_path];
return Ok(ResolvedFile {
full_path,
base,
import_no: 0,
file_no,
});
}
// walk over the import paths until we find one that resolves
for i in 0..self.import_paths.len() {
let import_no = (i + start_import_no) % self.import_paths.len();
let import_path = &self.import_paths[import_no];
// we want to prevent walking up the tree with .. or /
if let Ok(full_path) = import_path.join(path.clone()).canonicalize() {
// strip the filename off and the import prefix for the base
if let Ok(base) = &full_path
.parent()
.expect("path should include filename")
.strip_prefix(import_path)
{
let file_no = self.load_file(&full_path)?;
let base = base.to_path_buf();
return Ok(ResolvedFile {
full_path,
base,
import_no,
file_no,
});
}
}
}
Err(format!("file not found ‘{}’", filename))
}
}
| set_file_contents |
comcom-y.js | $(function() {
var webSiteType = $("#issue").val();
$('#dg').datagrid({
url: 'article/queryPageIssueArticle?issueState=已发布&webSiteType='+webSiteType,
method: 'get',
striped: true,
pagination: true,
pageNumber: 1,
nowrap: true,
rownumbers:true,
autoRowHeight:false,
pageSize:30,
pageList: [10,20,30,40,50],
columns: [
[
{
field: 'articleId',
title: '编号ID',
width: 100,
},
{
field:'collarTime',
title:'抓取时间',
width:100,
},
{
field:'issueState',
title:'发布状态',
width:100,
},
{
field: 'articleTitle',
title: '标题',
width: 200
},
{
field: 'articleSubTitle',
title: '副标题',
width: 200,
},
{
field: 'articleTag',
title: '标签',
width: 200
},
{
field: 'articleContent',
title: '内容',
width: 200,
},
{
field: 'articleImage',
title: '图片',
width: 150
},
{
field: 'shopBuyLink',
title: '去购买链接',
width: 150
},
{
field: 'articleLink',
title: '文章链接',
width: 150
},
{
field: 'articleType',
title: '导购/帖子',
width: 100
},
{
field:'^',
title:'操作',
width:100,
formatter: function(value,row,index){
var articleId = row.articleId;
var oper = '<a href="javascript:void(0)" onclick="deleteArticle('+articleId+')">删除</a>';
return oper;
}
}
]
],
onDblClickRow: function(rowIndex, rowData){ // Double-click a row to open a dialog populated with its values
$('#articleDiv').dialog({
title: '文章详情',
width: 700,
height: 650,
closed: false,
cache: false,
modal: true,
minimizable:true,
maximizable:true
});
$("#articleDiv").css("display","block");
$("#articleId").val(rowData.articleId);
$("#collarTime").val(rowData.collarTime);
$("#articleTitle").val(rowData.articleTitle);
$("#articleSubTitle").val(rowData.articleSubTitle);
$("#articleTag").val(rowData.articleTag);
$("#shopBuyLink").val(rowData.shopBuyLink);
$("#articleContent").val(rowData.articleContent);
$("#articleImage").val(rowData.articleImage);
$("#articleLink").val(rowData.articleLink);
$("#issueState").val(rowData.issueState);
},
});
});
/**
* Delete an article by id
*/
function deleteArticle(articleId){
$.messager.confirm('确认','您确认想要删除记录吗?',function(r){
if (r){
$.ajax({
| article/deleteArticle?id=" + articleId,
type:"put",
dataType:"json",
success:function (rtn) {
$.messager.alert("提示",rtn.message,"info",function(){
if(rtn.success){
$('#dg').datagrid('reload');
}
});
}
});
}
});
}
/**
* Update an article
*/
function updateArticle() {
var articleId = $("#articleId").val();
var collarTime = $("#collarTime").val();
var articleTitle = $("#articleTitle").val();
var articleSubTitle = $("#articleSubTitle").val();
var articleTag = $("#articleTag").val();
var shopBuyLink = $("#shopBuyLink").val();
var articleContent = $("#articleContent").val();
var articleImage = $("#articleImage").val();
var articleLink = $("#articleLink").val();
var issueState = $("#issueState").val();
var jsonObject = {
articleId:articleId,
collarTime:collarTime,
articleTitle:articleTitle,
articleSubTitle:articleSubTitle,
articleTag:articleTag,
shopBuyLink:shopBuyLink,
articleContent:articleContent,
articleImage:articleImage,
articleLink:articleLink,
issueState:issueState,
};
var jsonString = JSON.stringify(jsonObject);
$.ajax({
url:"article/updateArticle",
type:"post",
data:{
article:jsonString
},
dataType:"json",
success:function (rtn) {
$.messager.alert('提示',rtn.message,'info',function(){
if(rtn.success){
$("#articleDiv").dialog("close");
$('#dg').datagrid('reload');
}
});
}
});
}
| url:" |
mock_auth.py | import urllib
import jose.jwt
import time
import random
import sys
import requests
from flask import Flask, request, redirect, make_response, jsonify
import subprocess
# seconds until the token expires
TOKEN_EXPIRES = 2
# A mocked out oauth server, which serves all the endpoints needed by the oauth type.
class MockOauthApp:
def __init__(self, port):
self.port = port
# mock flask app
self.app = Flask("mock_oauth_app")
self.app.add_url_rule("/authorize", view_func=self.api_authorize)
self.app.add_url_rule("/oauth/token", view_func=self.api_oauth_token, methods=["POST"])
self.app.add_url_rule("/v2/logout", view_func=self.api_logout)
self.app.add_url_rule("/.well-known/openid-configuration", view_func=self.api_openid_configuration)
self.app.add_url_rule("/.well-known/jwks.json", view_func=self.api_jwks)
def api_authorize(self):
callback = request.args.get("redirect_uri")
state = request.args.get("state")
return redirect(callback + f"?code=fakecode&state={state}")
def api_oauth_token(self):
expires_at = time.time() + TOKEN_EXPIRES
headers = dict(alg="RS256", kid="fake_kid")
payload = dict(
name="Fake User", sub="test_user_id", email="[email protected]", email_verified=True, exp=expires_at
)
jwt = jose.jwt.encode(claims=payload, key="mysecret", algorithm="HS256", headers=headers)
r = {
"access_token": f"access-{time.time()}",
"id_token": jwt,
"refresh_token": f"random-{time.time()}",
"scope": "openid profile email offline",
"expires_in": TOKEN_EXPIRES,
"token_type": "Bearer",
"expires_at": expires_at,
}
return make_response(jsonify(r))
def api_logout(self):
return_to = request.args.get("returnTo")
return redirect(return_to)
def api_openid_configuration(self):
data = dict(jwks_uri=f"http://localhost:{self.port}/.well-known/jwks.json")
return make_response(jsonify(data))
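# Serves a stub JWKS document whose "kid" matches the fake_kid used to sign the
# mock id_token above.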
def api_jwks(self):
data = dict(
alg="RS256",
kty="RSA",
use="sig",
kid="fake_kid",
)
return make_response(jsonify(dict(keys=[data])))
class MockOauthServer:
def __init__(self):
self.process = None
self.port = None
self.server_okay = False
def start(self):
self.port = random.randint(10000, 20000)
self.process = subprocess.Popen([sys.executable, __file__, str(self.port)])
# Verify that the mock oauth server is ready (accepting requests) before starting the tests.
self.server_okay = False
for _ in range(5):
try:
response = requests.get(f"http://localhost:{self.port}/.well-known/jwks.json")
if response.status_code == 200:
self.server_okay = True
break
except Exception:
pass
# wait one second and try again
time.sleep(1)
def terminate(self):
self.process.terminate()
def get_auth_token(app):
"""
Generated an auth token for testing.
:param app: a chalice app.
:return:
"""
headers = dict(host="localhost")
response = app.get("/dp/v1/login", headers=headers)
location = response.headers["Location"]
split = urllib.parse.urlsplit(location)
args = dict(urllib.parse.parse_qsl(split.query))
# follow redirect
url = f"/dp/v1/oauth2/callback?code=fakecode&state={args['state']}"
response = app.get(url, headers=dict(host="localhost", Cookie=response.headers["Set-Cookie"]))
return response.headers["Set-Cookie"]
if __name__ == "__main__":
port = int(sys.argv[1])
mock_app = MockOauthApp(port)
mock_app.app.run(port=port, debug=True)
| api_oauth_token |
wrappers.py | import copy
from gym import Wrapper
from pythogic.base.Symbol import Symbol
from pythogic.base.Alphabet import Alphabet
from pythogic.base.Formula import AtomicFormula, PathExpressionEventually, PathExpressionSequence, And, Not, \
LogicalTrue, PathExpressionStar
from pythogic.base.utils import _to_pythomata_dfa
from pythogic.ldlf_empty_traces.LDLf_EmptyTraces import LDLf_EmptyTraces
import numpy as np
from pythomata.base.Simulator import Simulator
from pythomata.base.utils import Sink
class BreakoutRABUWrapper(Wrapper):
"""Env wrapper for bottom-up rows deletion"""
def __init__(self, env):
super().__init__(env)
self.row_symbols = [Symbol(r) for r in ["r0", "r1", "r2"]]
self.dfa = self._build_automata()
self.goal_reward = 1000
self.transition_reward = 100
self.simulator = Simulator(self.dfa)
self.last_status = None
def reset(self):
obs = self.env.reset()
self.simulator.reset()
return obs
def step(self, action):
obs, reward, done, _ = self.env.step(action) | return obs, reward, done, _
# overwrite old reward
# reward = 0
f = self.state2propositional_formula()
old_state = self.simulator.cur_state
self.simulator.make_transition(f)
new_state = self.simulator.cur_state
if new_state==Sink():
done = True
reward = -1000
elif new_state in self.dfa.accepting_states:
reward = 1000
elif old_state!=new_state:
reward = self.transition_reward
return obs, reward, done or self.env.unwrapped.state.terminal, _
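# Builds the propositional interpretation fed to the DFA: the set of row symbols
# whose brick rows have been completely cleared.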
def state2propositional_formula(self):
e = self.unwrapped
matrix = e.state.bricks.bricks_status_matrix
row_status = np.all(matrix==0.0, axis=1)
result = set()
for rs, sym in zip(row_status, reversed(self.row_symbols)):
if rs:
result.add(sym)
return frozenset(result)
def _build_automata(self):
rows = self.row_symbols
atoms = [AtomicFormula(r) for r in rows]
alphabet = Alphabet(set(rows))
ldlf = LDLf_EmptyTraces(alphabet)
f = PathExpressionEventually(
PathExpressionSequence.chain([
PathExpressionStar(And.chain([Not(atoms[0]), Not(atoms[1]), Not(atoms[2])])),
PathExpressionStar(And.chain([atoms[0], Not(atoms[1]), Not(atoms[2])])),
# Not(atoms[3]), Not(atoms[4]), Not(atoms[5])]),
PathExpressionStar(And.chain([atoms[0], atoms[1], Not(atoms[2])])),
# Not(atoms[3]), Not(atoms[4]), Not(atoms[5])]),
# And.chain([atoms[0], atoms[1], atoms[2]]), # Not(atoms[3]), Not(atoms[4]), Not(atoms[5])]),
# And.chain([atoms[0], atoms[1], atoms[2], atoms[3], Not(atoms[4]), Not(atoms[5])]),
# And.chain([atoms[0], atoms[1], atoms[2], atoms[3], atoms[4], Not(atoms[5])]),
# And.chain([atoms[0], atoms[1], atoms[2], atoms[3], atoms[4], atoms[5] ])
]),
And.chain([atoms[0], atoms[1], atoms[2]])
)
nfa = ldlf.to_nfa(f)
dfa = _to_pythomata_dfa(nfa)
return dfa | if done:
# when we lose a life |
revoke-connection.go | /* For license and copyright information please see LEGAL file in repository */
package ss
import "../achaemenid"
var revokeConnectionService = achaemenid.Service{
Name: "RevokeConnection",
IssueDate: 0,
ExpiryDate: 0,
Status: achaemenid.ServiceStatePreAlpha,
Handler: RevokeConnection,
Description: []string{
`use to signal all server to revoke connection due leak data or user requested.`,
},
TAGS: []string{},
}
// RevokeConnection use to signal all server to revoke connection due leak data or user requested.
func RevokeConnection(s *achaemenid.Server, st *achaemenid.Stream) {
// It can just send by related Domain.Service
}
type revokeConnectionReq struct {
}
| RevokeConnection |
access_request_test.go | /* | * Copyright © 2015-2018 Aeneas Rekkas <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @author Aeneas Rekkas <[email protected]>
* @copyright 2015-2018 Aeneas Rekkas <[email protected]>
* @license Apache-2.0
*
*/
package fosite
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestAccessRequest(t *testing.T) {
ar := NewAccessRequest(nil)
ar.GrantTypes = Arguments{"foobar"}
ar.Client = &DefaultClient{}
ar.GrantScope("foo")
ar.SetRequestedAudience(Arguments{"foo", "foo", "bar"})
ar.SetRequestedScopes(Arguments{"foo", "foo", "bar"})
assert.True(t, ar.GetGrantedScopes().Has("foo"))
assert.NotNil(t, ar.GetRequestedAt())
assert.Equal(t, ar.GrantTypes, ar.GetGrantTypes())
assert.Equal(t, Arguments{"foo", "bar"}, ar.RequestedAudience)
assert.Equal(t, Arguments{"foo", "bar"}, ar.RequestedScope)
assert.Equal(t, ar.Client, ar.GetClient())
} | |
LineChart.js | import { bisector, line, curveLinear } from 'd3';
import PropTypes from 'prop-types';
import React from 'react';
import * as CustomPropTypes from './utils/CustomPropTypes';
import { getValue } from './utils/Data';
import xyPropsEqual from './utils/xyPropsEqual';
/**
* `LineChart` displays a series of points connected by straight line segments.
* Each `LineChart` renders one line.
*/
export default class LineChart extends React.Component {
static propTypes = {
/**
* Array of data objects
*/
data: PropTypes.array.isRequired,
/**
* Accessor function for line X values, called once per datum, or a single value to be used for the entire line.
*/
x: CustomPropTypes.valueOrAccessor,
/**
* Accessor function for line Y values, called once per datum, or a single value to be used for the entire line.
*/
y: CustomPropTypes.valueOrAccessor,
/**
* Inline style object to be applied to the line path.
*/
lineStyle: PropTypes.object,
/**
* Class attribute to be applied to the line path.
*/
lineClassName: PropTypes.string,
/**
* D3 scale for X axis - provided by XYPlot. | xScale: PropTypes.func,
/**
* D3 scale for Y axis - provided by XYPlot.
*/
yScale: PropTypes.func,
/**
* D3 curve for path generation
*/
curve: PropTypes.func,
};
static defaultProps = {
lineStyle: {},
lineClassName: '',
curve: curveLinear,
};
shouldComponentUpdate(nextProps) {
return !xyPropsEqual(this.props, nextProps, ['lineStyle', 'lineClassName']);
}
/* eslint-disable-next-line camelcase */
UNSAFE_componentWillMount() {
this.initBisector(this.props);
}
/* eslint-disable-next-line camelcase */
UNSAFE_componentWillReceiveProps(nextProps) {
this.initBisector(nextProps);
}
initBisector(props) {
this.setState({
bisectX: bisector(d => getValue(props.x, d)).left,
});
}
getHovered = x => {
const closestDataIndex = this.state.bisectX(this.props.data, x);
return this.props.data[closestDataIndex];
};
render() {
const {
data,
xScale,
yScale,
x,
y,
curve,
lineStyle,
lineClassName,
} = this.props;
const pathStr = line()
.curve(curve)
.x((d, i) => xScale(getValue(x, d, i)))
.y((d, i) => yScale(getValue(y, d, i)))(data);
return (
<g className={`rct-line-chart ${lineClassName}`}>
<path className="rct-line-path" d={pathStr} style={lineStyle} />
</g>
);
}
} | */ |
redirect.go | // +build !windows
package gost
import (
"errors"
"fmt"
"net"
"syscall"
"github.com/go-log/log"
)
type tcpRedirectHandler struct {
options *HandlerOptions
}
// TCPRedirectHandler creates a server Handler for TCP redirect server.
func TCPRedirectHandler(opts ...HandlerOption) Handler {
h := &tcpRedirectHandler{
options: &HandlerOptions{
Chain: new(Chain),
},
}
for _, opt := range opts {
opt(h.options)
}
return h
}
func (h *tcpRedirectHandler) Handle(c net.Conn) {
conn, ok := c.(*net.TCPConn)
if !ok {
log.Log("[red-tcp] not a TCP connection")
}
srcAddr := conn.RemoteAddr()
dstAddr, conn, err := h.getOriginalDstAddr(conn)
if err != nil {
log.Logf("[red-tcp] %s -> %s : %s", srcAddr, dstAddr, err)
return
}
defer conn.Close()
log.Logf("[red-tcp] %s -> %s", srcAddr, dstAddr)
cc, err := h.options.Chain.Dial(dstAddr.String())
if err != nil |
defer cc.Close()
log.Logf("[red-tcp] %s <-> %s", srcAddr, dstAddr)
transport(conn, cc)
log.Logf("[red-tcp] %s >-< %s", srcAddr, dstAddr)
}
func (h *tcpRedirectHandler) getOriginalDstAddr(conn *net.TCPConn) (addr net.Addr, c *net.TCPConn, err error) {
defer conn.Close()
fc, err := conn.File()
if err != nil {
return
}
defer fc.Close()
mreq, err := syscall.GetsockoptIPv6Mreq(int(fc.Fd()), syscall.IPPROTO_IP, 80)
if err != nil {
return
}
// only ipv4 support
ip := net.IPv4(mreq.Multiaddr[4], mreq.Multiaddr[5], mreq.Multiaddr[6], mreq.Multiaddr[7])
port := uint16(mreq.Multiaddr[2])<<8 + uint16(mreq.Multiaddr[3])
addr, err = net.ResolveTCPAddr("tcp4", fmt.Sprintf("%s:%d", ip.String(), port))
if err != nil {
return
}
cc, err := net.FileConn(fc)
if err != nil {
return
}
c, ok := cc.(*net.TCPConn)
if !ok {
err = errors.New("not a TCP connection")
}
return
}
| {
log.Logf("[red-tcp] %s -> %s : %s", srcAddr, dstAddr, err)
return
} |
path.rs | // Copyright (c) 2016 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::path::Path;
use hcore::package::{PackageIdent, PackageInstall};
use error::Result;
pub fn start(ident: &PackageIdent, fs_root_path: &Path) -> Result<()> | {
let pkg_install = try!(PackageInstall::load(ident, Some(fs_root_path)));
println!("{}", pkg_install.installed_path().display());
Ok(())
} |
|
EWSO365.py | import random
import string
from typing import Dict
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
import sys
import traceback
import json
import os
import hashlib
from datetime import timedelta
from io import StringIO
import logging
import warnings
import email
from requests.exceptions import ConnectionError
from collections import deque
from multiprocessing import Process
import exchangelib
from exchangelib.errors import (
ErrorItemNotFound,
ResponseMessageError,
RateLimitError,
ErrorInvalidIdMalformed,
ErrorFolderNotFound,
ErrorMailboxStoreUnavailable,
ErrorMailboxMoveInProgress,
ErrorNameResolutionNoResults,
MalformedResponseError,
)
from exchangelib.items import Item, Message, Contact
from exchangelib.services.common import EWSService, EWSAccountService
from exchangelib.util import create_element, add_xml_child, MNS, TNS
from exchangelib import (
IMPERSONATION,
Account,
EWSDateTime,
EWSTimeZone,
Configuration,
FileAttachment,
Version,
Folder,
HTMLBody,
Body,
ItemAttachment,
OAUTH2,
OAuth2AuthorizationCodeCredentials,
Identity,
ExtendedProperty
)
from oauthlib.oauth2 import OAuth2Token
from exchangelib.version import EXCHANGE_O365
from exchangelib.protocol import BaseProtocol, NoVerifyHTTPAdapter
# Ignore warnings print to stdout
warnings.filterwarnings("ignore")
""" Constants """
APP_NAME = "ms-ews-o365"
FOLDER_ID_LEN = 120
MAX_INCIDENTS_PER_FETCH = 50
# move results
MOVED_TO_MAILBOX = "movedToMailbox"
MOVED_TO_FOLDER = "movedToFolder"
# item types
FILE_ATTACHMENT_TYPE = "FileAttachment"
ITEM_ATTACHMENT_TYPE = "ItemAttachment"
ATTACHMENT_TYPE = "attachmentType"
TOIS_PATH = "/root/Top of Information Store/"
# context keys
ATTACHMENT_ID = "attachmentId"
ATTACHMENT_ORIGINAL_ITEM_ID = "originalItemId"
NEW_ITEM_ID = "newItemId"
MESSAGE_ID = "messageId"
ITEM_ID = "itemId"
ACTION = "action"
MAILBOX = "mailbox"
MAILBOX_ID = "mailboxId"
FOLDER_ID = "id"
TARGET_MAILBOX = 'receivedBy'
# context paths
CONTEXT_UPDATE_EWS_ITEM = f"EWS.Items((val.{ITEM_ID} === obj.{ITEM_ID} || " \
f"(val.{MESSAGE_ID} && obj.{MESSAGE_ID} && val.{MESSAGE_ID} === obj.{MESSAGE_ID}))" \
f" && val.{TARGET_MAILBOX} === obj.{TARGET_MAILBOX})"
CONTEXT_UPDATE_EWS_ITEM_FOR_ATTACHMENT = "EWS.Items(val.{0} == obj.{1})".format(
ITEM_ID, ATTACHMENT_ORIGINAL_ITEM_ID
)
CONTEXT_UPDATE_ITEM_ATTACHMENT = ".ItemAttachments(val.{0} == obj.{0})".format(
ATTACHMENT_ID
)
CONTEXT_UPDATE_FILE_ATTACHMENT = ".FileAttachments(val.{0} == obj.{0})".format(
ATTACHMENT_ID
)
CONTEXT_UPDATE_FOLDER = "EWS.Folders(val.{0} == obj.{0})".format(FOLDER_ID)
# fetch params
LAST_RUN_TIME = "lastRunTime"
LAST_RUN_IDS = "ids"
LAST_RUN_FOLDER = "folderName"
ERROR_COUNTER = "errorCounter"
# headers
ITEMS_RESULTS_HEADERS = [
"sender",
"subject",
"hasAttachments",
"datetimeReceived",
"receivedBy",
"author",
"toRecipients",
"textBody",
]
UTF_8 = 'utf-8'
""" Classes """
class ProxyAdapter(requests.adapters.HTTPAdapter):
"""
Proxy Adapter used to add PROXY to requests
"""
def send(self, *args, **kwargs):
kwargs['proxies'] = handle_proxy()
return super().send(*args, **kwargs)
class InsecureProxyAdapter(NoVerifyHTTPAdapter):
"""
Insecure Proxy Adapter used to add PROXY and INSECURE to requests
NoVerifyHTTPAdapter is a built-in insecure HTTPAdapter class
"""
def send(self, *args, **kwargs):
kwargs['proxies'] = handle_proxy()
return super().send(*args, **kwargs)
class EWSClient:
def __init__(
self,
default_target_mailbox,
client_id,
client_secret,
tenant_id,
folder="Inbox",
is_public_folder=False,
request_timeout="120",
max_fetch=MAX_INCIDENTS_PER_FETCH,
self_deployed=True,
insecure=True,
proxy=False,
**kwargs,
):
"""
Client used to communicate with EWS
:param default_target_mailbox: Email address from which to fetch incidents
:param client_id: Application client ID
:param client_secret: Application client secret
:param folder: Name of the folder from which to fetch incidents
:param is_public_folder: Public Folder flag
:param request_timeout: Timeout (in seconds) for HTTP requests to Exchange Server
:param max_fetch: Max incidents per fetch
:param insecure: Trust any certificate (not secure)
"""
BaseProtocol.TIMEOUT = int(request_timeout)
self.ews_server = "https://outlook.office365.com/EWS/Exchange.asmx/"
self.ms_client = MicrosoftClient(
tenant_id=tenant_id,
auth_id=client_id,
enc_key=client_secret,
app_name=APP_NAME,
base_url=self.ews_server,
verify=not insecure,
proxy=proxy,
self_deployed=self_deployed,
scope="https://outlook.office.com/.default",
)
self.folder_name = folder
self.is_public_folder = is_public_folder
self.access_type = kwargs.get('access_type') or IMPERSONATION
self.max_fetch = min(MAX_INCIDENTS_PER_FETCH, int(max_fetch))
self.last_run_ids_queue_size = 500
self.client_id = client_id
self.client_secret = client_secret
self.account_email = default_target_mailbox
self.config = self.__prepare(insecure)
self.protocol = BaseProtocol(self.config)
def __prepare(self, insecure):
"""
Prepares the client PROTOCOL, CREDENTIALS and CONFIGURATION
:param insecure: Trust any certificate (not secure)
:return: OAuth 2 Configuration
"""
BaseProtocol.HTTP_ADAPTER_CLS = InsecureProxyAdapter if insecure else ProxyAdapter
access_token = self.ms_client.get_access_token()
oauth2_token = OAuth2Token({"access_token": access_token})
self.credentials = credentials = OAuth2AuthorizationCodeCredentials(
client_id=self.client_id,
client_secret=self.client_secret,
access_token=oauth2_token,
)
# need to add identity for protocol OAuth header
self.credentials.identity = Identity(upn=self.account_email)
config_args = {
"credentials": credentials,
"auth_type": OAUTH2,
"version": Version(EXCHANGE_O365),
"service_endpoint": "https://outlook.office365.com/EWS/Exchange.asmx",
}
return Configuration(**config_args)
def get_account(self, target_mailbox=None):
"""
Request an account from EWS
:param (Optional) target_mailbox: Mailbox associated with the requested account
:return: exchangelib Account
"""
if not target_mailbox:
target_mailbox = self.account_email
return Account(
primary_smtp_address=target_mailbox,
autodiscover=False,
config=self.config,
access_type=self.access_type,
)
def get_items_from_mailbox(self, account, item_ids):
"""
Request specific items from a mailbox associated with an account
:param account: EWS account or target_mailbox associated with that account
:param item_ids: item_ids of the requested items
:return: list of exchangelib Items
"""
# allow user to pass target_mailbox as account
if isinstance(account, str):
account = self.get_account(account)
else:
account = self.get_account(self.account_email)
if type(item_ids) is not list:
item_ids = [item_ids]
items = [Item(id=x) for x in item_ids]
result = list(account.fetch(ids=items))
result = [x for x in result if not isinstance(x, ErrorItemNotFound)]
if len(result) != len(item_ids):
raise Exception(
"One or more items were not found. Check the input item ids"
)
return result
def get_item_from_mailbox(self, account, item_id):
"""
Request a single item from a mailbox associated with an account
:param account: EWS account or target_mailbox associated with that account
:param item_id: item_id of the requested item
:return: exchangelib Item
"""
result = self.get_items_from_mailbox(account, [item_id])
if len(result) == 0:
raise Exception(f"ItemId {str(item_id)} not found")
return result[0]
def get_attachments_for_item(self, item_id, account, attachment_ids=None):
"""
Request attachments for an item
:param item_id: item_id of the item to retrieve attachments from
:param account: EWS account or target_mailbox associated with that account
:param (Optional) attachment_ids: attachment_ids: attachment_ids to retrieve
:return: list of exchangelib Item.attachments
"""
item = self.get_item_from_mailbox(account, item_id)
attachments = []
attachment_ids = argToList(attachment_ids)
if item:
if item.attachments:
for attachment in item.attachments:
if (
attachment_ids
and attachment.attachment_id.id not in attachment_ids
):
continue
attachments.append(attachment)
else:
raise Exception("Message item not found: " + item_id)
if attachment_ids and len(attachments) < len(attachment_ids):
raise Exception(
"Some attachment id did not found for message:" + str(attachment_ids)
)
return attachments
def is_default_folder(self, folder_path, is_public=None):
"""
Is the given folder_path public
:param folder_path: folder path to check if is public
:param is_public: (Optional) if provided, will return this value
:return: Boolean
"""
if is_public is not None:
return is_public
if folder_path == self.folder_name:
return self.is_public_folder
return False
def get_folder_by_path(self, path, account=None, is_public=False):
"""
Retrieve folder by path
:param path: path of the folder
:param account: account associated with the requested path
:param is_public: is the requested folder public
:return: exchangelib Folder
"""
if account is None:
account = self.get_account()
# handle exchange folder id
if len(path) == FOLDER_ID_LEN:
folders_map = account.root._folders_map
if path in folders_map:
return account.root._folders_map[path]
if is_public:
folder_result = account.public_folders_root
elif path == "AllItems":
folder_result = account.root
else:
folder_result = account.inbox.parent # Top of Information Store
path = path.replace("/", "\\")
path = path.split("\\")
for sub_folder_name in path:
folder_filter_by_name = [
x
for x in folder_result.children
if x.name.lower() == sub_folder_name.lower()
]
if len(folder_filter_by_name) == 0:
raise Exception(f"No such folder {path}")
folder_result = folder_filter_by_name[0]
return folder_result
def send_email(self, message: Message):
account = self.get_account()
message.account = account
message.send_and_save()
class MarkAsJunk(EWSAccountService):
"""
EWSAccountService class used for marking items as junk
"""
SERVICE_NAME = "MarkAsJunk"
def call(self, item_id, move_item):
elements = list(
self._get_elements(
payload=self.get_payload(item_id=item_id, move_item=move_item)
)
)
for element in elements:
if isinstance(element, ResponseMessageError):
return str(element)
return "Success"
def get_payload(self, item_id, move_item):
junk = create_element(
f"m:{self.SERVICE_NAME}",
{"IsJunk": "true", "MoveItem": "true" if move_item else "false"},
)
items_list = create_element("m:ItemIds")
item_element = create_element("t:ItemId", {"Id": item_id})
items_list.append(item_element)
junk.append(items_list)
return junk
class GetSearchableMailboxes(EWSService):
"""
EWSAccountService class used for getting Searchable Mailboxes
"""
SERVICE_NAME = "GetSearchableMailboxes"
element_container_name = f"{{{MNS}}}SearchableMailboxes"
@staticmethod
def parse_element(element):
return {
MAILBOX: element.find(f"{{{TNS}}}PrimarySmtpAddress").text
if element.find(f"{{{TNS}}}PrimarySmtpAddress") is not None
else None,
MAILBOX_ID: element.find(f"{{{TNS}}}ReferenceId").text
if element.find(f"{{{TNS}}}ReferenceId") is not None
else None,
"displayName": element.find(f"{{{TNS}}}DisplayName").text
if element.find(f"{{{TNS}}}DisplayName") is not None
else None,
"isExternal": element.find(f"{{{TNS}}}IsExternalMailbox").text
if element.find(f"{{{TNS}}}IsExternalMailbox") is not None
else None,
"externalEmailAddress": element.find(f"{{{TNS}}}ExternalEmailAddress").text
if element.find(f"{{{TNS}}}ExternalEmailAddress") is not None
else None,
}
def call(self):
elements = self._get_elements(payload=self.get_payload())
return [
self.parse_element(x)
for x in elements
if x.find(f"{{{TNS}}}ReferenceId").text
]
def get_payload(self):
element = create_element(f"m:{self.SERVICE_NAME}")
return element
class ExpandGroup(EWSService):
"""
EWSAccountService class used for expanding groups
"""
SERVICE_NAME = "ExpandDL"
element_container_name = f"{{{MNS}}}DLExpansion"
@staticmethod
def parse_element(element):
return {
MAILBOX: element.find(f"{{{TNS}}}EmailAddress").text
if element.find(f"{{{TNS}}}EmailAddress") is not None
else None,
"displayName": element.find(f"{{{TNS}}}Name").text
if element.find(f"{{{TNS}}}Name") is not None
else None,
"mailboxType": element.find(f"{{{TNS}}}MailboxType").text
if element.find(f"{{{TNS}}}MailboxType") is not None
else None,
}
def call(self, email_address, recursive_expansion=False):
try:
if recursive_expansion == "True":
group_members: Dict = {}
self.expand_group_recursive(email_address, group_members)
return list(group_members.values())
else:
return self.expand_group(email_address)
except ErrorNameResolutionNoResults:
demisto.results("No results were found.")
sys.exit()
def | (self, email_address):
element = create_element(f"m:{self.SERVICE_NAME}")
mailbox_element = create_element("m:Mailbox")
add_xml_child(mailbox_element, "t:EmailAddress", email_address)
element.append(mailbox_element)
return element
def expand_group(self, email_address):
"""
Expand given group
:param email_address: email address of the group to expand
:return: list dict with parsed expanded group data
"""
elements = self._get_elements(payload=self.get_payload(email_address))
return [self.parse_element(x) for x in elements]
def expand_group_recursive(self, email_address, non_dl_emails, dl_emails=None):
"""
Expand group recursively
:param email_address: email address of the group to expand
        :param non_dl_emails: dict of regular (non distribution-list) member emails collected so far
        :param dl_emails: (Optional) set of distribution-list emails that were already expanded
:return: Set of dl emails and non dl emails (returned via reference)
"""
if dl_emails is None:
dl_emails = set()
if email_address in non_dl_emails or email_address in dl_emails:
return None
dl_emails.add(email_address)
for member in self.expand_group(email_address):
if (
member["mailboxType"] == "PublicDL"
or member["mailboxType"] == "PrivateDL"
):
self.expand_group_recursive(member.get("mailbox"), non_dl_emails, dl_emails)
else:
if member["mailbox"] not in non_dl_emails:
non_dl_emails[member["mailbox"]] = member
# If you are modifying this, you probably also need to modify it in other files
def exchangelib_cleanup():
key_protocols = list(exchangelib.protocol.CachingProtocol._protocol_cache.items())
try:
exchangelib.close_connections()
except Exception as ex:
demisto.error("Error was found in exchangelib cleanup, ignoring: {}".format(ex))
for key, protocol in key_protocols:
try:
if "thread_pool" in protocol.__dict__:
demisto.debug(
"terminating thread pool key{} id: {}".format(
key, id(protocol.thread_pool)
)
)
protocol.thread_pool.terminate()
del protocol.__dict__["thread_pool"]
else:
demisto.info(
"Thread pool not found (ignoring terminate) in protcol dict: {}".format(
dir(protocol.__dict__)
)
)
except Exception as ex:
demisto.error("Error with thread_pool.terminate, ignoring: {}".format(ex))
""" LOGGING """
log_stream = None
log_handler = None
def start_logging():
global log_stream
global log_handler
logging.raiseExceptions = False
if log_stream is None:
log_stream = StringIO()
log_handler = logging.StreamHandler(stream=log_stream)
log_handler.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
logger = logging.getLogger()
logger.addHandler(log_handler)
logger.setLevel(logging.DEBUG)
""" Helper Functions """
def get_attachment_name(attachment_name):
"""
Retrieve attachment name or error string if none is provided
:param attachment_name: attachment name to retrieve
:return: string
"""
if attachment_name is None or attachment_name == "":
return "demisto_untitled_attachment"
return attachment_name
def get_entry_for_object(title, context_key, obj, headers=None):
"""
Create an entry for a given object
:param title: Title of the human readable
:param context_key: Context key used for entry context
:param obj: Object to create entry for
:param headers: (Optional) headers used in the tableToMarkDown
:return: Entry object to be used with demisto.results()
"""
if len(obj) == 0:
return "There is no output results"
if headers and isinstance(obj, dict):
headers = list(set(headers).intersection(set(obj.keys())))
return {
"Type": entryTypes["note"],
"Contents": obj,
"ContentsFormat": formats["json"],
"ReadableContentsFormat": formats["markdown"],
"HumanReadable": tableToMarkdown(title, obj, headers),
"EntryContext": {context_key: obj},
}
def prepare_args(args):
"""
Prepare arguments to be used as the API expects it
:param args: demisto args
:return: transformed args
"""
args = dict((k.replace("-", "_"), v) for k, v in list(args.items()))
if "is_public" in args:
args["is_public"] = args["is_public"] == "True"
return args
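# Minimal usage sketch (argument values hypothetical): prepare_args({"item-id": "AAMkAD...",
# "is_public": "True"}) returns {"item_id": "AAMkAD...", "is_public": True}; dashes in keys
# become underscores and the is_public flag is converted to a real boolean.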
def get_limited_number_of_messages_from_qs(qs, limit):
"""
Retrieve a limited number of messages from query search
:param qs: query search to execute
:param limit: limit on number of items to retrieve from search
:return: list of exchangelib.Message
"""
count = 0
results = []
for item in qs:
if count == limit:
break
if isinstance(item, Message):
count += 1
results.append(item)
return results
def keys_to_camel_case(value):
"""
Transform keys from snake to camel case (does nothing if no snakes are found)
:param value: value to transform
:return: transformed value
"""
def str_to_camel_case(snake_str):
components = snake_str.split("_")
return components[0] + "".join(x.title() for x in components[1:])
if value is None:
return None
if isinstance(value, (list, set)):
return list(map(keys_to_camel_case, value))
if isinstance(value, dict):
return dict(
(
keys_to_camel_case(k),
keys_to_camel_case(v) if isinstance(v, (list, dict)) else v,
)
for (k, v) in list(value.items())
)
return str_to_camel_case(value)
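# Example: keys_to_camel_case({"item_id": 1, "to_recipients": []}) yields
# {"itemId": 1, "toRecipients": []}; keys are converted recursively through nested
# dicts and lists, while scalar values are left untouched.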
def get_last_run(client: EWSClient, last_run=None):
"""
Retrieve the last run time
:param client: EWS Client
:param last_run: (Optional) last run object
:return: last run dict
"""
if not last_run or last_run.get(LAST_RUN_FOLDER) != client.folder_name:
last_run = {
LAST_RUN_TIME: None,
LAST_RUN_FOLDER: client.folder_name,
LAST_RUN_IDS: [],
}
if LAST_RUN_TIME in last_run and last_run[LAST_RUN_TIME] is not None:
last_run[LAST_RUN_TIME] = EWSDateTime.from_string(last_run[LAST_RUN_TIME])
# In case we have existing last_run data
if last_run.get(LAST_RUN_IDS) is None:
last_run[LAST_RUN_IDS] = []
return last_run
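# The resulting last run object has the shape (values illustrative):
# {"lastRunTime": "2021-01-01T00:00:00Z", "folderName": "Inbox", "ids": []};
# lastRunTime is converted back into an EWSDateTime whenever it is present.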
def email_ec(item):
"""
Create entry context for an email
:param item: exchangelib.Item
:return: entry context dict
"""
return {
"CC": None
if not item.cc_recipients
else [mailbox.email_address for mailbox in item.cc_recipients],
"BCC": None
if not item.bcc_recipients
else [mailbox.email_address for mailbox in item.bcc_recipients],
"To": None
if not item.to_recipients
else [mailbox.email_address for mailbox in item.to_recipients],
"From": item.author.email_address,
"Subject": item.subject,
"Text": item.text_body,
"HTML": item.body,
"HeadersMap": {header.name: header.value for header in item.headers},
}
def parse_item_as_dict(item, email_address=None, camel_case=False, compact_fields=False):
"""
Parses an exchangelib item as a dict
:param item: exchangelib.Item to parse
:param (Optional) email_address: string mailbox
:param (Optional) camel_case: Is camel case
:param (Optional) compact_fields: Is compact fields
:return: Item as a dict
"""
def parse_object_as_dict(obj):
raw_dict = {}
if obj is not None:
for field in obj.FIELDS:
raw_dict[field.name] = getattr(obj, field.name, None)
return raw_dict
def parse_folder_as_json(folder):
raw_dict = parse_object_as_dict(folder)
if "parent_folder_id" in raw_dict:
raw_dict["parent_folder_id"] = parse_folder_as_json(
raw_dict["parent_folder_id"]
)
if "effective_rights" in raw_dict:
raw_dict["effective_rights"] = parse_object_as_dict(
raw_dict["effective_rights"]
)
return raw_dict
raw_dict = {}
for field, value in item._field_vals():
if type(value) in [str, str, int, float, bool, Body, HTMLBody, None]:
raw_dict[field] = value
raw_dict["id"] = item.id
if getattr(item, "attachments", None):
raw_dict["attachments"] = [
parse_attachment_as_dict(item.id, x) for x in item.attachments
]
for time_field in [
"datetime_sent",
"datetime_created",
"datetime_received",
"last_modified_time",
"reminder_due_by",
]:
value = getattr(item, time_field, None)
if value:
raw_dict[time_field] = value.ewsformat()
for dict_field in [
"effective_rights",
"parent_folder_id",
"conversation_id",
"author",
"extern_id",
"received_by",
"received_representing",
"reply_to",
"sender",
"folder",
]:
value = getattr(item, dict_field, None)
if value:
if isinstance(value, list):
raw_dict[dict_field] = []
for single_val in value:
raw_dict[dict_field].append(parse_object_as_dict(single_val))
else:
raw_dict[dict_field] = parse_object_as_dict(value)
for list_dict_field in ["headers", "cc_recipients", "to_recipients"]:
value = getattr(item, list_dict_field, None)
if value:
raw_dict[list_dict_field] = [parse_object_as_dict(x) for x in value]
if getattr(item, "folder", None):
raw_dict["folder"] = parse_folder_as_json(item.folder)
folder_path = (
item.folder.absolute[len(TOIS_PATH):]
if item.folder.absolute.startswith(TOIS_PATH)
else item.folder.absolute
)
raw_dict["folder_path"] = folder_path
if compact_fields:
new_dict = {}
# noinspection PyListCreation
fields_list = [
"datetime_created",
"datetime_received",
"datetime_sent",
"sender",
"has_attachments",
"importance",
"message_id",
"last_modified_time",
"size",
"subject",
"text_body",
"headers",
"body",
"folder_path",
"is_read",
]
if "id" in raw_dict:
new_dict["itemId"] = raw_dict["id"]
fields_list.append("itemId")
for field in fields_list:
if field in raw_dict:
new_dict[field] = raw_dict.get(field)
for field in ["received_by", "author", "sender"]:
if field in raw_dict:
new_dict[field] = raw_dict.get(field, {}).get("email_address")
for field in ["to_recipients"]:
if field in raw_dict:
new_dict[field] = [x.get("email_address") for x in raw_dict[field]]
attachments = raw_dict.get("attachments")
if attachments and len(attachments) > 0:
file_attachments = [
x for x in attachments if x[ATTACHMENT_TYPE] == FILE_ATTACHMENT_TYPE
]
if len(file_attachments) > 0:
new_dict["FileAttachments"] = file_attachments
item_attachments = [
x for x in attachments if x[ATTACHMENT_TYPE] == ITEM_ATTACHMENT_TYPE
]
if len(item_attachments) > 0:
new_dict["ItemAttachments"] = item_attachments
raw_dict = new_dict
if camel_case:
raw_dict = keys_to_camel_case(raw_dict)
if email_address:
raw_dict[MAILBOX] = email_address
return raw_dict
def get_entry_for_file_attachment(item_id, attachment):
"""
Creates a file entry for an attachment
:param item_id: item_id of the attachment
:param attachment: attachment dict
:return: file entry dict for attachment
"""
entry = fileResult(get_attachment_name(attachment.name), attachment.content)
entry["EntryContext"] = {
CONTEXT_UPDATE_EWS_ITEM_FOR_ATTACHMENT
+ CONTEXT_UPDATE_FILE_ATTACHMENT: parse_attachment_as_dict(item_id, attachment)
}
return entry
def parse_attachment_as_dict(item_id, attachment):
"""
    Parses an attachment into a dict of its metadata
    :param item_id: item_id of the item containing the attachment
    :param attachment: exchangelib attachment object
    :return: dict with the attachment metadata
"""
try:
attachment_content = (
attachment.content
if isinstance(attachment, FileAttachment)
else attachment.item.mime_content
)
return {
ATTACHMENT_ORIGINAL_ITEM_ID: item_id,
ATTACHMENT_ID: attachment.attachment_id.id,
"attachmentName": get_attachment_name(attachment.name),
"attachmentSHA256": hashlib.sha256(attachment_content).hexdigest()
if attachment_content
else None,
"attachmentContentType": attachment.content_type,
"attachmentContentId": attachment.content_id,
"attachmentContentLocation": attachment.content_location,
"attachmentSize": attachment.size,
"attachmentLastModifiedTime": attachment.last_modified_time.ewsformat(),
"attachmentIsInline": attachment.is_inline,
ATTACHMENT_TYPE: FILE_ATTACHMENT_TYPE
if isinstance(attachment, FileAttachment)
else ITEM_ATTACHMENT_TYPE,
}
except TypeError as e:
if str(e) != "must be string or buffer, not None":
raise
return {
ATTACHMENT_ORIGINAL_ITEM_ID: item_id,
ATTACHMENT_ID: attachment.attachment_id.id,
"attachmentName": get_attachment_name(attachment.name),
"attachmentSHA256": None,
"attachmentContentType": attachment.content_type,
"attachmentContentId": attachment.content_id,
"attachmentContentLocation": attachment.content_location,
"attachmentSize": attachment.size,
"attachmentLastModifiedTime": attachment.last_modified_time.ewsformat(),
"attachmentIsInline": attachment.is_inline,
ATTACHMENT_TYPE: FILE_ATTACHMENT_TYPE
if isinstance(attachment, FileAttachment)
else ITEM_ATTACHMENT_TYPE,
}
def get_entry_for_item_attachment(item_id, attachment, target_email):
"""
Creates a note entry for an item attachment
:param item_id: Item id
:param attachment: exchangelib attachment
:param target_email: target email
:return: note entry dict for item attachment
"""
item = attachment.item
dict_result = parse_attachment_as_dict(item_id, attachment)
dict_result.update(
parse_item_as_dict(item, target_email, camel_case=True, compact_fields=True)
)
title = f'EWS get attachment got item for "{target_email}", "{get_attachment_name(attachment.name)}"'
return get_entry_for_object(
title,
CONTEXT_UPDATE_EWS_ITEM_FOR_ATTACHMENT + CONTEXT_UPDATE_ITEM_ATTACHMENT,
dict_result,
)
""" Command Functions """
def get_expanded_group(client: EWSClient, email_address, recursive_expansion=False):
"""
Retrieve expanded group command
:param client: EWS Client
:param email_address: Email address of the group to expand
:param (Optional) recursive_expansion: Whether to enable recursive expansion. Default is "False".
:return: Expanded groups output tuple
"""
group_members = ExpandGroup(protocol=client.protocol).call(
email_address, recursive_expansion
)
group_details = {"name": email_address, "members": group_members}
output = {"EWS.ExpandGroup": group_details}
readable_output = tableToMarkdown("Group Members", group_members)
return readable_output, output, group_details
def get_searchable_mailboxes(client: EWSClient):
"""
Retrieve searchable mailboxes command
:param client: EWS Client
:return: Searchable mailboxes output tuple
"""
searchable_mailboxes = GetSearchableMailboxes(protocol=client.protocol).call()
readable_output = tableToMarkdown(
"Searchable mailboxes", searchable_mailboxes, headers=["displayName", "mailbox"]
)
output = {"EWS.Mailboxes": searchable_mailboxes}
return readable_output, output, searchable_mailboxes
def delete_attachments_for_message(
client: EWSClient, item_id, target_mailbox=None, attachment_ids=None
):
"""
Deletes attachments for a given message
:param client: EWS Client
:param item_id: item id
:param (Optional) target_mailbox: target mailbox
:param (Optional) attachment_ids: attachment ids to delete
    :return: entries that were deleted
"""
attachments = client.get_attachments_for_item(
item_id, target_mailbox, attachment_ids
)
deleted_file_attachments = []
deleted_item_attachments = [] # type: ignore
for attachment in attachments:
attachment_deleted_action = {
ATTACHMENT_ID: attachment.attachment_id.id,
ACTION: "deleted",
}
if isinstance(attachment, FileAttachment):
deleted_file_attachments.append(attachment_deleted_action)
else:
deleted_item_attachments.append(attachment_deleted_action)
attachment.detach()
entries = []
if len(deleted_file_attachments) > 0:
entry = get_entry_for_object(
"Deleted file attachments",
"EWS.Items" + CONTEXT_UPDATE_FILE_ATTACHMENT,
deleted_file_attachments,
)
entries.append(entry)
if len(deleted_item_attachments) > 0:
entry = get_entry_for_object(
"Deleted item attachments",
"EWS.Items" + CONTEXT_UPDATE_ITEM_ATTACHMENT,
deleted_item_attachments,
)
entries.append(entry)
return entries
def fetch_attachments_for_message(
client: EWSClient, item_id, target_mailbox=None, attachment_ids=None
):
"""
Fetches attachments for a message
:param client: EWS Client
:param item_id: item id
:param (Optional) target_mailbox: target mailbox
:param (Optional) attachment_ids: attachment ids
:return: list of parsed entries
"""
account = client.get_account(target_mailbox)
attachments = client.get_attachments_for_item(item_id, account, attachment_ids)
entries = []
for attachment in attachments:
if isinstance(attachment, FileAttachment):
try:
if attachment.content:
entries.append(get_entry_for_file_attachment(item_id, attachment))
except TypeError as e:
if str(e) != "must be string or buffer, not None":
raise
else:
entries.append(
get_entry_for_item_attachment(
item_id, attachment, account.primary_smtp_address
)
)
if attachment.item.mime_content:
entries.append(
fileResult(
get_attachment_name(attachment.name) + ".eml",
attachment.item.mime_content,
)
)
return entries
def move_item_between_mailboxes(
client: EWSClient,
item_id,
destination_mailbox,
destination_folder_path,
source_mailbox=None,
is_public=None,
):
"""
Moves item between mailboxes
:param client: EWS Client
:param item_id: item id
:param destination_mailbox: destination mailbox
:param destination_folder_path: destination folder path
:param (Optional) source_mailbox: source mailbox
:param (Optional) is_public: is the destination folder public
:return: Output tuple
"""
source_account = client.get_account(source_mailbox)
destination_account = client.get_account(destination_mailbox)
is_public = client.is_default_folder(destination_folder_path, is_public)
destination_folder = client.get_folder_by_path(
destination_folder_path, destination_account, is_public
)
item = client.get_item_from_mailbox(source_account, item_id)
exported_items = source_account.export([item])
destination_account.upload([(destination_folder, exported_items[0])])
source_account.bulk_delete([item])
move_result = {
MOVED_TO_MAILBOX: destination_mailbox,
MOVED_TO_FOLDER: destination_folder_path,
}
readable_output = "Item was moved successfully."
output = {f"EWS.Items(val.itemId === '{item_id}')": move_result}
return readable_output, output, move_result
def move_item(
client: EWSClient, item_id, target_folder_path, target_mailbox=None, is_public=None
):
"""
Moves an item within the same mailbox
:param client: EWS Client
:param item_id: item id
:param target_folder_path: target folder path
:param (Optional) target_mailbox: mailbox containing the item
:param (Optional) is_public: is the destination folder public
:return: Output tuple
"""
account = client.get_account(target_mailbox)
is_public = client.is_default_folder(target_folder_path, is_public)
target_folder = client.get_folder_by_path(target_folder_path, is_public=is_public)
item = client.get_item_from_mailbox(account, item_id)
if isinstance(item, ErrorInvalidIdMalformed):
raise Exception("Item not found")
item.move(target_folder)
move_result = {
NEW_ITEM_ID: item.id,
ITEM_ID: item_id,
MESSAGE_ID: item.message_id,
ACTION: "moved",
}
readable_output = tableToMarkdown("Moved items", move_result)
output = {CONTEXT_UPDATE_EWS_ITEM: move_result}
return readable_output, output, move_result
def delete_items(client: EWSClient, item_ids, delete_type, target_mailbox=None):
"""
Delete items in a mailbox
:param client: EWS Client
:param item_ids: items ids to delete
    :param delete_type: delete type: trash/soft/hard
    :param (Optional) target_mailbox: mailbox containing the items
:return: Output tuple
"""
deleted_items = []
item_ids = argToList(item_ids)
items = client.get_items_from_mailbox(target_mailbox, item_ids)
delete_type = delete_type.lower()
for item in items:
item_id = item.id
if delete_type == "trash":
item.move_to_trash()
elif delete_type == "soft":
item.soft_delete()
elif delete_type == "hard":
item.delete()
else:
raise Exception(
f'invalid delete type: {delete_type}. Use "trash" \\ "soft" \\ "hard"'
)
deleted_items.append(
{
ITEM_ID: item_id,
MESSAGE_ID: item.message_id,
ACTION: f"{delete_type}-deleted",
}
)
readable_output = tableToMarkdown(
f"Deleted items ({delete_type} delete type)", deleted_items
)
output = {CONTEXT_UPDATE_EWS_ITEM: deleted_items}
return readable_output, output, deleted_items
def search_items_in_mailbox(
client: EWSClient,
query=None,
message_id=None,
folder_path="",
limit=100,
target_mailbox=None,
is_public=None,
selected_fields="all",
):
"""
Search items in mailbox
:param client: EWS Client
:param (Optional) query: query to execute
:param (Optional) message_id: message ids to search
:param (Optional) folder_path: folder path to search
:param (Optional) limit: max amount of items to fetch
:param (Optional) target_mailbox: mailbox containing the items
:param (Optional) is_public: is the targeted folder public
:param (Optional) selected_fields: Selected fields
:return: Output tuple
"""
if not query and not message_id:
return_error("Missing required argument. Provide query or message-id")
if message_id and message_id[0] != "<" and message_id[-1] != ">":
message_id = "<{}>".format(message_id)
account = client.get_account(target_mailbox)
limit = int(limit)
if folder_path.lower() == "inbox":
folders = [account.inbox]
elif folder_path:
is_public = client.is_default_folder(folder_path, is_public)
folders = [client.get_folder_by_path(folder_path, account, is_public)]
else:
folders = account.inbox.parent.walk() # pylint: disable=E1101
items = [] # type: ignore
selected_all_fields = selected_fields == "all"
if selected_all_fields:
restricted_fields = list([x.name for x in Message.FIELDS]) # type: ignore
else:
restricted_fields = set(argToList(selected_fields)) # type: ignore
restricted_fields.update(["id", "message_id"]) # type: ignore
for folder in folders:
if Message not in folder.supported_item_models:
continue
if query:
items_qs = folder.filter(query).only(*restricted_fields)
else:
items_qs = folder.filter(message_id=message_id).only(*restricted_fields)
items += get_limited_number_of_messages_from_qs(items_qs, limit)
if len(items) >= limit:
break
items = items[:limit]
searched_items_result = [
parse_item_as_dict(
item,
account.primary_smtp_address,
camel_case=True,
compact_fields=selected_all_fields,
)
for item in items
]
if not selected_all_fields:
searched_items_result = [
{k: v for (k, v) in i.items() if k in keys_to_camel_case(restricted_fields)}
for i in searched_items_result
]
for item in searched_items_result:
item["itemId"] = item.pop("id", "")
readable_output = tableToMarkdown(
"Searched items",
searched_items_result,
headers=ITEMS_RESULTS_HEADERS if selected_all_fields else None,
)
output = {CONTEXT_UPDATE_EWS_ITEM: searched_items_result}
return readable_output, output, searched_items_result
def get_out_of_office_state(client: EWSClient, target_mailbox=None):
"""
Retrieve get out of office state of the targeted mailbox
:param client: EWS Client
:param (Optional) target_mailbox: target mailbox
:return: Output tuple
"""
account = client.get_account(target_mailbox)
oof = account.oof_settings
oof_dict = {
"state": oof.state, # pylint: disable=E1101
"externalAudience": getattr(oof, "external_audience", None),
"start": oof.start.ewsformat() if oof.start else None, # pylint: disable=E1101
"end": oof.end.ewsformat() if oof.end else None, # pylint: disable=E1101
"internalReply": getattr(oof, "internal_replay", None),
"externalReply": getattr(oof, "external_replay", None),
MAILBOX: account.primary_smtp_address,
}
readable_output = tableToMarkdown(
f"Out of office state for {account.primary_smtp_address}", oof_dict
)
output = {f"Account.Email(val.Address == obj.{MAILBOX}).OutOfOffice": oof_dict}
return readable_output, output, oof_dict
def recover_soft_delete_item(
client: EWSClient,
message_ids,
target_folder_path="Inbox",
target_mailbox=None,
is_public=None,
):
"""
Recovers soft deleted items
:param client: EWS Client
:param message_ids: Message ids to recover
:param (Optional) target_folder_path: target folder path
:param (Optional) target_mailbox: target mailbox
:param (Optional) is_public: is the target folder public
:return:
"""
account = client.get_account(target_mailbox)
is_public = client.is_default_folder(target_folder_path, is_public)
target_folder = client.get_folder_by_path(target_folder_path, account, is_public)
recovered_messages = []
message_ids = argToList(message_ids)
items_to_recover = account.recoverable_items_deletions.filter( # pylint: disable=E1101
message_id__in=message_ids
).all() # pylint: disable=E1101
recovered_items = set()
for item in items_to_recover:
recovered_items.add(item)
if len(recovered_items) != len(message_ids):
missing_items = set(message_ids).difference(recovered_items)
raise Exception(
f"Some message ids are missing in recoverable items directory: {missing_items}"
)
for item in recovered_items:
item.move(target_folder)
recovered_messages.append(
{ITEM_ID: item.id, MESSAGE_ID: item.message_id, ACTION: "recovered"}
)
readable_output = tableToMarkdown("Recovered messages", recovered_messages)
output = {CONTEXT_UPDATE_EWS_ITEM: recovered_messages}
return readable_output, output, recovered_messages
def get_contacts(client: EWSClient, limit, target_mailbox=None):
"""
Retrieve contacts of the target mailbox or client mailbox
:param client: EWS Client
:param limit: max amount of contacts to retrieve
:param (Optional) target_mailbox: Target mailbox
:return:
"""
def parse_physical_address(address):
result = {}
for attr in ["city", "country", "label", "state", "street", "zipcode"]:
result[attr] = getattr(address, attr, None)
return result
def parse_phone_number(phone_number):
result = {}
for attr in ["label", "phone_number"]:
result[attr] = getattr(phone_number, attr, None)
return result
def parse_contact(contact):
contact_dict = dict(
(k, v if not isinstance(v, EWSDateTime) else v.ewsformat())
for k, v in list(contact._field_vals())
if isinstance(v, str) or isinstance(v, EWSDateTime)
)
if isinstance(contact, Contact) and contact.physical_addresses:
contact_dict["physical_addresses"] = list(
map(parse_physical_address, contact.physical_addresses)
)
if isinstance(contact, Contact) and contact.phone_numbers:
contact_dict["phone_numbers"] = list(
map(parse_phone_number, contact.phone_numbers)
)
if (
isinstance(contact, Contact)
and contact.email_addresses
and len(contact.email_addresses) > 0
):
contact_dict["emailAddresses"] = [x.email for x in contact.email_addresses]
contact_dict = keys_to_camel_case(contact_dict)
contact_dict = dict((k, v) for k, v in list(contact_dict.items()) if v)
contact_dict.pop("mimeContent", None)
contact_dict["originMailbox"] = target_mailbox
return contact_dict
account = client.get_account(target_mailbox)
contacts = []
for contact in account.contacts.all()[: int(limit)]: # pylint: disable=E1101
contacts.append(parse_contact(contact))
readable_output = tableToMarkdown(f"Email contacts for {target_mailbox}", contacts)
output = {"Account.Email(val.Address == obj.originMailbox).EwsContacts": contacts}
return readable_output, output, contacts
def create_folder(client: EWSClient, new_folder_name, folder_path, target_mailbox=None):
"""
Creates a folder in the target mailbox or the client mailbox
:param client: EWS Client
:param new_folder_name: new folder name
:param folder_path: path of the new folder
:param (Optional) target_mailbox: target mailbox
:return: Output tuple
"""
account = client.get_account(target_mailbox)
full_path = os.path.join(folder_path, new_folder_name)
try:
if client.get_folder_by_path(full_path, account):
return f"Folder {full_path} already exists",
except Exception:
pass
parent_folder = client.get_folder_by_path(folder_path, account)
f = Folder(parent=parent_folder, name=new_folder_name)
f.save()
client.get_folder_by_path(full_path, account)
return f"Folder {full_path} created successfully",
def find_folders(client: EWSClient, target_mailbox=None):
"""
Finds folders in the mailbox
:param client: EWS Client
:param (Optional) target_mailbox: target mailbox
:return: Output tuple
"""
account = client.get_account(target_mailbox)
root = account.root
if client.is_public_folder:
root = account.public_folders_root
folders = []
for f in root.walk(): # pylint: disable=E1101
folder = folder_to_context_entry(f)
folders.append(folder)
folders_tree = root.tree() # pylint: disable=E1101
readable_output = folders_tree
output = {"EWS.Folders(val.id == obj.id)": folders}
return readable_output, output, folders
def mark_item_as_junk(client: EWSClient, item_id, move_items, target_mailbox=None):
"""
Marks item as junk in the target mailbox or client mailbox
:param client: EWS Client
:param item_id: item ids to mark as junk
:param move_items: "yes" or "no" - to move or not to move to trash
:param (Optional) target_mailbox: target mailbox
:return:
"""
account = client.get_account(target_mailbox)
move_items = move_items.lower() == "yes"
ews_result = MarkAsJunk(account=account).call(item_id=item_id, move_item=move_items)
mark_as_junk_result = {
ITEM_ID: item_id,
}
if ews_result == "Success":
mark_as_junk_result[ACTION] = "marked-as-junk"
else:
raise Exception("Failed mark-item-as-junk with error: " + ews_result)
readable_output = tableToMarkdown("Mark item as junk", mark_as_junk_result)
output = {CONTEXT_UPDATE_EWS_ITEM: mark_as_junk_result}
return readable_output, output, mark_as_junk_result
def get_items_from_folder(
client: EWSClient,
folder_path,
limit=100,
target_mailbox=None,
is_public=None,
get_internal_item="no",
):
"""
Retrieve items from folder path
:param client: EWS Client
:param folder_path: folder path
:param (Optional) limit: max amount of items to retrieve
:param (Optional) target_mailbox: target mailbox
:param (Optional) is_public: is the folder public
:param (Optional) get_internal_item: should also retrieve internal items ("no" by default)
:return: Output tuple
"""
account = client.get_account(target_mailbox)
limit = int(limit)
get_internal_item = get_internal_item == "yes"
is_public = client.is_default_folder(folder_path, is_public)
folder = client.get_folder_by_path(folder_path, account, is_public)
qs = folder.filter().order_by("-datetime_created")[:limit]
items = get_limited_number_of_messages_from_qs(qs, limit)
items_result = []
for item in items:
item_attachment = parse_item_as_dict(
item, account.primary_smtp_address, camel_case=True, compact_fields=True
)
for attachment in item.attachments:
if (
get_internal_item
and isinstance(attachment, ItemAttachment)
and isinstance(attachment.item, Message)
):
                # if an item attachment was found - switch item to the attachment
item_attachment = parse_item_as_dict(
attachment.item,
account.primary_smtp_address,
camel_case=True,
compact_fields=True,
)
break
items_result.append(item_attachment)
hm_headers = [
"sender",
"subject",
"hasAttachments",
"datetimeReceived",
"receivedBy",
"author",
"toRecipients",
"id",
]
readable_output = tableToMarkdown(
"Items in folder " + folder_path, items_result, headers=hm_headers
)
output = {CONTEXT_UPDATE_EWS_ITEM: items_result}
return readable_output, output, items_result
def get_items(client: EWSClient, item_ids, target_mailbox=None):
"""
Get items from target mailbox or client mailbox
:param client: EWS Client
:param item_ids: item ids to retrieve
:param (Optional) target_mailbox: target mailbox to retrieve items from
:return:
"""
item_ids = argToList(item_ids)
account = client.get_account(target_mailbox)
items = client.get_items_from_mailbox(account, item_ids)
items = [x for x in items if isinstance(x, Message)]
items_as_incidents = [parse_incident_from_item(x) for x in items]
items_to_context = [
parse_item_as_dict(x, account.primary_smtp_address, True, True) for x in items
]
readable_output = tableToMarkdown(
"Get items", items_to_context, ITEMS_RESULTS_HEADERS
)
output = {
CONTEXT_UPDATE_EWS_ITEM: items_to_context,
"Email": [email_ec(item) for item in items],
}
return readable_output, output, items_as_incidents
def get_folder(client: EWSClient, folder_path, target_mailbox=None, is_public=None):
"""
Retrieve a folder from the target mailbox or client mailbox
:param client: EWS Client
:param folder_path: folder path to retrieve
:param (Optional) target_mailbox: target mailbox
:param (Optional) is_public: is the folder public
:return:
"""
account = client.get_account(target_mailbox)
is_public = client.is_default_folder(folder_path, is_public)
folder = folder_to_context_entry(
client.get_folder_by_path(folder_path, account=account, is_public=is_public)
)
readable_output = tableToMarkdown(f"Folder {folder_path}", folder)
output = {CONTEXT_UPDATE_FOLDER: folder}
return readable_output, output, folder
def folder_to_context_entry(f):
"""
Create a context entry from a folder response
:param f: folder response
:return: dict context entry
"""
try:
f_entry = {
"name": f.name,
"totalCount": f.total_count,
"id": f.id,
"childrenFolderCount": f.child_folder_count,
"changeKey": f.changekey,
}
if "unread_count" in [x.name for x in Folder.FIELDS]:
f_entry["unreadCount"] = f.unread_count
return f_entry
except AttributeError:
if isinstance(f, dict):
return {
"name": f.get("name"),
"totalCount": f.get("total_count"),
"id": f.get("id"),
"childrenFolderCount": f.get("child_folder_count"),
"changeKey": f.get("changekey"),
"unreadCount": f.get("unread_count"),
}
def mark_item_as_read(
client: EWSClient, item_ids, operation="read", target_mailbox=None
):
"""
Marks item as read
:param client: EWS Client
:param item_ids: items ids to mark as read
:param (Optional) operation: operation to execute
:param (Optional) target_mailbox: target mailbox
:return: Output tuple
"""
marked_items = []
item_ids = argToList(item_ids)
items = client.get_items_from_mailbox(target_mailbox, item_ids)
items = [x for x in items if isinstance(x, Message)]
for item in items:
item.is_read = operation == "read"
item.save()
marked_items.append(
{
ITEM_ID: item.id,
MESSAGE_ID: item.message_id,
ACTION: "marked-as-{}".format(operation),
}
)
readable_output = tableToMarkdown(
f"Marked items ({operation} marked operation)", marked_items
)
output = {CONTEXT_UPDATE_EWS_ITEM: marked_items}
return readable_output, output, marked_items
def random_word_generator(length):
"""Generate a random string of given length
"""
letters = string.ascii_lowercase
return ''.join(random.choice(letters) for i in range(length))
def handle_html(html_body):
"""
Extract all data-url content from within the html and return as separate attachments.
Due to security implications, we support only images here
We might not have Beautiful Soup so just do regex search
"""
attachments = []
clean_body = ''
last_index = 0
for i, m in enumerate(
re.finditer(r'<img.+?src=\"(data:(image\/.+?);base64,([a-zA-Z0-9+/=\r\n]+?))\"', html_body, re.I)):
attachment = {
'data': base64.b64decode(m.group(3)),
'name': f'image{i}'
}
attachment['cid'] = f'{attachment["name"]}@{random_word_generator(8)}.{random_word_generator(8)}'
attachments.append(attachment)
clean_body += html_body[last_index:m.start(1)] + 'cid:' + attachment['cid']
last_index = m.end() - 1
clean_body += html_body[last_index:]
return clean_body, attachments
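# Illustrative behaviour: an HTML body containing <img src="data:image/png;base64,iVBORw...">
# is returned with that src rewritten to "cid:image0@<random>.<random>", and the decoded
# image bytes are appended to the attachments list under the same cid for inline attachment.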
def collect_manual_attachments(manualAttachObj):
"""Collect all manual attachments' data
Args:
manualAttachObj (str): String representation of the manually attached files list.
Returns:
List[Dict]. List of the files data.
"""
manually_attached_objects = argToList(manualAttachObj)
attachments = []
for attachment in manually_attached_objects:
file_res = demisto.getFilePath(os.path.basename(attachment['RealFileName']))
path = file_res['path']
with open(path, 'rb') as fp:
data = fp.read()
attachments.append({
'name': attachment['FileName'],
'data': data,
'cid': ''
})
return attachments
def collect_attachments(attachments_ids, attachments_cids, attachments_names):
"""Collect all attachments' data
Args:
attachments_ids (str): String representation of the files ids list.
attachments_cids (str): String representation of the files content ids list.
attachments_names (str): String representation of the files names list.
Returns:
List[Dict]. List of the files data.
"""
attachments = []
files_ids = argToList(attachments_ids)
files_cids = argToList(attachments_cids)
files_names = argToList(attachments_names)
for index, file_id in enumerate(files_ids):
try:
file_res = demisto.getFilePath(file_id)
path = file_res['path']
if len(files_names) > index and files_names[index]:
filename = files_names[index]
else:
filename = file_res['name']
if len(files_cids) > index and files_cids[index]:
cid = files_cids[index]
else:
cid = ''
with open(path, 'rb') as fp:
data = fp.read()
attachments.append({
'name': filename,
'data': data,
'cid': cid
})
except Exception as e:
demisto.error(f'Invalid entry {file_id} with exception: {e}')
return_error(f'Entry {file_id} is not valid or is not a file entry')
return attachments
def handle_transient_files(transient_files, transient_files_contents, transient_files_cids):
"""Creates the transient attachments data
Args:
transient_files (str): String representation of the transient files names list.
transient_files_contents (str): String representation of the transient files content list.
transient_files_cids (str): String representation of the transient files content ids list.
Returns:
List[Dict]. List of the transient files data.
"""
transient_attachments = []
files_names = argToList(transient_files)
files_contents = argToList(transient_files_contents)
files_cids = argToList(transient_files_cids)
for index in range(len(files_names)):
file_name = files_names[index]
if index >= len(files_contents):
break
file_content = bytes(files_contents[index], UTF_8)
if index >= len(files_cids):
file_cid = ''
else:
file_cid = files_cids[index]
transient_attachments.append({
'name': file_name,
'data': file_content,
'cid': file_cid
})
return transient_attachments
def handle_template_params(template_params):
"""Translates the template params if they exist from the context
Args:
template_params (str): JSON string that represent the variables names to be replaced and the desired value.
Value can be either real value or context key to fetch the value from.
Returns:
Dict. `variable_name: value_to_use` of the templated parameters.
"""
actual_params = {}
if template_params:
try:
params = json.loads(template_params)
for p in params:
if params[p].get('value'):
actual_params[p] = params[p]['value']
elif params[p].get('key'):
actual_params[p] = demisto.dt(demisto.context(), params[p]['key'])
except ValueError as e:
return_error('Unable to parse template_params: %s' % (str(e)))
return actual_params
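# Illustrative input (parameter names hypothetical): templateParams='{"name": {"value": "John"},
# "id": {"key": "incident.id"}}' produces {"name": "John", "id": <value resolved from context>};
# "value" entries are taken as-is while "key" entries are looked up with demisto.dt.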
def create_message_object(to, cc, bcc, subject, body, additional_headers):
"""Creates the message object according to the existence of additional custom headers.
"""
if additional_headers:
return Message(
to_recipients=to,
cc_recipients=cc,
bcc_recipients=bcc,
subject=subject,
body=body,
**additional_headers
)
return Message(
to_recipients=to,
cc_recipients=cc,
bcc_recipients=bcc,
subject=subject,
body=body
)
def create_message(to, subject='', body='', bcc=None, cc=None, html_body=None, attachments=None,
additional_headers=None):
"""Creates the Message object that will be sent.
Args:
to (list): Main recipients.
cc (list): CC recipients.
bcc (list): BCC recipients.
subject (str): Email's subject.
body (str): Email's simple text body.
html_body (str): Email's html body.
attachments (list): Files to be attached to the mail, both inline and as files.
additional_headers (Dict): Custom headers to be added to the message.
Returns:
Message. Message object ready to be sent.
"""
if not html_body:
# This is a simple text message - we cannot have CIDs here
message = create_message_object(to, cc, bcc, subject, body, additional_headers)
for attachment in attachments:
if not attachment.get('cid'):
new_attachment = FileAttachment(name=attachment.get('name'), content=attachment.get('data'))
message.attach(new_attachment)
else:
html_body, html_attachments = handle_html(html_body)
attachments += html_attachments
message = create_message_object(to, cc, bcc, subject, HTMLBody(html_body), additional_headers)
for attachment in attachments:
if not attachment.get('cid'):
new_attachment = FileAttachment(name=attachment.get('name'), content=attachment.get('data'))
else:
new_attachment = FileAttachment(name=attachment.get('name'), content=attachment.get('data'),
is_inline=True, content_id=attachment.get('cid'))
message.attach(new_attachment)
return message
def add_additional_headers(additional_headers):
"""Adds custom headers to the Message object
Args:
additional_headers (str): Headers list as string. Example: headerName1=headerValue1,headerName2=headerValue2
Returns:
Dict. Headers dictionary in the form of: `header_name: header value`
"""
headers = dict()
for header in argToList(additional_headers):
header_name, header_value = header.split('=', 1)
class TempClass(ExtendedProperty):
distinguished_property_set_id = 'InternetHeaders'
property_name = header_name
property_type = 'String'
try:
Message.register(header_name, TempClass)
headers[header_name] = header_value
except ValueError as e:
demisto.debug('EWSO365 - Header ' + header_name + ' could not be registered. ' + str(e))
return headers
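# Illustrative call (header names hypothetical): add_additional_headers("X-Custom=1,X-Trace=abc")
# registers each name as an InternetHeaders extended property on Message and returns
# {"X-Custom": "1", "X-Trace": "abc"}, later passed as keyword arguments when the message is built.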
def send_email(client: EWSClient, to, subject='', body="", bcc=None, cc=None, htmlBody=None,
attachIDs="", attachCIDs="", attachNames="", manualAttachObj=None,
transientFile=None, transientFileContent=None, transientFileCID=None, templateParams=None,
additionalHeader=None, raw_message=None):
to = argToList(to)
cc = argToList(cc)
bcc = argToList(bcc)
# Basic validation - we allow pretty much everything but you have to have at least a recipient
# We allow messages without subject and also without body
if not to and not cc and not bcc:
return_error('You must have at least one recipient')
if raw_message:
message = Message(
to_recipients=to,
cc_recipients=cc,
bcc_recipients=bcc,
body=raw_message
)
else:
if additionalHeader:
additionalHeader = add_additional_headers(additionalHeader)
# collect all types of attachments
attachments = collect_attachments(attachIDs, attachCIDs, attachNames)
attachments.extend(collect_manual_attachments(manualAttachObj))
attachments.extend(handle_transient_files(transientFile, transientFileContent, transientFileCID))
# update body and html_body with the templated params, if they exist
template_params = handle_template_params(templateParams)
if template_params:
body = body.format(**template_params)
if htmlBody:
htmlBody = htmlBody.format(**template_params)
message = create_message(to, subject, body, bcc, cc, htmlBody, attachments, additionalHeader)
client.send_email(message)
return 'Mail sent successfully', {}, {}
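# Illustrative sketch (not part of the original integration): a minimal call to send_email,
# assuming an already-configured EWSClient instance named `client`. All values are hypothetical.
# send_email(client, to='user@example.com', subject='Test', body='Hello',
#            additionalHeader='X-Custom-Id=1234')
# returns the tuple ('Mail sent successfully', {}, {}).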
def get_item_as_eml(client: EWSClient, item_id, target_mailbox=None):
"""
Retrieve item as an eml
:param client: EWS Client
:param item_id: Item id to retrieve
:param (Optional) target_mailbox: target mailbox
:return: Output tuple
"""
account = client.get_account(target_mailbox)
item = client.get_item_from_mailbox(account, item_id)
if item.mime_content:
mime_content = item.mime_content
if isinstance(mime_content, bytes):
email_content = email.message_from_bytes(mime_content)
else:
email_content = email.message_from_string(mime_content)
if item.headers:
attached_email_headers = [
(h, " ".join(map(str.strip, v.split("\r\n"))))
for (h, v) in list(email_content.items())
]
for header in item.headers:
if (
header.name,
header.value,
) not in attached_email_headers and header.name != "Content-Type":
email_content.add_header(header.name, header.value)
eml_name = item.subject if item.subject else "demisto_untitled_eml"
file_result = fileResult(eml_name + ".eml", email_content.as_string())
file_result = (
file_result if file_result else "Failed uploading eml file to war room"
)
return file_result
def parse_incident_from_item(item):
"""
Parses an incident from an item
:param item: item to parse
:return: Parsed item
"""
incident = {}
labels = []
try:
incident["details"] = item.text_body or item.body
except AttributeError:
incident["details"] = item.body
incident["name"] = item.subject
labels.append({"type": "Email/subject", "value": item.subject})
incident["occurred"] = item.datetime_created.ewsformat()
# handle recipients
if item.to_recipients:
for recipient in item.to_recipients:
labels.append({"type": "Email", "value": recipient.email_address})
# handle cc
if item.cc_recipients:
for recipient in item.cc_recipients:
labels.append({"type": "Email/cc", "value": recipient.email_address})
# handle email from
if item.sender:
labels.append({"type": "Email/from", "value": item.sender.email_address})
# email format
email_format = ""
try:
if item.text_body:
labels.append({"type": "Email/text", "value": item.text_body})
email_format = "text"
except AttributeError:
pass
if item.body:
labels.append({"type": "Email/html", "value": item.body})
email_format = "HTML"
labels.append({"type": "Email/format", "value": email_format})
# handle attachments
if item.attachments:
incident["attachment"] = []
for attachment in item.attachments:
file_result = None
label_attachment_type = None
label_attachment_id_type = None
if isinstance(attachment, FileAttachment):
try:
if attachment.content:
# file attachment
label_attachment_type = "attachments"
label_attachment_id_type = "attachmentId"
# save the attachment
file_name = get_attachment_name(attachment.name)
file_result = fileResult(file_name, attachment.content)
# check for error
if file_result["Type"] == entryTypes["error"]:
demisto.error(file_result["Contents"])
raise Exception(file_result["Contents"])
# save attachment to incident
incident["attachment"].append(
{
"path": file_result["FileID"],
"name": get_attachment_name(attachment.name),
}
)
except TypeError as e:
if str(e) != "must be string or buffer, not None":
raise
continue
else:
# other item attachment
label_attachment_type = "attachmentItems"
label_attachment_id_type = "attachmentItemsId"
# save the attachment
if attachment.item.mime_content:
mime_content = attachment.item.mime_content
attached_email = email.message_from_bytes(mime_content) if isinstance(mime_content, bytes) \
else email.message_from_string(mime_content)
if attachment.item.headers:
attached_email_headers = [
(h, " ".join(map(str.strip, v.split("\r\n"))))
for (h, v) in list(attached_email.items())
]
for header in attachment.item.headers:
if (
(header.name, header.value)
not in attached_email_headers
and header.name != "Content-Type"
):
attached_email.add_header(header.name, header.value)
file_result = fileResult(
get_attachment_name(attachment.name) + ".eml",
attached_email.as_bytes().decode('utf-8'),
)
if file_result:
# check for error
if file_result["Type"] == entryTypes["error"]:
demisto.error(file_result["Contents"])
raise Exception(file_result["Contents"])
# save attachment to incident
incident["attachment"].append(
{
"path": file_result["FileID"],
"name": get_attachment_name(attachment.name) + ".eml",
}
)
labels.append(
{
"type": label_attachment_type,
"value": get_attachment_name(attachment.name),
}
)
labels.append(
{"type": label_attachment_id_type, "value": attachment.attachment_id.id}
)
# handle headers
if item.headers:
headers = []
for header in item.headers:
labels.append(
{
"type": "Email/Header/{}".format(header.name),
"value": str(header.value),
}
)
headers.append("{}: {}".format(header.name, header.value))
labels.append({"type": "Email/headers", "value": "\r\n".join(headers)})
# handle item id
if item.message_id:
labels.append({"type": "Email/MessageId", "value": str(item.message_id)})
if item.id:
labels.append({"type": "Email/ID", "value": item.id})
labels.append({"type": "Email/itemId", "value": item.id})
# handle conversation id
if item.conversation_id:
labels.append({"type": "Email/ConversionID", "value": item.conversation_id.id})
incident["labels"] = labels
incident["rawJSON"] = json.dumps(parse_item_as_dict(item, None), ensure_ascii=False)
return incident
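# Illustrative sketch (not part of the original integration): the rough shape of the dict
# returned by parse_incident_from_item for a plain-text message without attachments.
# Values are hypothetical.
# {
#     "details": "<text body>",
#     "name": "<subject>",
#     "occurred": "2021-03-15T10:00:00+00:00",
#     "labels": [{"type": "Email/subject", "value": "<subject>"}, ...],
#     "rawJSON": "<JSON-serialized item>"
# }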
def fetch_emails_as_incidents(client: EWSClient, last_run):
"""
Fetch incidents
:param client: EWS Client
:param last_run: last run dict
:return:
"""
last_run = get_last_run(client, last_run)
try:
last_emails = fetch_last_emails(
client,
client.folder_name,
last_run.get(LAST_RUN_TIME),
last_run.get(LAST_RUN_IDS),
)
ids = deque(
last_run.get(LAST_RUN_IDS, []), maxlen=client.last_run_ids_queue_size
)
incidents = []
incident: Dict[str, str] = {}
for item in last_emails:
if item.message_id:
ids.append(item.message_id)
incident = parse_incident_from_item(item)
incidents.append(incident)
if len(incidents) >= client.max_fetch:
break
last_run_time = incident.get("occurred", last_run.get(LAST_RUN_TIME))
if isinstance(last_run_time, EWSDateTime):
last_run_time = last_run_time.ewsformat()
new_last_run = {
LAST_RUN_TIME: last_run_time,
LAST_RUN_FOLDER: client.folder_name,
LAST_RUN_IDS: list(ids),
ERROR_COUNTER: 0,
}
demisto.setLastRun(new_last_run)
return incidents
except RateLimitError:
if LAST_RUN_TIME in last_run:
last_run[LAST_RUN_TIME] = last_run[LAST_RUN_TIME].ewsformat()
if ERROR_COUNTER not in last_run:
last_run[ERROR_COUNTER] = 0
last_run[ERROR_COUNTER] += 1
demisto.setLastRun(last_run)
if last_run[ERROR_COUNTER] > 2:
raise
return []
def fetch_last_emails(
client: EWSClient, folder_name="Inbox", since_datetime=None, exclude_ids=None
):
"""
Fetches last emails
:param client: EWS client
:param (Optional) folder_name: folder name to pull from
:param (Optional) since_datetime: items will be searched after this datetime
:param (Optional) exclude_ids: exclude ids from fetch
:return: list of exchangelib.Items
"""
qs = client.get_folder_by_path(folder_name, is_public=client.is_public_folder)
if since_datetime:
qs = qs.filter(datetime_received__gte=since_datetime)
else:
last_10_min = EWSDateTime.now(tz=EWSTimeZone.timezone("UTC")) - timedelta(
minutes=10
)
qs = qs.filter(last_modified_time__gte=last_10_min)
qs = qs.filter().only(*[x.name for x in Message.FIELDS])
qs = qs.filter().order_by("datetime_received")
result = qs.all()
result = [x for x in result if isinstance(x, Message)]
if exclude_ids and len(exclude_ids) > 0:
exclude_ids = set(exclude_ids)
result = [x for x in result if x.message_id not in exclude_ids]
return result
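# Illustrative sketch (not part of the original integration): fetching from a custom folder
# while excluding already-processed message ids. Argument values are hypothetical.
# items = fetch_last_emails(client, folder_name='Inbox/Phishing',
#                           since_datetime=last_run_time, exclude_ids=['<message-id-1>'])
# returns only exchangelib Message objects received at or after since_datetime.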
def test_module(client: EWSClient, max_fetch):
"""
test-module
* Max incidents per fetch <= MAX_INCIDENTS_PER_FETCH
* Account can be retrieved
* Account has read rights
* Test access to fetch folder
:param client: EWS Client
:param max_fetch: Maximum number of incidents per fetch
:return: "ok"
"""
try:
if int(max_fetch) > MAX_INCIDENTS_PER_FETCH:
return_error(f'Error - Max incidents per fetch cannot be greater than {MAX_INCIDENTS_PER_FETCH}. '
f'You provided: {max_fetch}')
account = client.get_account()
if not account.root.effective_rights.read: # pylint: disable=E1101
raise Exception(
"Success to authenticate, but user has no permissions to read from the mailbox. "
"Need to delegate the user permissions to the mailbox - "
"please read integration documentation and follow the instructions"
)
client.get_folder_by_path(
client.folder_name, account, client.is_public_folder
).test_access()
except ErrorFolderNotFound as e:
if "Top of Information Store" in str(e):
raise Exception(
"Success to authenticate, but user probably has no permissions to read from the specific folder."
"Check user permissions. You can try !ews-find-folders command to "
"get all the folders structure that the user has permissions to"
)
return "ok"
def sub_main():
is_test_module = False
params = demisto.params()
args = prepare_args(demisto.args())
# client's default_target_mailbox is the authorization source for the instance
params['default_target_mailbox'] = args.get('target_mailbox',
args.get('source_mailbox', params['default_target_mailbox']))
client = EWSClient(**params)
start_logging()
try:
command = demisto.command()
# commands that return a single note result
normal_commands = {
"ews-get-searchable-mailboxes": get_searchable_mailboxes,
"ews-move-item-between-mailboxes": move_item_between_mailboxes,
"ews-move-item": move_item,
"ews-delete-items": delete_items,
"ews-search-mailbox": search_items_in_mailbox,
"ews-get-contacts": get_contacts,
"ews-get-out-of-office": get_out_of_office_state,
"ews-recover-messages": recover_soft_delete_item,
"ews-create-folder": create_folder,
"ews-mark-item-as-junk": mark_item_as_junk,
"ews-find-folders": find_folders,
"ews-get-items-from-folder": get_items_from_folder,
"ews-get-items": get_items,
"ews-get-folder": get_folder,
"ews-expand-group": get_expanded_group,
"ews-mark-items-as-read": mark_item_as_read,
"send-mail": send_email,
}
# commands that may return multiple results or non-note result
special_output_commands = {
"ews-get-attachment": fetch_attachments_for_message,
"ews-delete-attachment": delete_attachments_for_message,
"ews-get-items-as-eml": get_item_as_eml,
}
# system commands:
if command == "test-module":
is_test_module = True
demisto.results(test_module(client, params.get('max_fetch')))
elif command == "fetch-incidents":
last_run = demisto.getLastRun()
incidents = fetch_emails_as_incidents(client, last_run)
demisto.incidents(incidents)
# special outputs commands
elif command in special_output_commands:
demisto.results(special_output_commands[command](client, **args)) # type: ignore[operator]
# normal commands
else:
output = normal_commands[command](client, **args) # type: ignore[operator]
return_outputs(*output)
except Exception as e:
start_logging()
debug_log = log_stream.getvalue() # type: ignore[union-attr]
error_message_simple = ""
# Office365 regular maintenance case
if isinstance(e, ErrorMailboxStoreUnavailable) or isinstance(
e, ErrorMailboxMoveInProgress
):
log_message = (
"Office365 is undergoing load balancing operations. "
"As a result, the service is temporarily unavailable."
)
if demisto.command() == "fetch-incidents":
demisto.info(log_message)
demisto.incidents([])
sys.exit(0)
if is_test_module:
demisto.results(
log_message + " Please retry the instance configuration test."
)
sys.exit(0)
error_message_simple = log_message + " Please retry your request."
if isinstance(e, ConnectionError):
error_message_simple = (
"Could not connect to the server.\n"
f"Additional information: {str(e)}"
)
else:
if is_test_module and isinstance(e, MalformedResponseError):
error_message_simple = (
"Got invalid response from the server.\n"
)
# Legacy error handling
if "Status code: 401" in debug_log:
error_message_simple = (
"Got unauthorized from the server. "
)
if "Status code: 503" in debug_log:
error_message_simple = (
"Got timeout from the server. "
"Probably the server is not reachable with the current settings. "
)
if not error_message_simple:
error_message = error_message_simple = str(e)
else:
error_message = error_message_simple + "\n" + str(e)
stacktrace = traceback.format_exc()
if stacktrace:
error_message += "\nFull stacktrace:\n" + stacktrace
if debug_log:
error_message += "\nFull debug log:\n" + debug_log
if demisto.command() == "fetch-incidents":
raise
if demisto.command() == "ews-search-mailbox" and isinstance(e, ValueError):
return_error(
message="Selected invalid field, please specify valid field name.",
error=e,
)
if is_test_module:
demisto.results(error_message_simple)
else:
demisto.results(
{
"Type": entryTypes["error"],
"ContentsFormat": formats["text"],
"Contents": error_message_simple,
}
)
demisto.error(f"{e.__class__.__name__}: {error_message}")
finally:
exchangelib_cleanup()
if log_stream:
try:
logging.getLogger().removeHandler(log_handler) # type: ignore
log_stream.close()
except Exception as ex:
demisto.error(
"EWS: unexpected exception when trying to remove log handler: {}".format(
ex
)
)
def process_main():
"""setup stdin to fd=0 so we can read from the server"""
sys.stdin = os.fdopen(0, "r")
sub_main()
def main():
# When running big queries, like 'ews-search-mailbox', the memory might not be freed by the garbage
# collector. The `separate_process` flag will run the integration in a separate process to prevent
# memory leakage.
separate_process = demisto.params().get("separate_process", False)
demisto.debug("Running as separate_process: {}".format(separate_process))
if separate_process:
try:
p = Process(target=process_main)
p.start()
p.join()
except Exception as ex:
demisto.error("Failed starting Process: {}".format(ex))
else:
sub_main()
from MicrosoftApiModule import * # noqa: E402
if __name__ in ("__main__", "__builtin__", "builtins"):
main()
| get_payload |
File3.py | '''
Function Name : main()
Description : How To Open File & Read The Data Using Open, Read
Function Date : 15 Mar 2021
Function Author : Prasad Dangare
Input : String (file name)
Output : File contents printed to the console
'''
def main():
|
if __name__ == "__main__":
main()
| name = input("Enter the file name that you want to Read : ")
fobj = open(name,"r") # create new file
print("Data from file is ")
print(fobj.read()) |
fetchUserInfo.ts | import { InteractionManager } from 'react-native';
import _ from 'lodash';
import { NodeApi } from '@/api/brightId';
import {
setVerifications,
updateMemberships,
updateConnections,
setIsSponsored,
updateNotifications,
setActiveDevices,
} from './index';
const fetchUserInfo =
(api: NodeApi) => (dispatch: dispatch, getState: getState) => {
return new Promise((resolve) => {
InteractionManager.runAfterInteractions(async () => {
const {
user: { id },
} = getState();
if (!id) {
console.log(`Can't fetch user info - id missing`);
return;
}
console.log('refreshing user info', id);
try {
const verifications = await api.getVerifications(id);
dispatch(setVerifications(verifications));
const memberships = await api.getMemberships(id);
dispatch(updateMemberships(memberships));
const connections = await api.getConnections(id, 'outbound');
const incomingConns = await api.getConnections(id, 'inbound');
const incomingConnsById = _.keyBy(incomingConns, 'id');
for (const conn of connections) {
conn.incomingLevel = incomingConnsById[conn.id]?.level;
}
dispatch(updateConnections(connections));
const { sponsored, signingKeys } = await api.getProfile(id);
dispatch(setIsSponsored(sponsored)); | resolve(null);
} catch (err) {
console.log(err.message);
}
});
});
};
export default fetchUserInfo; | dispatch(setActiveDevices(signingKeys));
dispatch(updateNotifications(api)); |
test.py | """
opennms-provisioner test source module
This module is the provides test sources for opennms-provisioner.
:license: MIT, see LICENSE for more details
:copyright: (c) 2018 by Michael Batz, see AUTHORS for more details
"""
import provisioner.source
import provisioner.opennms
class | (provisioner.source.Source):
""" Dummy source.
This source is a test and demonstrates how to implement
a custom source. It exports two test nodes.
Attributes:
name: name of the source
parameters: dictionary with parameters for this source
"""
def __init__(self, name, parameters):
provisioner.source.Source.__init__(self, name, parameters)
def get_nodes(self):
# create nodelist
nodelist = []
# get parameters from config
cat1 = self.get_parameter("cat1", None)
cat2 = self.get_parameter("cat2", None)
# create testnode 1
node_1 = provisioner.opennms.Node("testnode1", "1")
node_1.add_interface("127.0.0.1")
node_1.add_service("127.0.0.1", "ICMP")
node_1.add_service("127.0.0.1", "SNMP")
node_1.add_asset("city", "Fulda")
node_1.add_asset("zip", "36041")
node_1.add_category("Test")
if cat1:
node_1.add_category(cat1)
if cat2:
node_1.add_category(cat2)
# create testnode2
node_2 = provisioner.opennms.Node("testnode2", "2")
node_2.add_interface("127.0.0.1")
node_2.add_asset("city", "Fulda")
node_2.add_asset("zip", "36041")
node_2.add_category("Test")
if cat1:
node_2.add_category(cat1)
if cat2:
node_2.add_category(cat2)
# add nodes to list and return nodelist
nodelist.append(node_1)
nodelist.append(node_2)
return nodelist
| DummySource |
sidenav.js | "use strict";
module.exports = Sidenav;
var doc = window.document;
var html = doc.documentElement;
require("es6-promise").polyfill(); // Shim for browsers that does not support Promise natively.
function Sidenav(options) {
options = options || {};
this.extraClosePixels = options.extraClosePixels || 30;
this.width = options.width || 350;
this.sidenavOpacity = options.sidenavOpacity || 0.5;
this.isBusy = false; // Is an animation in progress? Opening or closing?
this.isOpened = false;
this.currentOpacity = 0;
this.currentWidth = 0;
// Sets panel
this.sidenav = options.sidenav;
this.backdrop = options.backdrop;
this.content = options.content;
// Sets classnames
this.content.classList.add("sn-content");
this.sidenav.classList.add("sn-sidenav");
setTransformX(this.sidenav, (-1 * this.width) - this.extraClosePixels);
this.backdrop.classList.add("sn-backdrop");
// Sets events
this.initEvents();
}
Sidenav.prototype.open = function() {
var self = this;
if (self.isBusy) { return Promise.reject(); }
self.isBusy = true;
html.classList.add("sn-visible"); | return promise.then(function() {
self.isBusy = false;
self.isOpened = true;
});
};
Sidenav.prototype.close = function() {
var self = this;
if (self.isBusy) { return Promise.reject(); }
self.isBusy = true;
var promise = this.showHideSidebarBackdrop(false);
return promise
.then(function() {
self.isBusy = false;
self.isOpened = false;
html.classList.remove("sn-visible");
});
};
Sidenav.prototype.expandTo = function(px) {
var self = this;
px = Math.min(px, self.width);
var opacity = self.sidenavOpacity * px / self.width;
html.classList.add("sn-visible");
setTransformX(self.sidenav, px - self.width);
self.backdrop.style.opacity = opacity;
self.currentOpacity = opacity;
self.currentWidth = px;
};
Sidenav.prototype.showHideSidebarBackdrop = function(show) {
var self = this;
var promise = new Promise(function(resolve) {
var duration = 300;
var startTime = null;
requestAnimationFrame(animate);
function animate(time) {
var timePassed = 0;
if (startTime === null) {
startTime = time;
} else {
timePassed = Math.min((time - startTime), duration);
}
var targetOpacity = null;
var targetTransform = null;
if (show) {
targetOpacity = easeOutQuad(timePassed, self.currentOpacity, self.sidenavOpacity - self.currentOpacity, duration);
targetTransform = easeOutQuad(timePassed, self.currentWidth, self.width - self.currentWidth, duration);
} else {
targetOpacity = self.currentOpacity - easeOutQuad(timePassed, 0, self.currentOpacity, duration);
targetTransform = self.currentWidth - easeOutQuad(timePassed, 0, self.currentWidth + self.extraClosePixels, duration);
}
setTransformX(self.sidenav, (-1 * self.width) + targetTransform);
self.backdrop.style.opacity = targetOpacity;
if (timePassed < duration) {
requestAnimationFrame(animate);
} else {
if (show) {
self.currentOpacity = self.sidenavOpacity;
self.currentWidth = self.width;
} else {
self.currentOpacity = 0;
self.currentWidth = 0;
}
resolve();
}
}
});
return promise;
};
Sidenav.prototype.initEvents = function() {
var self = this;
var fingerId = null;
var startX = null;
var startY = null;
var startWidth = null;
var wasInSidenavOnce = false;
var horizontalLock = false;
var previousX = null;
var previousY = null;
self.backdrop.addEventListener("click", function() {
self.close();
});
doc.addEventListener("touchstart", touchstart);
function touchstart(e) {
if (self.isBusy) { return; }
if (fingerId !== null) { return; }
if (e.touches.length !== 1) { return; }
// if closed, check if the touch is from the left edge of the screen
if (!self.isOpened && e.touches[0].clientX > 10) { return; }
fingerId = e.touches[0].identifier;
startX = e.touches[0].clientX;
startY = e.touches[0].clientY;
startWidth = self.currentWidth;
wasInSidenavOnce = false;
horizontalLock = false;
previousX = -999;
previousY = -999;
// bind touchmove and end cancel events
doc.addEventListener("touchmove", touchmove);
doc.addEventListener("touchcancel", touchfinish);
doc.addEventListener("touchend", touchfinish);
}
function touchmove(e) {
for (var i = 0; i < e.changedTouches.length; i++) {
if (fingerId === e.changedTouches[i].identifier) {
if (Math.abs(e.changedTouches[i].clientX - previousX) < 1 &&
Math.abs(e.changedTouches[i].clientY - previousY) < 1) {
// Do not over invoke move event
return;
}
previousX = e.changedTouches[i].clientX;
previousY = e.changedTouches[i].clientY;
if (self.isOpened) {
if (!horizontalLock && Math.abs(startX - e.changedTouches[i].clientX) < Math.abs(startY - e.changedTouches[i].clientY)) {
doTouchFinish(null);
return;
}
horizontalLock = true;
// check if direction is horizontal, check if are on sidenav
if (!wasInSidenavOnce && e.changedTouches[i].clientX > self.currentWidth) { return; }
}
wasInSidenavOnce = true;
self.expandTo(startWidth + (e.changedTouches[i].clientX - Math.min(startX, self.width)));
return;
}
}
}
function touchfinish(e) {
for (var i = 0; i < e.changedTouches.length; i++) {
if (fingerId === e.changedTouches[i].identifier) {
doTouchFinish(wasInSidenavOnce ? self.currentWidth > self.width / 2 : null);
}
}
}
function doTouchFinish(shouldOpen) {
if (shouldOpen === true) {
self.open();
} else if (shouldOpen === false) {
self.close();
}
doc.removeEventListener("touchmove", touchmove);
doc.removeEventListener("touchcancel", touchfinish);
doc.removeEventListener("touchend", touchstart);
fingerId = null;
}
};
function setTransformX(domEl, px) {
domEl.style.transform = "translate3d(" + px + "px, 0, 0)";
domEl.style["-webkit-transform"] = "translate3d(" + px + "px, 0, 0)";
}
function easeOutQuad(t, b, c, d) {
t /= d;
return -c * t * (t - 2) + b;
} |
var promise = this.showHideSidebarBackdrop(true); |
UIHelper.js | /**
* @class Oskari.userinterface.component.UIHelper
* Generic UI helper methods
*/
Oskari.clazz.define('Oskari.userinterface.component.UIHelper',
/**
* @method create called automatically on construction
* @static
*/
function (sandbox) {
this.sandbox = sandbox;
}, {
/** | * @method processHelpLinks
* Processes given element and binds any element with
* "helptags" attribute with a popup showing the help article
* @param {String} title - help popup title
* @param {jQuery} content - element to scan
* @param {String} errorMsg - popup title if help article is not found
* @param {String} errorMsg - message if help article is not found
*/
processHelpLinks: function (title, content, errorTitle, errorMsg) {
if (!content) {
return;
}
var me = this,
getCallback;
// construct the callback for the button so we can position the popup accordingly
getCallback = function (btn) {
return function (isSuccess, pContent) {
var dialog = Oskari.clazz.create('Oskari.userinterface.component.Popup'),
okBtn = dialog.createCloseButton("OK");
okBtn.addClass('primary');
if (isSuccess) {
// help articles have only 'static' content
// FIXME static is a reserved word
dialog.show(title, pContent.static, [okBtn]);
dialog.moveTo(btn, 'bottom');
} else {
dialog.show(errorTitle, errorMsg, [okBtn]);
}
};
};
// Help popups (PORTTISK-812)
content.find('[helptags]').each(function (i, e) {
var btn = jQuery(e),
taglist = btn.attr("helptags");
btn.bind('click', function () {
me.getHelpArticle(taglist, getCallback(btn));
});
});
},
/**
* @method getHelpArticle
* Fetches an article from the server
* "helptags" attribute with a popup showing the help article
* @param {String} taglist - comma-separated list of article tags identifying the article
* @param {Function} callback - function that is called on completion. Functions first param is
* boolean that indicates success, second parameter is the loaded content if it was successfully loaded
*/
getHelpArticle: function (taglist, callback) {
var me = this;
jQuery.ajax({
url: me.sandbox.getAjaxUrl() + 'action_route=GetArticlesByTag',
data: {
tags: taglist
},
type: 'GET',
dataType: 'json',
beforeSend: function (x) {
if (x && x.overrideMimeType) {
x.overrideMimeType("application/j-son;charset=UTF-8");
}
},
success: function (resp) {
if (resp && resp.articles[0] && resp.articles[0].content) {
callback(true, resp.articles[0].content);
} else {
callback(false);
}
},
error: function () {
callback(false);
}
});
}
}); | |
page-mod.js | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
module.metadata = {
"stability": "stable"
};
const observers = require('./system/events');
const { contract: loaderContract } = require('./content/loader');
const { contract } = require('./util/contract');
const { getAttachEventType, WorkerHost } = require('./content/utils');
const { Class } = require('./core/heritage');
const { Disposable } = require('./core/disposable');
const { WeakReference } = require('./core/reference');
const { Worker } = require('./content/worker');
const { EventTarget } = require('./event/target');
const { on, emit, once, setListeners } = require('./event/core');
const { on: domOn, removeListener: domOff } = require('./dom/events');
const { pipe } = require('./event/utils');
const { isRegExp } = require('./lang/type'); | const { merge } = require('./util/object');
const { windowIterator } = require('./deprecated/window-utils');
const { isBrowser, getFrames } = require('./window/utils');
const { getTabs, getTabContentWindow, getTabForContentWindow,
getURI: getTabURI } = require('./tabs/utils');
const { ignoreWindow } = require('sdk/private-browsing/utils');
const { Style } = require("./stylesheet/style");
const { attach, detach } = require("./content/mod");
const { has, hasAny } = require("./util/array");
const { Rules } = require("./util/rules");
const { List, addListItem, removeListItem } = require('./util/list');
const { when: unload } = require("./system/unload");
// Valid values for `attachTo` option
const VALID_ATTACHTO_OPTIONS = ['existing', 'top', 'frame'];
const pagemods = new Set();
const workers = new WeakMap();
const styles = new WeakMap();
const models = new WeakMap();
let modelFor = (mod) => models.get(mod);
let workerFor = (mod) => workers.get(mod);
let styleFor = (mod) => styles.get(mod);
// Bind observer
observers.on('document-element-inserted', onContentWindow);
unload(() => observers.off('document-element-inserted', onContentWindow));
let isRegExpOrString = (v) => isRegExp(v) || typeof v === 'string';
// Validation Contracts
const modOptions = {
// contentStyle* / contentScript* are sharing the same validation constraints,
// so they can be mostly reused, except for the messages.
contentStyle: merge(Object.create(loaderContract.rules.contentScript), {
msg: 'The `contentStyle` option must be a string or an array of strings.'
}),
contentStyleFile: merge(Object.create(loaderContract.rules.contentScriptFile), {
msg: 'The `contentStyleFile` option must be a local URL or an array of URLs'
}),
include: {
is: ['string', 'array', 'regexp'],
ok: (rule) => {
if (isRegExpOrString(rule))
return true;
if (Array.isArray(rule) && rule.length > 0)
return rule.every(isRegExpOrString);
return false;
},
msg: 'The `include` option must always contain at least one rule as a string, regular expression, or an array of strings and regular expressions.'
},
attachTo: {
is: ['string', 'array', 'undefined'],
map: function (attachTo) {
if (!attachTo) return ['top', 'frame'];
if (typeof attachTo === 'string') return [attachTo];
return attachTo;
},
ok: function (attachTo) {
return hasAny(attachTo, ['top', 'frame']) &&
attachTo.every(has.bind(null, ['top', 'frame', 'existing']));
},
msg: 'The `attachTo` option must be a string or an array of strings. ' +
'The only valid options are "existing", "top" and "frame", and must ' +
'contain at least "top" or "frame" values.'
},
};
const modContract = contract(merge({}, loaderContract.rules, modOptions));
/**
* PageMod constructor (exported below).
* @constructor
*/
const PageMod = Class({
implements: [
modContract.properties(modelFor),
EventTarget,
Disposable,
WeakReference
],
extends: WorkerHost(workerFor),
setup: function PageMod(options) {
let mod = this;
let model = modContract(options);
models.set(this, model);
// Set listeners on {PageMod} itself, not the underlying worker,
// like `onMessage`, as it'll get piped.
setListeners(this, options);
let include = model.include;
model.include = Rules();
model.include.add.apply(model.include, [].concat(include));
if (model.contentStyle || model.contentStyleFile) {
styles.set(mod, Style({
uri: model.contentStyleFile,
source: model.contentStyle
}));
}
pagemods.add(this);
// `applyOnExistingDocuments` has to be called after `pagemods.add()`
// otherwise its calls to `onContent` method won't do anything.
if (has(model.attachTo, 'existing'))
applyOnExistingDocuments(mod);
},
dispose: function() {
let style = styleFor(this);
if (style)
detach(style);
for (let i in this.include)
this.include.remove(this.include[i]);
pagemods.delete(this);
}
});
exports.PageMod = PageMod;
function onContentWindow({ subject: document }) {
// Return if we have no pagemods
if (pagemods.size === 0)
return;
let window = document.defaultView;
// XML documents don't have windows, and we don't yet support them.
if (!window)
return;
// We apply only on documents in tabs of Firefox
if (!getTabForContentWindow(window))
return;
// When the tab is private, only addons with 'private-browsing' flag in
// their package.json can apply content script to private documents
if (ignoreWindow(window))
return;
for (let pagemod of pagemods) {
if (pagemod.include.matchesAny(document.URL))
onContent(pagemod, window);
}
}
// Returns all tabs on all currently opened windows
function getAllTabs() {
let tabs = [];
// Iterate over all chrome windows
for (let window in windowIterator()) {
if (!isBrowser(window))
continue;
tabs = tabs.concat(getTabs(window));
}
return tabs;
}
function applyOnExistingDocuments (mod) {
let tabs = getAllTabs();
tabs.forEach(function (tab) {
// Fake a newly created document
let window = getTabContentWindow(tab);
if (has(mod.attachTo, "top") && mod.include.matchesAny(getTabURI(tab)))
onContent(mod, window);
if (has(mod.attachTo, "frame")) {
getFrames(window).
filter((iframe) => mod.include.matchesAny(iframe.location.href)).
forEach((frame) => onContent(mod, frame));
}
});
}
function createWorker (mod, window) {
let worker = Worker({
window: window,
contentScript: mod.contentScript,
contentScriptFile: mod.contentScriptFile,
contentScriptOptions: mod.contentScriptOptions,
// Bug 980468: Syntax errors from scripts can happen before the worker
// can set up an error handler. They are per-mod rather than per-worker
// so are best handled at the mod level.
onError: (e) => emit(mod, 'error', e)
});
workers.set(mod, worker);
pipe(worker, mod);
emit(mod, 'attach', worker);
once(worker, 'detach', function detach() {
worker.destroy();
});
}
function onContent (mod, window) {
// not registered yet
if (!pagemods.has(mod))
return;
let isTopDocument = window.top === window;
// Is a top level document and `top` is not set, ignore
if (isTopDocument && !has(mod.attachTo, "top"))
return;
// Is a frame document and `frame` is not set, ignore
if (!isTopDocument && !has(mod.attachTo, "frame"))
return;
let style = styleFor(mod);
if (style)
attach(style, window);
// Immediately evaluate the content script if the document state already
// matches the contentScriptWhen expectation
if (isMatchingAttachState(mod, window)) {
createWorker(mod, window);
return;
}
let eventName = getAttachEventType(mod) || 'load';
domOn(window, eventName, function onReady (e) {
if (e.target.defaultView !== window)
return;
domOff(window, eventName, onReady, true);
createWorker(mod, window);
}, true);
}
function isMatchingAttachState (mod, window) {
let state = window.document.readyState;
return 'start' === mod.contentScriptWhen ||
// Is `load` event already dispatched?
'complete' === state ||
// Is DOMContentLoaded already dispatched and waiting for it?
('ready' === mod.contentScriptWhen && state === 'interactive')
} |