state.ts
import { BehaviorSubject, filter, finalize, map, observable, Observable, Subscribable, tap } from 'rxjs';
import { AjaxConfig as Request, AjaxResponse as Response } from 'rxjs/ajax';
import { Target } from '../linker/target';
import { Provider } from '../super/provider';
import { Singleton } from '../utility/singleton';
import { HttpHandler } from './client';
import { HttpProxy } from './proxy';
/**
* Built-in {@link HttpProxy} intercepting all requests fired through the
* {@link HttpClient}. This proxy implements {@link [observable]}, through which
* it emits an array of all currently open connections every time a new request
* is fired or a running request is completed.
*
* @decorator {@link Singleton}
* @decorator {@link Target}
*
* @see {@link HttpClient}
* @see {@link HttpProxy}
*/
@Target<typeof HttpState>()
@Singleton<typeof HttpState>()
export class HttpState
extends Provider<typeof HttpProxy>('sgrud.core.http.HttpProxy') {
/**
* Symbol property typed as callback to a Subscribable. The returned
* Subscribable emits an array of all active requests whenever this list
* mutates. Using the returned Subscribable, e.g., a load indicator can easily
* be implemented.
*
* @returns Callback to a Subscribable.
*
* @example Subscribe to the currently active requests.
* ```ts
* import { HttpState, Linker } from '@sgrud/core';
* import { from } from 'rxjs';
*
* const httpState = new Linker<typeof HttpState>().get(HttpState);
* from(httpState).subscribe(console.log);
* ```
*/
public readonly [Symbol.observable]!: () => Subscribable<Response<any>[]>;
/**
* BehaviorSubject emitting every time a request is added to or deleted from
* the internal {@link running} map.
*/
private readonly changes: BehaviorSubject<this>;
/**
* Internal map containing all running requests. Updating this map should
* always be accompanied by an emittance of the {@link changes}.
*/
private readonly running: Map<Request, Response<any>>;
/**
* `rxjs.observable` interop getter returning a callback to a Subscribable.
*/
public get [observable](): () => Subscribable<Response<any>[]> {
return () => this.changes.pipe(
map(() => Array.from(this.running.values()))
);
}
/**
* Public constructor. Called by the {@link Target} decorator to link this
* {@link HttpProxy} into the proxy chain.
*/
public constructor() {
super();
this.changes = new BehaviorSubject<this>(this);
this.running = new Map<Request, Response<any>>();
}
/**
* Overridden {@link proxy} method of the {@link HttpProxy} base class.
* Mutates the request to also emit progress events while the request is
* running. These progress events will be consumed by the HttpState
* interceptor and re-supplied via the Observable returned by the
* {@link [observable]} getter.
*
* @param request - Request.
* @param handler - Next handler.
* @typeParam T - Response type.
* @returns Observable response.
*/
public override proxy<T>(
request: Request,
handler: HttpHandler
): Observable<Response<T>> {
const includeDownloadProgress = request.includeDownloadProgress;
const includeUploadProgress = request.includeUploadProgress;
return handler.handle<T>({
...request,
includeDownloadProgress: true,
includeUploadProgress: true
}).pipe(
tap((event) => {
this.running.set(request, event);
this.changes.next(this);
}),
filter(({ type }) => {
return Boolean(type === 'download_load' ||
includeDownloadProgress && type.startsWith('download_') ||
includeUploadProgress && type.startsWith('upload_'));
}),
finalize(() => {
this.running.delete(request);
this.changes.next(this);
})
);
}
}
instr_vsubsd.rs
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
#[test]
fn vsubsd_1() {
run_test(&Instruction { mnemonic: Mnemonic::VSUBSD, operand1: Some(Direct(XMM3)), operand2: Some(Direct(XMM7)), operand3: Some(Direct(XMM3)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 195, 92, 219], OperandSize::Dword)
}
#[test]
fn vsubsd_2() {
run_test(&Instruction { mnemonic: Mnemonic::VSUBSD, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM4)), operand3: Some(IndirectScaledIndexedDisplaced(ESI, ECX, Four, 1323778319, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 219, 92, 132, 142, 15, 65, 231, 78], OperandSize::Dword)
}
#[test]
fn vsubsd_3() {
run_test(&Instruction { mnemonic: Mnemonic::VSUBSD, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM0)), operand3: Some(Direct(XMM4)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 251, 92, 212], OperandSize::Qword)
}
#[test]
fn vsubsd_4() {
run_test(&Instruction { mnemonic: Mnemonic::VSUBSD, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM2)), operand3: Some(IndirectDisplaced(RCX, 1876202076, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 235, 92, 185, 92, 146, 212, 111], OperandSize::Qword)
}
#[test]
fn vsubsd_5() {
run_test(&Instruction { mnemonic: Mnemonic::VSUBSD, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM6)), operand3: Some(Direct(XMM6)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Up), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 241, 207, 221, 92, 246], OperandSize::Dword)
}
#[test]
fn vsubsd_6() {
run_test(&Instruction { mnemonic: Mnemonic::VSUBSD, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM0)), operand3: Some(IndirectScaledDisplaced(EAX, Eight, 1324529119, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 241, 255, 138, 92, 60, 197, 223, 181, 242, 78], OperandSize::Dword)
}
#[test]
fn vsubsd_7() {
run_test(&Instruction { mnemonic: Mnemonic::VSUBSD, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM15)), operand3: Some(Direct(XMM10)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Nearest), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 209, 135, 156, 92, 242], OperandSize::Qword)
}
#[test]
fn vsubsd_8() {
run_test(&Instruction { mnemonic: Mnemonic::VSUBSD, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM8)), operand3: Some(IndirectScaledDisplaced(RSI, Four, 343074211, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 241, 191, 139, 92, 4, 181, 163, 229, 114, 20], OperandSize::Qword)
}
text.py
# -*- coding: utf-8 -*-
"""
sphinx.writers.text
~~~~~~~~~~~~~~~~~~~
Custom docutils writer for plain text.
:copyright: Copyright 2007-2017 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import os
import re
import textwrap
from itertools import groupby
import warnings
from six.moves import zip_longest
from docutils import nodes, writers
from docutils.utils import column_width
from sphinx import addnodes
from sphinx.deprecation import RemovedInSphinx16Warning
from sphinx.locale import admonitionlabels, _
class TextWrapper(textwrap.TextWrapper):
"""Custom subclass that uses a different word separator regex."""
wordsep_re = re.compile(
r'(\s+|' # any whitespace
r'(?<=\s)(?::[a-z-]+:)?`\S+|' # interpreted text start
r'[^\s\w]*\w+[a-zA-Z]-(?=\w+[a-zA-Z])|' # hyphenated words
r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))') # em-dash
def _wrap_chunks(self, chunks):
"""_wrap_chunks(chunks : [string]) -> [string]
The original _wrap_chunks uses len() to calculate width.
This method respects wide/fullwidth characters for width adjustment.
"""
drop_whitespace = getattr(self, 'drop_whitespace', True) # py25 compat
lines = []
if self.width <= 0:
raise ValueError("invalid width %r (must be > 0)" % self.width)
chunks.reverse()
while chunks:
cur_line = []
cur_len = 0
if lines:
indent = self.subsequent_indent
else:
indent = self.initial_indent
width = self.width - column_width(indent)
if drop_whitespace and chunks[-1].strip() == '' and lines:
del chunks[-1]
while chunks:
l = column_width(chunks[-1])
if cur_len + l <= width:
cur_line.append(chunks.pop())
cur_len += l
else:
break
if chunks and column_width(chunks[-1]) > width:
self._handle_long_word(chunks, cur_line, cur_len, width)
if drop_whitespace and cur_line and cur_line[-1].strip() == '':
del cur_line[-1]
if cur_line:
lines.append(indent + ''.join(cur_line))
return lines
def _break_word(self, word, space_left):
"""_break_word(word : string, space_left : int) -> (string, string)
Break line by unicode width instead of len(word).
"""
total = 0
for i, c in enumerate(word):
total += column_width(c)
if total > space_left:
return word[:i - 1], word[i - 1:]
return word, ''
def _split(self, text):
"""_split(text : string) -> [string]
Override original method that only split by 'wordsep_re'.
This '_split' split wide-characters into chunk by one character.
"""
"""
def split(t):
return textwrap.TextWrapper._split(self, t)
chunks = []
for chunk in split(text):
for w, g in groupby(chunk, column_width):
if w == 1:
chunks.extend(split(''.join(g)))
else:
chunks.extend(list(g))
return chunks
def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
"""_handle_long_word(chunks : [string],
cur_line : [string],
cur_len : int, width : int)
Override original method for using self._break_word() instead of slice.
"""
space_left = max(width - cur_len, 1)
if self.break_long_words:
l, r = self._break_word(reversed_chunks[-1], space_left)
cur_line.append(l)
reversed_chunks[-1] = r
elif not cur_line:
cur_line.append(reversed_chunks.pop())
MAXWIDTH = 70
STDINDENT = 3
def my_wrap(text, width=MAXWIDTH, **kwargs):
w = TextWrapper(width=width, **kwargs)
return w.wrap(text)
class TextWriter(writers.Writer):
supported = ('text',)
settings_spec = ('No options here.', '', ())
settings_defaults = {}
output = None
def __init__(self, builder):
writers.Writer.__init__(self)
self.builder = builder
self.translator_class = self.builder.translator_class or TextTranslator
def translate(self):
visitor = self.translator_class(self.document, self.builder)
self.document.walkabout(visitor)
self.output = visitor.body
class TextTranslator(nodes.NodeVisitor):
sectionchars = '*=-~"+`'
def __init__(self, document, builder):
nodes.NodeVisitor.__init__(self, document)
self.builder = builder
newlines = builder.config.text_newlines
if newlines == 'windows':
self.nl = '\r\n'
elif newlines == 'native':
self.nl = os.linesep
else:
self.nl = '\n'
self.sectionchars = builder.config.text_sectionchars
self.states = [[]]
self.stateindent = [0]
self.list_counter = []
self.sectionlevel = 0
self.lineblocklevel = 0
self.table = None
def add_text(self, text):
self.states[-1].append((-1, text))
def new_state(self, indent=STDINDENT):
self.states.append([])
self.stateindent.append(indent)
def end_state(self, wrap=True, end=[''], first=None):
content = self.states.pop()
maxindent = sum(self.stateindent)
indent = self.stateindent.pop()
result = []
toformat = []
def do_format():
if not toformat:
return
if wrap:
res = my_wrap(''.join(toformat), width=MAXWIDTH - maxindent)
else:
res = ''.join(toformat).splitlines()
if end:
res += end
result.append((indent, res))
for itemindent, item in content:
if itemindent == -1:
toformat.append(item)
else:
do_format()
result.append((indent + itemindent, item))
toformat = []
do_format()
if first is not None and result:
itemindent, item = result[0]
result_rest, result = result[1:], []
if item:
toformat = [first + ' '.join(item)]
do_format() # re-create `result` from `toformat`
_dummy, new_item = result[0]
result.insert(0, (itemindent - indent, [new_item[0]]))
result[1] = (itemindent, new_item[1:])
result.extend(result_rest)
self.states[-1].extend(result)
def visit_document(self, node):
self.new_state(0)
def depart_document(self, node):
self.end_state()
self.body = self.nl.join(line and (' ' * indent + line)
for indent, lines in self.states[0]
for line in lines)
# XXX header/footer?
def visit_highlightlang(self, node):
raise nodes.SkipNode
def visit_section(self, node):
self._title_char = self.sectionchars[self.sectionlevel]
self.sectionlevel += 1
def depart_section(self, node):
self.sectionlevel -= 1
def visit_topic(self, node):
self.new_state(0)
def depart_topic(self, node):
self.end_state()
visit_sidebar = visit_topic
depart_sidebar = depart_topic
def visit_rubric(self, node):
self.new_state(0)
self.add_text('-[ ')
def depart_rubric(self, node):
self.add_text(' ]-')
self.end_state()
def visit_compound(self, node):
pass
def depart_compound(self, node):
pass
def visit_glossary(self, node):
pass
def depart_glossary(self, node):
pass
def visit_title(self, node):
if isinstance(node.parent, nodes.Admonition):
self.add_text(node.astext() + ': ')
raise nodes.SkipNode
self.new_state(0)
def depart_title(self, node):
if isinstance(node.parent, nodes.section):
char = self._title_char
else:
char = '^'
text = ''.join(x[1] for x in self.states.pop() if x[0] == -1)
self.stateindent.pop()
title = ['', text, '%s' % (char * column_width(text)), '']
if len(self.states) == 2 and len(self.states[-1]) == 0:
# remove an empty line before title if it is first section title in the document
title.pop(0)
self.states[-1].append((0, title))
def visit_subtitle(self, node):
pass
def depart_subtitle(self, node):
pass
def visit_attribution(self, node):
self.add_text('-- ')
def depart_attribution(self, node):
pass
def visit_desc(self, node):
pass
def depart_desc(self, node):
pass
def visit_desc_signature(self, node):
self.new_state(0)
def depart_desc_signature(self, node):
# XXX: wrap signatures in a way that makes sense
self.end_state(wrap=False, end=None)
def visit_desc_signature_line(self, node):
pass
def depart_desc_signature_line(self, node):
self.add_text('\n')
def visit_desc_name(self, node):
pass
def depart_desc_name(self, node):
pass
def visit_desc_addname(self, node):
pass
def depart_desc_addname(self, node):
pass
def visit_desc_type(self, node):
pass
def depart_desc_type(self, node):
pass
def visit_desc_returns(self, node):
self.add_text(' -> ')
def depart_desc_returns(self, node):
pass
def visit_desc_parameterlist(self, node):
self.add_text('(')
self.first_param = 1
def depart_desc_parameterlist(self, node):
self.add_text(')')
def visit_desc_parameter(self, node):
if not self.first_param:
self.add_text(', ')
else:
self.first_param = 0
self.add_text(node.astext())
raise nodes.SkipNode
def visit_desc_optional(self, node):
self.add_text('[')
def depart_desc_optional(self, node):
self.add_text(']')
def visit_desc_annotation(self, node):
pass
def depart_desc_annotation(self, node):
pass
def visit_desc_content(self, node):
self.new_state()
self.add_text(self.nl)
def depart_desc_content(self, node):
self.end_state()
def visit_figure(self, node):
self.new_state()
def depart_figure(self, node):
self.end_state()
def visit_caption(self, node):
pass
def depart_caption(self, node):
pass
def visit_productionlist(self, node):
self.new_state()
names = []
for production in node:
names.append(production['tokenname'])
maxlen = max(len(name) for name in names)
lastname = None
for production in node:
if production['tokenname']:
self.add_text(production['tokenname'].ljust(maxlen) + ' ::=')
lastname = production['tokenname']
elif lastname is not None:
self.add_text('%s ' % (' ' * len(lastname)))
self.add_text(production.astext() + self.nl)
self.end_state(wrap=False)
raise nodes.SkipNode
def visit_footnote(self, node):
self._footnote = node.children[0].astext().strip()
self.new_state(len(self._footnote) + 3)
def depart_footnote(self, node):
self.end_state(first='[%s] ' % self._footnote)
def visit_citation(self, node):
if len(node) and isinstance(node[0], nodes.label):
self._citlabel = node[0].astext()
else:
self._citlabel = ''
self.new_state(len(self._citlabel) + 3)
def depart_citation(self, node):
self.end_state(first='[%s] ' % self._citlabel)
def visit_label(self, node):
raise nodes.SkipNode
def visit_legend(self, node):
pass
def depart_legend(self, node):
pass
# XXX: option list could use some better styling
def visit_option_list(self, node):
pass
def depart_option_list(self, node):
pass
def visit_option_list_item(self, node):
self.new_state(0)
def depart_option_list_item(self, node):
self.end_state()
def visit_option_group(self, node):
self._firstoption = True
def depart_option_group(self, node):
self.add_text(' ')
def visit_option(self, node):
if self._firstoption:
self._firstoption = False
else:
self.add_text(', ')
def depart_option(self, node):
pass
def visit_option_string(self, node):
pass
def depart_option_string(self, node):
pass
def visit_option_argument(self, node):
self.add_text(node['delimiter'])
def depart_option_argument(self, node):
pass
def visit_description(self, node):
pass
def depart_description(self, node):
pass
def visit_tabular_col_spec(self, node):
raise nodes.SkipNode
def visit_colspec(self, node):
self.table[0].append(node['colwidth'])
raise nodes.SkipNode
def visit_tgroup(self, node):
pass
def depart_tgroup(self, node):
pass
def visit_thead(self, node):
pass
def depart_thead(self, node):
pass
def visit_tbody(self, node):
self.table.append('sep')
def depart_tbody(self, node):
pass
def visit_row(self, node):
self.table.append([])
def depart_row(self, node):
pass
def visit_entry(self, node):
if 'morerows' in node or 'morecols' in node:
raise NotImplementedError('Column or row spanning cells are '
'not implemented.')
self.new_state(0)
def depart_entry(self, node):
text = self.nl.join(self.nl.join(x[1]) for x in self.states.pop())
self.stateindent.pop()
self.table[-1].append(text)
def visit_table(self, node):
if self.table:
raise NotImplementedError('Nested tables are not supported.')
self.new_state(0)
self.table = [[]]
def depart_table(self, node):
lines = self.table[1:]
fmted_rows = []
colwidths = self.table[0]
realwidths = colwidths[:]
separator = 0
# don't allow paragraphs in table cells for now
for line in lines:
if line == 'sep':
separator = len(fmted_rows)
else:
cells = []
for i, cell in enumerate(line):
par = my_wrap(cell, width=colwidths[i])
if par:
maxwidth = max(column_width(x) for x in par)
else:
maxwidth = 0
realwidths[i] = max(realwidths[i], maxwidth)
cells.append(par)
fmted_rows.append(cells)
def writesep(char='-'):
out = ['+']
for width in realwidths:
out.append(char * (width + 2))
out.append('+')
self.add_text(''.join(out) + self.nl)
def writerow(row):
lines = zip_longest(*row)
for line in lines:
out = ['|']
for i, cell in enumerate(line):
if cell:
adjust_len = len(cell) - column_width(cell)
out.append(' ' + cell.ljust(
realwidths[i] + 1 + adjust_len))
else:
out.append(' ' * (realwidths[i] + 2))
out.append('|')
self.add_text(''.join(out) + self.nl)
for i, row in enumerate(fmted_rows):
if separator and i == separator:
writesep('=')
else:
writesep('-')
writerow(row)
writesep('-')
self.table = None
self.end_state(wrap=False)
def visit_acks(self, node):
self.new_state(0)
self.add_text(', '.join(n.astext() for n in node.children[0].children) +
'.')
self.end_state()
raise nodes.SkipNode
def visit_image(self, node):
if 'alt' in node.attributes:
self.add_text(_('[image: %s]') % node['alt'])
self.add_text(_('[image]'))
raise nodes.SkipNode
def visit_transition(self, node):
indent = sum(self.stateindent)
self.new_state(0)
self.add_text('=' * (MAXWIDTH - indent))
self.end_state()
raise nodes.SkipNode
def visit_bullet_list(self, node):
self.list_counter.append(-1)
def depart_bullet_list(self, node):
self.list_counter.pop()
def visit_enumerated_list(self, node):
self.list_counter.append(node.get('start', 1) - 1)
def depart_enumerated_list(self, node):
self.list_counter.pop()
def visit_definition_list(self, node):
self.list_counter.append(-2)
def depart_definition_list(self, node):
self.list_counter.pop()
def visit_list_item(self, node):
if self.list_counter[-1] == -1:
# bullet list
self.new_state(2)
elif self.list_counter[-1] == -2:
# definition list
pass
else:
# enumerated list
self.list_counter[-1] += 1
self.new_state(len(str(self.list_counter[-1])) + 2)
def depart_list_item(self, node):
if self.list_counter[-1] == -1:
self.end_state(first='* ')
elif self.list_counter[-1] == -2:
pass
else:
self.end_state(first='%s. ' % self.list_counter[-1])
def visit_definition_list_item(self, node):
self._classifier_count_in_li = len(node.traverse(nodes.classifier))
def depart_definition_list_item(self, node):
pass
def visit_term(self, node):
self.new_state(0)
def depart_term(self, node):
if not self._classifier_count_in_li:
self.end_state(end=None)
def visit_termsep(self, node):
warnings.warn('sphinx.addnodes.termsep will be removed at Sphinx-1.6. '
'This warning is displayed because some Sphinx extension '
'uses sphinx.addnodes.termsep. Please report it to '
'author of the extension.', RemovedInSphinx16Warning)
self.add_text(', ')
raise nodes.SkipNode
def visit_classifier(self, node):
self.add_text(' : ')
def depart_classifier(self, node):
self._classifier_count_in_li -= 1
if not self._classifier_count_in_li:
self.end_state(end=None)
def visit_definition(self, node):
self.new_state()
def depart_definition(self, node):
self.end_state()
def visit_field_list(self, node):
pass
def depart_field_list(self, node):
pass
def visit_field(self, node):
pass
def depart_field(self, node):
pass
def visit_field_name(self, node):
self.new_state(0)
def depart_field_name(self, node):
self.add_text(':')
self.end_state(end=None)
def visit_field_body(self, node):
self.new_state()
def depart_field_body(self, node):
self.end_state()
def visit_centered(self, node):
pass
def depart_centered(self, node):
pass
def visit_hlist(self, node):
pass
def depart_hlist(self, node):
pass
def visit_hlistcol(self, node):
pass
def depart_hlistcol(self, node):
pass
def visit_admonition(self, node):
self.new_state(0)
def depart_admonition(self, node):
self.end_state()
def _visit_admonition(self, node):
self.new_state(2)
if isinstance(node.children[0], nodes.Sequential):
self.add_text(self.nl)
def _make_depart_admonition(name):
def depart_admonition(self, node):
self.end_state(first=admonitionlabels[name] + ': ')
return depart_admonition
visit_attention = _visit_admonition
depart_attention = _make_depart_admonition('attention')
visit_caution = _visit_admonition
depart_caution = _make_depart_admonition('caution')
visit_danger = _visit_admonition
depart_danger = _make_depart_admonition('danger')
visit_error = _visit_admonition
depart_error = _make_depart_admonition('error')
visit_hint = _visit_admonition
depart_hint = _make_depart_admonition('hint')
visit_important = _visit_admonition
depart_important = _make_depart_admonition('important')
visit_note = _visit_admonition
depart_note = _make_depart_admonition('note')
visit_tip = _visit_admonition
depart_tip = _make_depart_admonition('tip')
visit_warning = _visit_admonition
depart_warning = _make_depart_admonition('warning')
visit_seealso = _visit_admonition
depart_seealso = _make_depart_admonition('seealso')
def visit_versionmodified(self, node):
self.new_state(0)
def depart_versionmodified(self, node):
self.end_state()
def visit_literal_block(self, node):
self.new_state()
def depart_literal_block(self, node):
self.end_state(wrap=False)
def visit_doctest_block(self, node):
self.new_state(0)
def depart_doctest_block(self, node):
self.end_state(wrap=False)
def visit_line_block(self, node):
self.new_state()
self.lineblocklevel += 1
def depart_line_block(self, node):
self.lineblocklevel -= 1
self.end_state(wrap=False, end=None)
if not self.lineblocklevel:
self.add_text('\n')
def visit_line(self, node):
pass
def depart_line(self, node):
self.add_text('\n')
def visit_block_quote(self, node):
self.new_state()
def depart_block_quote(self, node):
self.end_state()
def visit_compact_paragraph(self, node):
pass
def depart_compact_paragraph(self, node):
pass
def visit_paragraph(self, node):
if not isinstance(node.parent, nodes.Admonition) or \
isinstance(node.parent, addnodes.seealso):
self.new_state(0)
def depart_paragraph(self, node):
if not isinstance(node.parent, nodes.Admonition) or \
isinstance(node.parent, addnodes.seealso):
self.end_state()
def visit_target(self, node):
raise nodes.SkipNode
def visit_index(self, node):
raise nodes.SkipNode
def visit_toctree(self, node):
raise nodes.SkipNode
def visit_substitution_definition(self, node):
raise nodes.SkipNode
def visit_pending_xref(self, node):
pass
def depart_pending_xref(self, node):
pass
def visit_reference(self, node):
pass
def depart_reference(self, node):
pass
def visit_number_reference(self, node):
text = nodes.Text(node.get('title', '#'))
self.visit_Text(text)
raise nodes.SkipNode
def visit_download_reference(self, node):
pass
def depart_download_reference(self, node):
pass
def visit_emphasis(self, node):
self.add_text('*')
def depart_emphasis(self, node):
self.add_text('*')
def visit_literal_emphasis(self, node):
self.add_text('*')
def depart_literal_emphasis(self, node):
self.add_text('*')
def visit_strong(self, node):
self.add_text('**')
def depart_strong(self, node):
self.add_text('**')
def visit_literal_strong(self, node):
self.add_text('**')
def depart_literal_strong(self, node):
self.add_text('**')
def visit_abbreviation(self, node):
self.add_text('')
def depart_abbreviation(self, node):
if node.hasattr('explanation'):
self.add_text(' (%s)' % node['explanation'])
def visit_manpage(self, node):
return self.visit_literal_emphasis(node)
def depart_manpage(self, node):
return self.depart_literal_emphasis(node)
def visit_title_reference(self, node):
self.add_text('*')
def depart_title_reference(self, node):
self.add_text('*')
def visit_literal(self, node):
self.add_text('"')
def depart_literal(self, node):
self.add_text('"')
def visit_subscript(self, node):
self.add_text('_')
def depart_subscript(self, node):
pass
def visit_superscript(self, node):
self.add_text('^')
def depart_superscript(self, node):
pass
def visit_footnote_reference(self, node):
self.add_text('[%s]' % node.astext())
raise nodes.SkipNode
def visit_citation_reference(self, node):
self.add_text('[%s]' % node.astext())
raise nodes.SkipNode
def visit_Text(self, node):
self.add_text(node.astext())
def depart_Text(self, node):
pass
def visit_generated(self, node):
pass
def depart_generated(self, node):
pass
def visit_inline(self, node):
if 'xref' in node['classes'] or 'term' in node['classes']:
self.add_text('*')
def depart_inline(self, node):
if 'xref' in node['classes'] or 'term' in node['classes']:
self.add_text('*')
def visit_container(self, node):
pass
def depart_container(self, node):
pass
def visit_problematic(self, node):
self.add_text('>>')
def depart_problematic(self, node):
self.add_text('<<')
def visit_system_message(self, node):
self.new_state(0)
self.add_text('<SYSTEM MESSAGE: %s>' % node.astext())
self.end_state()
raise nodes.SkipNode
def visit_comment(self, node):
raise nodes.SkipNode
def visit_meta(self, node):
# only valid for HTML
raise nodes.SkipNode
def visit_raw(self, node):
if 'text' in node.get('format', '').split():
self.new_state(0)
self.add_text(node.astext())
self.end_state(wrap = False)
raise nodes.SkipNode
def visit_math(self, node):
self.builder.warn('using "math" markup without a Sphinx math extension '
'active, please use one of the math extensions '
'described at http://sphinx-doc.org/ext/math.html',
(self.builder.current_docname, node.line))
raise nodes.SkipNode
visit_math_block = visit_math
def unknown_visit(self, node):
raise NotImplementedError('Unknown node: ' + node.__class__.__name__)
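The TextWrapper subclass above swaps len() for docutils' column_width() so that East Asian wide and fullwidth characters, which occupy two terminal columns, are measured correctly during wrapping. A minimal sketch of the difference, assuming only that docutils is installed (the sample strings are illustrative):

```python
# Why TextWrapper measures chunks with column_width() instead of len():
# wide/fullwidth characters take two display columns but count as one in len().
from docutils.utils import column_width

ascii_word = "Sphinx"
fullwidth_word = "\u30c6\u30ad\u30b9\u30c8"  # four fullwidth katakana characters

print(len(ascii_word), column_width(ascii_word))          # 6 6
print(len(fullwidth_word), column_width(fullwidth_word))  # 4 8
```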
service_account_create.py
from grouper.fe.forms import ServiceAccountCreateForm
from grouper.fe.settings import settings
from grouper.fe.util import GrouperHandler
from grouper.models.group import Group
from grouper.service_account import (
BadMachineSet,
can_create_service_account,
create_service_account,
DuplicateServiceAccount,
)
class ServiceAccountCreate(GrouperHandler):
def get(self, group_id=None, name=None):
group = Group.get(self.session, group_id, name)
if not group:
return self.notfound()
if not can_create_service_account(self.session, self.current_user, group):
return self.forbidden()
form = ServiceAccountCreateForm()
return self.render("service-account-create.html", form=form, group=group)
def post(self, group_id=None, name=None):
group = Group.get(self.session, group_id, name)
if not group:
return self.notfound()
if "@" not in self.request.arguments["name"][0]:
self.request.arguments["name"][0] += "@" + settings.service_account_email_domain
if not can_create_service_account(self.session, self.current_user, group):
return self.forbidden()
form = ServiceAccountCreateForm(self.request.arguments)
if not form.validate():
return self.render(
"service-account-create.html",
form=form,
group=group,
alerts=self.get_form_alerts(form.errors),
)
if form.data["name"].split("@")[-1] != settings.service_account_email_domain:
form.name.errors.append(
"All service accounts must have a username ending in {}".format(
settings.service_account_email_domain
)
)
return self.render(
"service-account-create.html",
form=form,
group=group,
alerts=self.get_form_alerts(form.errors),
)
try:
create_service_account(
self.session,
self.current_user,
form.data["name"],
form.data["description"],
form.data["machine_set"],
group,
)
except DuplicateServiceAccount:
form.name.errors.append("A user with name {} already exists".format(form.data["name"]))
except BadMachineSet as e:
form.machine_set.errors.append(str(e))
if form.name.errors or form.machine_set.errors:
return self.render(
"service-account-create.html",
form=form,
group=group,
alerts=self.get_form_alerts(form.errors),
)
url = "/groups/{}/service/{}?refresh=yes".format(group.name, form.data["name"])
return self.redirect(url)
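The POST handler above normalizes the submitted name before validating it: a bare username has the configured service-account email domain appended, and a fully qualified name must already end in that domain. A standalone sketch of that rule, with a hypothetical placeholder domain instead of Grouper's settings object:

```python
# Sketch of the name rule enforced by ServiceAccountCreate.post().
# EMAIL_DOMAIN is a hypothetical stand-in for settings.service_account_email_domain.
EMAIL_DOMAIN = "svc.example.com"

def normalize_service_account_name(name):
    """Append the email domain to bare usernames; reject other domains."""
    if "@" not in name:
        name += "@" + EMAIL_DOMAIN
    if name.split("@")[-1] != EMAIL_DOMAIN:
        raise ValueError(
            "All service accounts must have a username ending in " + EMAIL_DOMAIN
        )
    return name

# normalize_service_account_name("deploy-bot")  -> "deploy-bot@svc.example.com"
# normalize_service_account_name("x@other.com") -> ValueError
```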
Ex14_Clust-01_Sol.py
# Init Solution
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
%matplotlib inline
import seaborn as sns
sns.set()
from IPython.display import display, Markdown
# Init Solution completed
from sklearn.cluster import DBSCAN
from sklearn.base import clone
display(Markdown("###### Loading Wi-Fi Data"))
data = pd.read_csv("./Ex14_Clust-01_Data.csv")
display(data.head(5))
display(Markdown("###### NYC Plot"))
fig, ax = plt.subplots(figsize=(20,20))
data.plot.scatter("Longitude", "Latitude", ax=ax, c="b")
fig.suptitle("Wi-Fi Hotspots in NYC")
plt.show()
display(Markdown("###### Clustering"))
def clustering(data, model, metric, ax):
m = clone(model)
m.set_params(metric=metric)
l_pred = m.fit_predict(data)
n_cluster = len(np.unique(l_pred))
data_cluster = data[l_pred != -1]
label_cluster = l_pred[l_pred != -1]
data_outlier = data[l_pred == -1]
data_outlier.plot.scatter("Longitude", "Latitude", ax=ax, c="k", alpha=.5)
data_cluster.plot.scatter("Longitude", "Latitude", ax=ax, c=label_cluster, cmap="rainbow", colorbar=False)
ax.set(title=f"Found {n_cluster} clusters with distance metric {metric}")
model = DBSCAN(eps=.005)
data_coord = data[["Longitude", "Latitude"]]
fig, ax = plt.subplots(1,2,figsize=(20,10))
clustering(data_coord, model, "euclidean", ax[0])
clustering(data_coord, model, "manhattan", ax[1])
fig.suptitle("Wi-Fi Clusters in NYC")
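The clustering() helper relies on DBSCAN's labeling convention: fit_predict() returns -1 for points it treats as noise, and every other integer identifies a cluster, which is what the l_pred != -1 masks above exploit. A small self-contained sketch of that convention on synthetic data (the blob parameters are arbitrary):

```python
# DBSCAN label convention used by clustering(): -1 marks noise, other
# integers are cluster ids; masking on label != -1 separates the outliers.
import numpy as np
from sklearn.cluster import DBSCAN
from sklearn.datasets import make_blobs

X, _ = make_blobs(n_samples=200, centers=3, cluster_std=0.3, random_state=0)
X = np.vstack([X, [[10.0, 10.0]]])  # one far-away point that should become noise

labels = DBSCAN(eps=0.5, min_samples=5).fit_predict(X)
print(np.unique(labels))          # e.g. [-1  0  1  2]
print(int((labels == -1).sum()))  # number of points treated as outliers
```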
test_v1_load_balancer_ingress.py
# coding: utf-8
"""
OpenShift API (with Kubernetes)
OpenShift provides builds, application lifecycle, image content management, and administrative policy on top of Kubernetes. The API allows consistent management of those objects. All API operations are authenticated via an Authorization bearer token that is provided for service accounts as a generated secret (in JWT form) or via the native OAuth endpoint located at /oauth/authorize. Core infrastructure components may use openshift.client certificates that require no authentication. All API operations return a 'resourceVersion' string that represents the version of the object in the underlying storage. The standard LIST operation performs a snapshot read of the underlying objects, returning a resourceVersion representing a consistent version of the listed objects. The WATCH operation allows all updates to a set of objects after the provided resourceVersion to be observed by a openshift.client. By listing and beginning a watch from the returned resourceVersion, openshift.clients may observe a consistent view of the state of one or more objects. Note that WATCH always returns the update after the provided resourceVersion. Watch may be extended a limited time in the past - using etcd 2 the watch window is 1000 events (which on a large cluster may only be a few tens of seconds) so openshift.clients must explicitly handle the \"watch to old error\" by re-listing. Objects are divided into two rough categories - those that have a lifecycle and must reflect the state of the cluster, and those that have no state. Objects with lifecycle typically have three main sections: * 'metadata' common to all objects * a 'spec' that represents the desired state * a 'status' that represents how much of the desired state is reflected on the cluster at the current time Objects that have no state have 'metadata' but may lack a 'spec' or 'status' section. Objects are divided into those that are namespace scoped (only exist inside of a namespace) and those that are cluster scoped (exist outside of a namespace). A namespace scoped resource will be deleted when the namespace is deleted and cannot be created if the namespace has not yet been created or is in the process of deletion. Cluster scoped resources are typically only accessible to admins - resources like nodes, persistent volumes, and cluster policy. All objects have a schema that is a combination of the 'kind' and 'apiVersion' fields. This schema is additive only for any given version - no backwards incompatible changes are allowed without incrementing the apiVersion. The server will return and accept a number of standard responses that share a common schema - for instance, the common error type is 'unversioned.Status' (described below) and will be returned on any error from the API server. The API is available in multiple serialization formats - the default is JSON (Accept: application/json and Content-Type: application/json) but openshift.clients may also use YAML (application/yaml) or the native Protobuf schema (application/vnd.kubernetes.protobuf). Note that the format of the WATCH API call is slightly different - for JSON it returns newline delimited objects while for Protobuf it returns length-delimited frames (4 bytes in network-order) that contain a 'versioned.Watch' Protobuf object. See the OpenShift documentation at https://docs.openshift.org for more information.
OpenAPI spec version: v3.6.0-alpha.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import openshift.client
from kubernetes.client.rest import ApiException
from openshift.client.models.v1_load_balancer_ingress import V1LoadBalancerIngress
class TestV1LoadBalancerIngress(unittest.TestCase):
""" V1LoadBalancerIngress unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1LoadBalancerIngress(self):
"""
Test V1LoadBalancerIngress
"""
model = openshift.client.models.v1_load_balancer_ingress.V1LoadBalancerIngress()
if __name__ == '__main__':
unittest.main()
issue2.py
from __future__ import absolute_import
import six
from rest_framework.response import Response
from social_auth.models import UserSocialAuth
from django.conf import settings
from django.conf.urls import url
from django.core.urlresolvers import reverse
from django.utils.html import format_html
from sentry.api.serializers.models.plugin import PluginSerializer
# api compat
from sentry.exceptions import PluginError # NOQA
from sentry.models import Activity, Event, GroupMeta
from sentry.plugins import Plugin
from sentry.plugins.base.configuration import react_plugin_config
from sentry.plugins.endpoints import PluginGroupEndpoint
from sentry.signals import issue_tracker_used
from sentry.utils.auth import get_auth_providers
from sentry.utils.http import absolute_uri
from sentry.utils.safe import safe_execute
# TODO(dcramer): remove this in favor of GroupEndpoint
class IssueGroupActionEndpoint(PluginGroupEndpoint):
view_method_name = None
plugin = None
def _handle(self, request, group, *args, **kwargs):
GroupMeta.objects.populate_cache([group])
return getattr(self.plugin, self.view_method_name)(request, group, *args, **kwargs)
class IssueTrackingPlugin2(Plugin):
auth_provider = None
allowed_actions = ('create', 'link', 'unlink')
# we default this to None to support legacy integrations, but newer style
# should explicitly call out what is stored
issue_fields = None
# issue_fields = frozenset(['id', 'title', 'url'])
def configure(self, project, request):
return react_plugin_config(self, project, request)
def get_plugin_type(self):
return 'issue-tracking'
def has_project_conf(self):
return True
def get_group_body(self, request, group, event, **kwargs):
result = []
for interface in six.itervalues(event.interfaces):
output = safe_execute(interface.to_string, event, _with_transaction=False)
if output:
result.append(output)
return '\n\n'.join(result)
def get_group_description(self, request, group, event):
output = [
absolute_uri(group.get_absolute_url()),
]
body = self.get_group_body(request, group, event)
if body:
output.extend([
'',
'```',
body,
'```',
])
return '\n'.join(output)
def get_group_title(self, request, group, event):
return event.error()
def is_configured(self, request, project, **kwargs):
raise NotImplementedError
def get_group_urls(self):
_urls = []
for action in self.allowed_actions:
view_method_name = 'view_%s' % action
_urls.append(
url(
r'^%s/' % action,
PluginGroupEndpoint.as_view(
view=getattr(self, view_method_name),
),
)
)
return _urls
def get_auth_for_user(self, user, **kwargs):
"""
Return a ``UserSocialAuth`` object for the given user based on this plugin's ``auth_provider``.
"""
assert self.auth_provider, 'There is no auth provider configured for this plugin.'
if not user.is_authenticated():
return None
try:
return UserSocialAuth.objects.filter(user=user, provider=self.auth_provider)[0]
except IndexError:
return None
def needs_auth(self, request, project, **kwargs):
"""
Return ``True`` if the authenticated user needs to associate an auth service before
performing actions with this plugin.
"""
if self.auth_provider is None:
return False
if not request.user.is_authenticated():
return True
return not UserSocialAuth.objects.filter(
user=request.user, provider=self.auth_provider
).exists()
def get_new_issue_fields(self, request, group, event, **kwargs):
"""
If overriding, supported properties include 'readonly': true
"""
return [
{
'name': 'title',
'label': 'Title',
'default': self.get_group_title(request, group, event),
'type': 'text'
}, {
'name': 'description',
'label': 'Description',
'default': self.get_group_description(request, group, event),
'type': 'textarea'
}
]
def get_link_existing_issue_fields(self, request, group, event, **kwargs):
return []
def _get_issue_url_compat(self, group, issue, **kwargs):
if self.issue_fields is None:
return self.get_issue_url(group, issue['id'])
return self.get_issue_url(group, issue)
def _get_issue_label_compat(self, group, issue, **kwargs):
if self.issue_fields is None:
return self.get_issue_label(group, issue['id'])
return self.get_issue_label(group, issue)
def get_issue_url(self, group, issue, **kwargs):
"""
Given an issue context (issue_id string or issue dict) return an absolute URL to the issue's details
page.
"""
raise NotImplementedError
def get_issue_label(self, group, issue, **kwargs):
"""
Given an issue context (issue_id string or issue dict) return a string representing the issue.
e.g. GitHub represents issues as GH-XXX
"""
if isinstance(issue, dict):
return u'#{}'.format(issue['id'])
return u'#{}'.format(issue)
def create_issue(self, request, group, form_data, **kwargs):
"""
Creates the issue on the remote service and returns an issue ID.
Returns ``{'id': '1', 'title': issue_title}``
"""
raise NotImplementedError
def link_issue(self, request, group, form_data, **kwargs):
"""
Can be overridden for any actions needed when linking issues
(like adding a comment to an existing issue).
Returns ``{'id': '1', 'title': issue_title}``
"""
pass
def has_auth_configured(self, **kwargs):
if not self.auth_provider:
return True
return self.auth_provider in get_auth_providers()
def validate_form(self, fields, form_data):
errors = {}
for field in fields:
if field.get('required', True) and not field.get('readonly'):
value = form_data.get(field['name'])
if value is None or value == '':
errors[field['name']] = u'%s is a required field.' % field['label']
return errors
def get_issue_field_map(self):
# XXX(dcramer): legacy support
conf_key = self.get_conf_key()
if self.issue_fields is None:
return {
'id': u'{}:tid'.format(conf_key)
}
return {
key: u'{}:issue_{}'.format(
conf_key,
key,
)
for key in self.issue_fields
}
def build_issue(self, group):
issue_field_map = self.get_issue_field_map()
issue = {}
for key, meta_name in six.iteritems(issue_field_map):
issue[key] = GroupMeta.objects.get_value(group, meta_name, None)
if not any(issue.values()):
return None
return issue
def has_linked_issue(self, group):
return bool(self.build_issue(group))
def unlink_issue(self, request, group, issue, **kwargs):
issue_field_map = self.get_issue_field_map()
for meta_name in six.itervalues(issue_field_map):
GroupMeta.objects.unset_value(group, meta_name)
return self.redirect(group.get_absolute_url())
def view_create(self, request, group, **kwargs):
auth_errors = self.check_config_and_auth(request, group)
if auth_errors:
return Response(auth_errors, status=400)
event = group.get_latest_event()
if event is None:
return Response({
'message': 'Unable to create issues: there are '
'no events associated with this group',
}, status=400)
Event.objects.bind_nodes([event], 'data')
try:
fields = self.get_new_issue_fields(request, group, event, **kwargs)
except Exception as e:
return self.handle_api_error(e)
if request.method == 'GET':
return Response(fields)
errors = self.validate_form(fields, request.DATA)
if errors:
return Response({'error_type': 'validation', 'errors': errors}, status=400)
try:
issue = self.create_issue(
group=group,
form_data=request.DATA,
request=request,
)
except Exception as e:
return self.handle_api_error(e)
if not isinstance(issue, dict):
issue = {'id': issue}
issue_field_map = self.get_issue_field_map()
for key, meta_name in six.iteritems(issue_field_map):
if key in issue:
GroupMeta.objects.set_value(group, meta_name, issue[key])
else:
GroupMeta.objects.unset_value(group, meta_name)
issue_information = {
'title': issue.get('title') or request.DATA.get('title') or self._get_issue_label_compat(group, issue),
'provider': self.get_title(),
'location': self._get_issue_url_compat(group, issue),
'label': self._get_issue_label_compat(group, issue),
}
Activity.objects.create(
project=group.project,
group=group,
type=Activity.CREATE_ISSUE,
user=request.user,
data=issue_information,
)
issue_tracker_used.send_robust(
plugin=self, project=group.project, user=request.user,
sender=type(self)
)
return Response({'issue_url': self.get_issue_url(group, issue),
'link': self._get_issue_url_compat(group, issue),
'label': self._get_issue_label_compat(group, issue),
'id': issue['id']})
def view_link(self, request, group, **kwargs):
auth_errors = self.check_config_and_auth(request, group)
if auth_errors:
return Response(auth_errors, status=400)
event = group.get_latest_event()
if event is None:
return Response({
'message': 'Unable to create issues: there are '
'no events associated with this group',
}, status=400)
Event.objects.bind_nodes([event], 'data')
try:
fields = self.get_link_existing_issue_fields(request, group, event, **kwargs)
except Exception as e:
return self.handle_api_error(e)
if request.method == 'GET':
return Response(fields)
errors = self.validate_form(fields, request.DATA)
if errors:
return Response({'error_type': 'validation', 'errors': errors}, status=400)
try:
issue = self.link_issue(
group=group,
form_data=request.DATA,
request=request,
) or {}
except Exception as e:
return self.handle_api_error(e)
# HACK(dcramer): maintain data for legacy issues
if 'id' not in issue and 'issue_id' in request.DATA:
issue['id'] = request.DATA['issue_id']
issue_field_map = self.get_issue_field_map()
for key, meta_name in six.iteritems(issue_field_map):
if key in issue:
GroupMeta.objects.set_value(group, meta_name, issue[key])
else:
GroupMeta.objects.unset_value(group, meta_name)
issue_information = {
'title': issue.get('title') or self._get_issue_label_compat(group, issue),
'provider': self.get_title(),
'location': self._get_issue_url_compat(group, issue),
'label': self._get_issue_label_compat(group, issue),
}
Activity.objects.create(
project=group.project,
group=group,
type=Activity.CREATE_ISSUE,
user=request.user,
data=issue_information,
)
return Response({'message': 'Successfully linked issue.',
'link': self._get_issue_url_compat(group, issue),
'label': self._get_issue_label_compat(group, issue),
'id': issue['id']})
def view_unlink(self, request, group, **kwargs):
auth_errors = self.check_config_and_auth(request, group)
if auth_errors:
return Response(auth_errors, status=400)
issue = self.build_issue(group)
if issue and 'unlink' in self.allowed_actions:
self.unlink_issue(request, group, issue)
return Response({'message': 'Successfully unlinked issue.'})
return Response({'message': 'No issues to unlink.'}, status=400)
def plugin_issues(self, request, group, plugin_issues, **kwargs):
if not self.is_configured(request=request, project=group.project):
return plugin_issues
item = {
'slug': self.slug,
'allowed_actions': self.allowed_actions,
'title': self.get_title()
}
issue = self.build_issue(group)
if issue:
item['issue'] = {
'issue_id': issue.get('id'),
'url': self._get_issue_url_compat(group, issue),
'label': self._get_issue_label_compat(group, issue),
}
item.update(PluginSerializer(group.project).serialize(self, None, request.user))
plugin_issues.append(item)
return plugin_issues
def get_config(self, *args, **kwargs):
# TODO(dcramer): update existing plugins to just use get_config
# TODO(dcramer): remove request kwarg after sentry-plugins has been
# updated
kwargs.setdefault('request', None)
return self.get_configure_plugin_fields(*args, **kwargs)
def check_config_and_auth(self, request, group):
has_auth_configured = self.has_auth_configured()
if not (has_auth_configured and self.is_configured(
project=group.project, request=request)):
if self.auth_provider:
required_auth_settings = settings.AUTH_PROVIDERS[self.auth_provider]
else:
required_auth_settings = None
return {
'error_type': 'config',
'has_auth_configured': has_auth_configured,
'auth_provider': self.auth_provider,
'required_auth_settings': required_auth_settings,
}
if self.needs_auth(project=group.project, request=request):
return {
'error_type': 'auth',
'auth_url': reverse('socialauth_associate', args=[self.auth_provider])
}
# TODO: should we get rid of this (move it to react?)
def tags(self, request, group, tag_list, **kwargs):
if not self.is_configured(request=request, project=group.project):
return tag_list
issue = self.build_issue(group)
if not issue:
return tag_list
tag_list.append(
format_html(
'<a href="{}">{}</a>',
self._get_issue_url_compat(group, issue),
self._get_issue_label_compat(group, issue),
)
)
return tag_list
IssuePlugin2 = IssueTrackingPlugin2
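IssueTrackingPlugin2 is an abstract base: a concrete integration supplies is_configured(), get_issue_url(), get_issue_label() and create_issue(), while the base class handles form validation, GroupMeta storage, and the create/link/unlink views. A minimal, hypothetical subclass sketch (the tracker name, URL scheme, and stubbed remote call are invented for illustration and not part of Sentry):

```python
# Hypothetical minimal subclass of IssueTrackingPlugin2; the tracker name and
# URL scheme are invented placeholders, not a real Sentry integration.
class ExampleTrackerPlugin(IssueTrackingPlugin2):
    slug = 'example-tracker'
    title = 'Example Tracker'
    conf_key = 'example-tracker'

    def is_configured(self, request, project, **kwargs):
        return bool(self.get_option('tracker_url', project))

    def get_issue_url(self, group, issue, **kwargs):
        issue_id = issue['id'] if isinstance(issue, dict) else issue
        return u'{}/issues/{}'.format(self.get_option('tracker_url', group.project), issue_id)

    def get_issue_label(self, group, issue, **kwargs):
        issue_id = issue['id'] if isinstance(issue, dict) else issue
        return u'EX-{}'.format(issue_id)

    def create_issue(self, request, group, form_data, **kwargs):
        # A real integration would call its tracker's API here; this stub only
        # demonstrates the return shape consumed by view_create().
        return {'id': '1', 'title': form_data['title']}
```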
npm.js
const child_process = require("child_process");
function npm(args) {
return child_process.spawnSync("npm", args, {
cwd: process.cwd(),
stdio: ["inherit", "inherit", "inherit"]
});
}
module.exports = npm;
persistent_volume_spec.rs
// Generated from definition io.k8s.api.core.v1.PersistentVolumeSpec
/// PersistentVolumeSpec is the specification of a persistent volume.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct PersistentVolumeSpec {
/// AccessModes contains all ways the volume can be mounted. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes
pub access_modes: Option<Vec<String>>,
/// AWSElasticBlockStore represents an AWS Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore
pub aws_elastic_block_store: Option<crate::api::core::v1::AWSElasticBlockStoreVolumeSource>,
/// AzureDisk represents an Azure Data Disk mount on the host and bind mount to the pod.
pub azure_disk: Option<crate::api::core::v1::AzureDiskVolumeSource>,
/// AzureFile represents an Azure File Service mount on the host and bind mount to the pod.
pub azure_file: Option<crate::api::core::v1::AzureFilePersistentVolumeSource>,
/// A description of the persistent volume's resources and capacity. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#capacity
pub capacity: Option<std::collections::BTreeMap<String, crate::apimachinery::pkg::api::resource::Quantity>>,
/// CephFS represents a Ceph FS mount on the host that shares a pod's lifetime
pub cephfs: Option<crate::api::core::v1::CephFSPersistentVolumeSource>,
/// Cinder represents a cinder volume attached and mounted on kubelets host machine. More info: https://examples.k8s.io/mysql-cinder-pd/README.md
pub cinder: Option<crate::api::core::v1::CinderPersistentVolumeSource>,
/// ClaimRef is part of a bi-directional binding between PersistentVolume and PersistentVolumeClaim. Expected to be non-nil when bound. claim.VolumeName is the authoritative bind between PV and PVC. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#binding
pub claim_ref: Option<crate::api::core::v1::ObjectReference>,
/// CSI represents storage that is handled by an external CSI driver (Beta feature).
pub csi: Option<crate::api::core::v1::CSIPersistentVolumeSource>,
/// FC represents a Fibre Channel resource that is attached to a kubelet's host machine and then exposed to the pod.
pub fc: Option<crate::api::core::v1::FCVolumeSource>,
/// FlexVolume represents a generic volume resource that is provisioned/attached using an exec based plugin.
pub flex_volume: Option<crate::api::core::v1::FlexPersistentVolumeSource>,
/// Flocker represents a Flocker volume attached to a kubelet's host machine and exposed to the pod for its usage. This depends on the Flocker control service being running
pub flocker: Option<crate::api::core::v1::FlockerVolumeSource>,
/// GCEPersistentDisk represents a GCE Disk resource that is attached to a kubelet's host machine and then exposed to the pod. Provisioned by an admin. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk
pub gce_persistent_disk: Option<crate::api::core::v1::GCEPersistentDiskVolumeSource>,
/// Glusterfs represents a Glusterfs volume that is attached to a host and exposed to the pod. Provisioned by an admin. More info: https://examples.k8s.io/volumes/glusterfs/README.md
pub glusterfs: Option<crate::api::core::v1::GlusterfsPersistentVolumeSource>,
/// HostPath represents a directory on the host. Provisioned by a developer or tester. This is useful for single-node development and testing only! On-host storage is not supported in any way and WILL NOT WORK in a multi-node cluster. More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath
pub host_path: Option<crate::api::core::v1::HostPathVolumeSource>,
/// ISCSI represents an ISCSI Disk resource that is attached to a kubelet's host machine and then exposed to the pod. Provisioned by an admin.
pub iscsi: Option<crate::api::core::v1::ISCSIPersistentVolumeSource>,
/// Local represents directly-attached storage with node affinity
pub local: Option<crate::api::core::v1::LocalVolumeSource>,
/// A list of mount options, e.g. \["ro", "soft"\]. Not validated - mount will simply fail if one is invalid. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes/#mount-options
pub mount_options: Option<Vec<String>>,
/// NFS represents an NFS mount on the host. Provisioned by an admin. More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs
pub nfs: Option<crate::api::core::v1::NFSVolumeSource>,
/// NodeAffinity defines constraints that limit what nodes this volume can be accessed from. This field influences the scheduling of pods that use this volume.
pub node_affinity: Option<crate::api::core::v1::VolumeNodeAffinity>,
/// What happens to a persistent volume when released from its claim. Valid options are Retain (default for manually created PersistentVolumes), Delete (default for dynamically provisioned PersistentVolumes), and Recycle (deprecated). Recycle must be supported by the volume plugin underlying this PersistentVolume. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#reclaiming
pub persistent_volume_reclaim_policy: Option<String>,
/// PhotonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine
pub photon_persistent_disk: Option<crate::api::core::v1::PhotonPersistentDiskVolumeSource>,
/// PortworxVolume represents a portworx volume attached and mounted on kubelets host machine
pub portworx_volume: Option<crate::api::core::v1::PortworxVolumeSource>,
/// Quobyte represents a Quobyte mount on the host that shares a pod's lifetime
pub quobyte: Option<crate::api::core::v1::QuobyteVolumeSource>,
/// RBD represents a Rados Block Device mount on the host that shares a pod's lifetime. More info: https://examples.k8s.io/volumes/rbd/README.md
pub rbd: Option<crate::api::core::v1::RBDPersistentVolumeSource>,
/// ScaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes.
pub scale_io: Option<crate::api::core::v1::ScaleIOPersistentVolumeSource>,
/// Name of StorageClass to which this persistent volume belongs. Empty value means that this volume does not belong to any StorageClass.
pub storage_class_name: Option<String>,
/// StorageOS represents a StorageOS volume that is attached to the kubelet's host machine and mounted into the pod More info: https://examples.k8s.io/volumes/storageos/README.md
pub storageos: Option<crate::api::core::v1::StorageOSPersistentVolumeSource>,
/// volumeMode defines if a volume is intended to be used with a formatted filesystem or to remain in raw block state. Value of Filesystem is implied when not included in spec.
pub volume_mode: Option<String>,
/// VsphereVolume represents a vSphere volume attached and mounted on kubelets host machine
pub vsphere_volume: Option<crate::api::core::v1::VsphereVirtualDiskVolumeSource>,
}
impl<'de> serde::Deserialize<'de> for PersistentVolumeSpec {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
#[allow(non_camel_case_types)]
enum Field {
Key_access_modes,
Key_aws_elastic_block_store,
Key_azure_disk,
Key_azure_file,
Key_capacity,
Key_cephfs,
Key_cinder,
Key_claim_ref,
Key_csi,
Key_fc,
Key_flex_volume,
Key_flocker,
Key_gce_persistent_disk,
Key_glusterfs,
Key_host_path,
Key_iscsi,
Key_local,
Key_mount_options,
Key_nfs,
Key_node_affinity,
Key_persistent_volume_reclaim_policy,
Key_photon_persistent_disk,
Key_portworx_volume,
Key_quobyte,
Key_rbd,
Key_scale_io,
Key_storage_class_name,
Key_storageos,
Key_volume_mode,
Key_vsphere_volume,
Other,
}
impl<'de> serde::Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("field identifier")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error {
Ok(match v {
"accessModes" => Field::Key_access_modes,
"awsElasticBlockStore" => Field::Key_aws_elastic_block_store,
"azureDisk" => Field::Key_azure_disk,
"azureFile" => Field::Key_azure_file,
"capacity" => Field::Key_capacity,
"cephfs" => Field::Key_cephfs,
"cinder" => Field::Key_cinder,
"claimRef" => Field::Key_claim_ref,
"csi" => Field::Key_csi,
"fc" => Field::Key_fc,
"flexVolume" => Field::Key_flex_volume,
"flocker" => Field::Key_flocker,
"gcePersistentDisk" => Field::Key_gce_persistent_disk,
"glusterfs" => Field::Key_glusterfs,
"hostPath" => Field::Key_host_path,
"iscsi" => Field::Key_iscsi,
"local" => Field::Key_local,
"mountOptions" => Field::Key_mount_options,
"nfs" => Field::Key_nfs,
"nodeAffinity" => Field::Key_node_affinity,
"persistentVolumeReclaimPolicy" => Field::Key_persistent_volume_reclaim_policy,
"photonPersistentDisk" => Field::Key_photon_persistent_disk,
"portworxVolume" => Field::Key_portworx_volume,
"quobyte" => Field::Key_quobyte,
"rbd" => Field::Key_rbd,
"scaleIO" => Field::Key_scale_io,
"storageClassName" => Field::Key_storage_class_name,
"storageos" => Field::Key_storageos,
"volumeMode" => Field::Key_volume_mode,
"vsphereVolume" => Field::Key_vsphere_volume,
_ => Field::Other,
})
}
}
deserializer.deserialize_identifier(Visitor)
}
}
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = PersistentVolumeSpec;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("PersistentVolumeSpec")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> {
let mut value_access_modes: Option<Vec<String>> = None;
let mut value_aws_elastic_block_store: Option<crate::api::core::v1::AWSElasticBlockStoreVolumeSource> = None;
let mut value_azure_disk: Option<crate::api::core::v1::AzureDiskVolumeSource> = None;
let mut value_azure_file: Option<crate::api::core::v1::AzureFilePersistentVolumeSource> = None;
let mut value_capacity: Option<std::collections::BTreeMap<String, crate::apimachinery::pkg::api::resource::Quantity>> = None;
let mut value_cephfs: Option<crate::api::core::v1::CephFSPersistentVolumeSource> = None;
let mut value_cinder: Option<crate::api::core::v1::CinderPersistentVolumeSource> = None;
let mut value_claim_ref: Option<crate::api::core::v1::ObjectReference> = None;
let mut value_csi: Option<crate::api::core::v1::CSIPersistentVolumeSource> = None;
|
let mut value_flex_volume: Option<crate::api::core::v1::FlexPersistentVolumeSource> = None;
let mut value_flocker: Option<crate::api::core::v1::FlockerVolumeSource> = None;
let mut value_gce_persistent_disk: Option<crate::api::core::v1::GCEPersistentDiskVolumeSource> = None;
let mut value_glusterfs: Option<crate::api::core::v1::GlusterfsPersistentVolumeSource> = None;
let mut value_host_path: Option<crate::api::core::v1::HostPathVolumeSource> = None;
let mut value_iscsi: Option<crate::api::core::v1::ISCSIPersistentVolumeSource> = None;
let mut value_local: Option<crate::api::core::v1::LocalVolumeSource> = None;
let mut value_mount_options: Option<Vec<String>> = None;
let mut value_nfs: Option<crate::api::core::v1::NFSVolumeSource> = None;
let mut value_node_affinity: Option<crate::api::core::v1::VolumeNodeAffinity> = None;
let mut value_persistent_volume_reclaim_policy: Option<String> = None;
let mut value_photon_persistent_disk: Option<crate::api::core::v1::PhotonPersistentDiskVolumeSource> = None;
let mut value_portworx_volume: Option<crate::api::core::v1::PortworxVolumeSource> = None;
let mut value_quobyte: Option<crate::api::core::v1::QuobyteVolumeSource> = None;
let mut value_rbd: Option<crate::api::core::v1::RBDPersistentVolumeSource> = None;
let mut value_scale_io: Option<crate::api::core::v1::ScaleIOPersistentVolumeSource> = None;
let mut value_storage_class_name: Option<String> = None;
let mut value_storageos: Option<crate::api::core::v1::StorageOSPersistentVolumeSource> = None;
let mut value_volume_mode: Option<String> = None;
let mut value_vsphere_volume: Option<crate::api::core::v1::VsphereVirtualDiskVolumeSource> = None;
while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? {
match key {
Field::Key_access_modes => value_access_modes = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_aws_elastic_block_store => value_aws_elastic_block_store = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_azure_disk => value_azure_disk = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_azure_file => value_azure_file = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_capacity => value_capacity = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_cephfs => value_cephfs = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_cinder => value_cinder = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_claim_ref => value_claim_ref = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_csi => value_csi = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_fc => value_fc = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_flex_volume => value_flex_volume = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_flocker => value_flocker = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_gce_persistent_disk => value_gce_persistent_disk = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_glusterfs => value_glusterfs = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_host_path => value_host_path = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_iscsi => value_iscsi = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_local => value_local = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_mount_options => value_mount_options = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_nfs => value_nfs = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_node_affinity => value_node_affinity = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_persistent_volume_reclaim_policy => value_persistent_volume_reclaim_policy = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_photon_persistent_disk => value_photon_persistent_disk = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_portworx_volume => value_portworx_volume = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_quobyte => value_quobyte = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_rbd => value_rbd = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_scale_io => value_scale_io = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_storage_class_name => value_storage_class_name = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_storageos => value_storageos = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_volume_mode => value_volume_mode = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_vsphere_volume => value_vsphere_volume = serde::de::MapAccess::next_value(&mut map)?,
Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; },
}
}
Ok(PersistentVolumeSpec {
access_modes: value_access_modes,
aws_elastic_block_store: value_aws_elastic_block_store,
azure_disk: value_azure_disk,
azure_file: value_azure_file,
capacity: value_capacity,
cephfs: value_cephfs,
cinder: value_cinder,
claim_ref: value_claim_ref,
csi: value_csi,
fc: value_fc,
flex_volume: value_flex_volume,
flocker: value_flocker,
gce_persistent_disk: value_gce_persistent_disk,
glusterfs: value_glusterfs,
host_path: value_host_path,
iscsi: value_iscsi,
local: value_local,
mount_options: value_mount_options,
nfs: value_nfs,
node_affinity: value_node_affinity,
persistent_volume_reclaim_policy: value_persistent_volume_reclaim_policy,
photon_persistent_disk: value_photon_persistent_disk,
portworx_volume: value_portworx_volume,
quobyte: value_quobyte,
rbd: value_rbd,
scale_io: value_scale_io,
storage_class_name: value_storage_class_name,
storageos: value_storageos,
volume_mode: value_volume_mode,
vsphere_volume: value_vsphere_volume,
})
}
}
deserializer.deserialize_struct(
"PersistentVolumeSpec",
&[
"accessModes",
"awsElasticBlockStore",
"azureDisk",
"azureFile",
"capacity",
"cephfs",
"cinder",
"claimRef",
"csi",
"fc",
"flexVolume",
"flocker",
"gcePersistentDisk",
"glusterfs",
"hostPath",
"iscsi",
"local",
"mountOptions",
"nfs",
"nodeAffinity",
"persistentVolumeReclaimPolicy",
"photonPersistentDisk",
"portworxVolume",
"quobyte",
"rbd",
"scaleIO",
"storageClassName",
"storageos",
"volumeMode",
"vsphereVolume",
],
Visitor,
)
}
}
impl serde::Serialize for PersistentVolumeSpec {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer {
let mut state = serializer.serialize_struct(
"PersistentVolumeSpec",
self.access_modes.as_ref().map_or(0, |_| 1) +
self.aws_elastic_block_store.as_ref().map_or(0, |_| 1) +
self.azure_disk.as_ref().map_or(0, |_| 1) +
self.azure_file.as_ref().map_or(0, |_| 1) +
self.capacity.as_ref().map_or(0, |_| 1) +
self.cephfs.as_ref().map_or(0, |_| 1) +
self.cinder.as_ref().map_or(0, |_| 1) +
self.claim_ref.as_ref().map_or(0, |_| 1) +
self.csi.as_ref().map_or(0, |_| 1) +
self.fc.as_ref().map_or(0, |_| 1) +
self.flex_volume.as_ref().map_or(0, |_| 1) +
self.flocker.as_ref().map_or(0, |_| 1) +
self.gce_persistent_disk.as_ref().map_or(0, |_| 1) +
self.glusterfs.as_ref().map_or(0, |_| 1) +
self.host_path.as_ref().map_or(0, |_| 1) +
self.iscsi.as_ref().map_or(0, |_| 1) +
self.local.as_ref().map_or(0, |_| 1) +
self.mount_options.as_ref().map_or(0, |_| 1) +
self.nfs.as_ref().map_or(0, |_| 1) +
self.node_affinity.as_ref().map_or(0, |_| 1) +
self.persistent_volume_reclaim_policy.as_ref().map_or(0, |_| 1) +
self.photon_persistent_disk.as_ref().map_or(0, |_| 1) +
self.portworx_volume.as_ref().map_or(0, |_| 1) +
self.quobyte.as_ref().map_or(0, |_| 1) +
self.rbd.as_ref().map_or(0, |_| 1) +
self.scale_io.as_ref().map_or(0, |_| 1) +
self.storage_class_name.as_ref().map_or(0, |_| 1) +
self.storageos.as_ref().map_or(0, |_| 1) +
self.volume_mode.as_ref().map_or(0, |_| 1) +
self.vsphere_volume.as_ref().map_or(0, |_| 1),
)?;
if let Some(value) = &self.access_modes {
serde::ser::SerializeStruct::serialize_field(&mut state, "accessModes", value)?;
}
if let Some(value) = &self.aws_elastic_block_store {
serde::ser::SerializeStruct::serialize_field(&mut state, "awsElasticBlockStore", value)?;
}
if let Some(value) = &self.azure_disk {
serde::ser::SerializeStruct::serialize_field(&mut state, "azureDisk", value)?;
}
if let Some(value) = &self.azure_file {
serde::ser::SerializeStruct::serialize_field(&mut state, "azureFile", value)?;
}
if let Some(value) = &self.capacity {
serde::ser::SerializeStruct::serialize_field(&mut state, "capacity", value)?;
}
if let Some(value) = &self.cephfs {
serde::ser::SerializeStruct::serialize_field(&mut state, "cephfs", value)?;
}
if let Some(value) = &self.cinder {
serde::ser::SerializeStruct::serialize_field(&mut state, "cinder", value)?;
}
if let Some(value) = &self.claim_ref {
serde::ser::SerializeStruct::serialize_field(&mut state, "claimRef", value)?;
}
if let Some(value) = &self.csi {
serde::ser::SerializeStruct::serialize_field(&mut state, "csi", value)?;
}
if let Some(value) = &self.fc {
serde::ser::SerializeStruct::serialize_field(&mut state, "fc", value)?;
}
if let Some(value) = &self.flex_volume {
serde::ser::SerializeStruct::serialize_field(&mut state, "flexVolume", value)?;
}
if let Some(value) = &self.flocker {
serde::ser::SerializeStruct::serialize_field(&mut state, "flocker", value)?;
}
if let Some(value) = &self.gce_persistent_disk {
serde::ser::SerializeStruct::serialize_field(&mut state, "gcePersistentDisk", value)?;
}
if let Some(value) = &self.glusterfs {
serde::ser::SerializeStruct::serialize_field(&mut state, "glusterfs", value)?;
}
if let Some(value) = &self.host_path {
serde::ser::SerializeStruct::serialize_field(&mut state, "hostPath", value)?;
}
if let Some(value) = &self.iscsi {
serde::ser::SerializeStruct::serialize_field(&mut state, "iscsi", value)?;
}
if let Some(value) = &self.local {
serde::ser::SerializeStruct::serialize_field(&mut state, "local", value)?;
}
if let Some(value) = &self.mount_options {
serde::ser::SerializeStruct::serialize_field(&mut state, "mountOptions", value)?;
}
if let Some(value) = &self.nfs {
serde::ser::SerializeStruct::serialize_field(&mut state, "nfs", value)?;
}
if let Some(value) = &self.node_affinity {
serde::ser::SerializeStruct::serialize_field(&mut state, "nodeAffinity", value)?;
}
if let Some(value) = &self.persistent_volume_reclaim_policy {
serde::ser::SerializeStruct::serialize_field(&mut state, "persistentVolumeReclaimPolicy", value)?;
}
if let Some(value) = &self.photon_persistent_disk {
serde::ser::SerializeStruct::serialize_field(&mut state, "photonPersistentDisk", value)?;
}
if let Some(value) = &self.portworx_volume {
serde::ser::SerializeStruct::serialize_field(&mut state, "portworxVolume", value)?;
}
if let Some(value) = &self.quobyte {
serde::ser::SerializeStruct::serialize_field(&mut state, "quobyte", value)?;
}
if let Some(value) = &self.rbd {
serde::ser::SerializeStruct::serialize_field(&mut state, "rbd", value)?;
}
if let Some(value) = &self.scale_io {
serde::ser::SerializeStruct::serialize_field(&mut state, "scaleIO", value)?;
}
if let Some(value) = &self.storage_class_name {
serde::ser::SerializeStruct::serialize_field(&mut state, "storageClassName", value)?;
}
if let Some(value) = &self.storageos {
serde::ser::SerializeStruct::serialize_field(&mut state, "storageos", value)?;
}
if let Some(value) = &self.volume_mode {
serde::ser::SerializeStruct::serialize_field(&mut state, "volumeMode", value)?;
}
if let Some(value) = &self.vsphere_volume {
serde::ser::SerializeStruct::serialize_field(&mut state, "vsphereVolume", value)?;
}
serde::ser::SerializeStruct::end(state)
}
}
|
let mut value_fc: Option<crate::api::core::v1::FCVolumeSource> = None;
|
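The generated PersistentVolumeSpec impls above follow serde's manual field-identifier pattern: a private Field enum maps the camelCase wire names to struct fields, unknown keys fall through to Field::Other and are discarded via IgnoredAny, and every field is optional. Below is a minimal, self-contained sketch of the same pattern, assuming a hypothetical two-field Spec struct as a stand-in for the generated type (it only needs the serde and serde_json crates):

```rust
// Sketch only: a hand-rolled Deserialize mirroring the generated pattern above.
// `Spec` and its two fields are hypothetical stand-ins, not part of the generated API.
use serde::de::{self, Deserializer, IgnoredAny, MapAccess, Visitor};
use std::fmt;

#[derive(Debug, Default, PartialEq)]
struct Spec {
    storage_class_name: Option<String>,
    volume_mode: Option<String>,
}

impl<'de> serde::Deserialize<'de> for Spec {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de> {
        // Field identifiers map camelCase JSON keys to struct fields;
        // anything unrecognised falls through to `Other` and is ignored.
        enum Field { StorageClassName, VolumeMode, Other }

        impl<'de> serde::Deserialize<'de> for Field {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de> {
                struct FieldVisitor;
                impl<'de> Visitor<'de> for FieldVisitor {
                    type Value = Field;
                    fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                        f.write_str("field identifier")
                    }
                    fn visit_str<E: de::Error>(self, v: &str) -> Result<Field, E> {
                        Ok(match v {
                            "storageClassName" => Field::StorageClassName,
                            "volumeMode" => Field::VolumeMode,
                            _ => Field::Other,
                        })
                    }
                }
                deserializer.deserialize_identifier(FieldVisitor)
            }
        }

        struct SpecVisitor;
        impl<'de> Visitor<'de> for SpecVisitor {
            type Value = Spec;
            fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.write_str("Spec")
            }
            fn visit_map<A: MapAccess<'de>>(self, mut map: A) -> Result<Spec, A::Error> {
                let mut spec = Spec::default();
                while let Some(key) = map.next_key::<Field>()? {
                    match key {
                        Field::StorageClassName => spec.storage_class_name = map.next_value()?,
                        Field::VolumeMode => spec.volume_mode = map.next_value()?,
                        Field::Other => { let _: IgnoredAny = map.next_value()?; }
                    }
                }
                Ok(spec)
            }
        }

        deserializer.deserialize_struct("Spec", &["storageClassName", "volumeMode"], SpecVisitor)
    }
}

fn main() {
    let spec: Spec = serde_json::from_str(
        r#"{"storageClassName":"fast","volumeMode":"Filesystem","unknownKey":42}"#,
    ).unwrap();
    assert_eq!(spec.storage_class_name.as_deref(), Some("fast"));
}
```

The unknownKey entry in main exercises the Other arm, so deserialization tolerates keys the struct does not model.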
start.ts
|
import { Command } from '../../deps.ts'
import { ManifestFile } from '../helpers/manifest_helper.ts'
import { execDenoRun } from '../utils/cmds.ts'
import { wrap } from '../lifecycles/hooks.ts'
export const action = async (command: Command, args: string[]) => {
const specifiedFile = args && args[0]
const manifest = new ManifestFile()
const isExists = manifest.exists
if (!isExists) {
throw new Error('No pkg.json file found. Run `dep init` to create one.')
}
await manifest.load()
if (!manifest.data) {
|
}
else if (!specifiedFile && !manifest.data.main) {
throw new Error('Entry file is not provided.')
}
await execDenoRun(manifest.data, specifiedFile)
}
export const command = new Command('start')
.description('Start a deno program with automatically generated flags.')
.usage('[file]')
.action(wrap(action))
|
throw new Error('Malformed pkg.json file.')
|
api_service_spec.rs
|
// Generated from definition io.k8s.kube-aggregator.pkg.apis.apiregistration.v1.APIServiceSpec
/// APIServiceSpec contains information for locating and communicating with a server. Only https is supported, though you are able to disable certificate verification.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct APIServiceSpec {
/// CABundle is a PEM encoded CA bundle which will be used to validate an API server's serving certificate. If unspecified, system trust roots on the apiserver are used.
pub ca_bundle: Option<crate::ByteString>,
/// Group is the API group name this server hosts
pub group: Option<String>,
/// GroupPriorityMinimum is the priority this group should have at least. Higher priority means that the group is preferred by clients over lower priority ones. Note that other versions of this group might specify even higher GroupPriorityMinimum values such that the whole group gets a higher priority. The primary sort is based on GroupPriorityMinimum, ordered highest number to lowest (20 before 10). The secondary sort is based on the alphabetical comparison of the name of the object. (v1.bar before v1.foo) We'd recommend something like: *.k8s.io (except extensions) at 18000 and PaaSes (OpenShift, Deis) are recommended to be in the 2000s
pub group_priority_minimum: i32,
/// InsecureSkipTLSVerify disables TLS certificate verification when communicating with this server. This is strongly discouraged. You should use the CABundle instead.
pub insecure_skip_tls_verify: Option<bool>,
/// Service is a reference to the service for this API server. It must communicate on port 443. If the Service is nil, that means the handling for the API groupversion is handled locally on this server. The call will simply delegate to the normal handler chain to be fulfilled.
pub service: Option<crate::kube_aggregator::pkg::apis::apiregistration::v1::ServiceReference>,
/// Version is the API version this server hosts. For example, "v1"
pub version: Option<String>,
/// VersionPriority controls the ordering of this API version inside of its group. Must be greater than zero. The primary sort is based on VersionPriority, ordered highest to lowest (20 before 10). Since it's inside of a group, the number can be small, probably in the 10s. In case of equal version priorities, the version string will be used to compute the order inside a group. If the version string is "kube-like", it will sort above non "kube-like" version strings, which are ordered lexicographically. "Kube-like" versions start with a "v", then are followed by a number (the major version), then optionally the string "alpha" or "beta" and another number (the minor version). These are sorted first by GA \> beta \> alpha (where GA is a version with no suffix such as beta or alpha), and then by comparing major version, then minor version. An example sorted list of versions: v10, v2, v1, v11beta2, v10beta3, v3beta1, v12alpha1, v11alpha2, foo1, foo10.
pub version_priority: i32,
}
impl<'de> crate::serde::Deserialize<'de> for APIServiceSpec {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> {
#[allow(non_camel_case_types)]
enum Field {
Key_ca_bundle,
Key_group,
Key_group_priority_minimum,
Key_insecure_skip_tls_verify,
Key_service,
Key_version,
Key_version_priority,
Other,
}
impl<'de> crate::serde::Deserialize<'de> for Field {
fn
|
<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> {
struct Visitor;
impl<'de> crate::serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("field identifier")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: crate::serde::de::Error {
Ok(match v {
"caBundle" => Field::Key_ca_bundle,
"group" => Field::Key_group,
"groupPriorityMinimum" => Field::Key_group_priority_minimum,
"insecureSkipTLSVerify" => Field::Key_insecure_skip_tls_verify,
"service" => Field::Key_service,
"version" => Field::Key_version,
"versionPriority" => Field::Key_version_priority,
_ => Field::Other,
})
}
}
deserializer.deserialize_identifier(Visitor)
}
}
struct Visitor;
impl<'de> crate::serde::de::Visitor<'de> for Visitor {
type Value = APIServiceSpec;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("APIServiceSpec")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: crate::serde::de::MapAccess<'de> {
let mut value_ca_bundle: Option<crate::ByteString> = None;
let mut value_group: Option<String> = None;
let mut value_group_priority_minimum: Option<i32> = None;
let mut value_insecure_skip_tls_verify: Option<bool> = None;
let mut value_service: Option<crate::kube_aggregator::pkg::apis::apiregistration::v1::ServiceReference> = None;
let mut value_version: Option<String> = None;
let mut value_version_priority: Option<i32> = None;
while let Some(key) = crate::serde::de::MapAccess::next_key::<Field>(&mut map)? {
match key {
Field::Key_ca_bundle => value_ca_bundle = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_group => value_group = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_group_priority_minimum => value_group_priority_minimum = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_insecure_skip_tls_verify => value_insecure_skip_tls_verify = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_service => value_service = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_version => value_version = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_version_priority => value_version_priority = crate::serde::de::MapAccess::next_value(&mut map)?,
Field::Other => { let _: crate::serde::de::IgnoredAny = crate::serde::de::MapAccess::next_value(&mut map)?; },
}
}
Ok(APIServiceSpec {
ca_bundle: value_ca_bundle,
group: value_group,
group_priority_minimum: value_group_priority_minimum.unwrap_or_default(),
insecure_skip_tls_verify: value_insecure_skip_tls_verify,
service: value_service,
version: value_version,
version_priority: value_version_priority.unwrap_or_default(),
})
}
}
deserializer.deserialize_struct(
"APIServiceSpec",
&[
"caBundle",
"group",
"groupPriorityMinimum",
"insecureSkipTLSVerify",
"service",
"version",
"versionPriority",
],
Visitor,
)
}
}
impl crate::serde::Serialize for APIServiceSpec {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: crate::serde::Serializer {
let mut state = serializer.serialize_struct(
"APIServiceSpec",
2 +
self.ca_bundle.as_ref().map_or(0, |_| 1) +
self.group.as_ref().map_or(0, |_| 1) +
self.insecure_skip_tls_verify.as_ref().map_or(0, |_| 1) +
self.service.as_ref().map_or(0, |_| 1) +
self.version.as_ref().map_or(0, |_| 1),
)?;
if let Some(value) = &self.ca_bundle {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "caBundle", value)?;
}
if let Some(value) = &self.group {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "group", value)?;
}
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "groupPriorityMinimum", &self.group_priority_minimum)?;
if let Some(value) = &self.insecure_skip_tls_verify {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "insecureSkipTLSVerify", value)?;
}
if let Some(value) = &self.service {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "service", value)?;
}
if let Some(value) = &self.version {
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "version", value)?;
}
crate::serde::ser::SerializeStruct::serialize_field(&mut state, "versionPriority", &self.version_priority)?;
crate::serde::ser::SerializeStruct::end(state)
}
}
#[cfg(feature = "schemars")]
impl crate::schemars::JsonSchema for APIServiceSpec {
fn schema_name() -> String {
"io.k8s.kube-aggregator.pkg.apis.apiregistration.v1.APIServiceSpec".to_owned()
}
fn json_schema(__gen: &mut crate::schemars::gen::SchemaGenerator) -> crate::schemars::schema::Schema {
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("APIServiceSpec contains information for locating and communicating with a server. Only https is supported, though you are able to disable certificate verification.".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::Object))),
object: Some(Box::new(crate::schemars::schema::ObjectValidation {
properties: [
(
"caBundle".to_owned(),
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("CABundle is a PEM encoded CA bundle which will be used to validate an API server's serving certificate. If unspecified, system trust roots on the apiserver are used.".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::String))),
format: Some("byte".to_owned()),
..Default::default()
}),
),
(
"group".to_owned(),
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("Group is the API group name this server hosts".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::String))),
..Default::default()
}),
),
(
"groupPriorityMinimum".to_owned(),
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("GroupPriorityMininum is the priority this group should have at least. Higher priority means that the group is preferred by clients over lower priority ones. Note that other versions of this group might specify even higher GroupPriorityMininum values such that the whole group gets a higher priority. The primary sort is based on GroupPriorityMinimum, ordered highest number to lowest (20 before 10). The secondary sort is based on the alphabetical comparison of the name of the object. (v1.bar before v1.foo) We'd recommend something like: *.k8s.io (except extensions) at 18000 and PaaSes (OpenShift, Deis) are recommended to be in the 2000s".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::Integer))),
format: Some("int32".to_owned()),
..Default::default()
}),
),
(
"insecureSkipTLSVerify".to_owned(),
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("InsecureSkipTLSVerify disables TLS certificate verification when communicating with this server. This is strongly discouraged. You should use the CABundle instead.".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::Boolean))),
..Default::default()
}),
),
(
"service".to_owned(),
{
let mut schema_obj = __gen.subschema_for::<crate::kube_aggregator::pkg::apis::apiregistration::v1::ServiceReference>().into_object();
schema_obj.metadata = Some(Box::new(crate::schemars::schema::Metadata {
description: Some("Service is a reference to the service for this API server. It must communicate on port 443 If the Service is nil, that means the handling for the API groupversion is handled locally on this server. The call will simply delegate to the normal handler chain to be fulfilled.".to_owned()),
..Default::default()
}));
crate::schemars::schema::Schema::Object(schema_obj)
},
),
(
"version".to_owned(),
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("Version is the API version this server hosts. For example, \"v1\"".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::String))),
..Default::default()
}),
),
(
"versionPriority".to_owned(),
crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
metadata: Some(Box::new(crate::schemars::schema::Metadata {
description: Some("VersionPriority controls the ordering of this API version inside of its group. Must be greater than zero. The primary sort is based on VersionPriority, ordered highest to lowest (20 before 10). Since it's inside of a group, the number can be small, probably in the 10s. In case of equal version priorities, the version string will be used to compute the order inside a group. If the version string is \"kube-like\", it will sort above non \"kube-like\" version strings, which are ordered lexicographically. \"Kube-like\" versions start with a \"v\", then are followed by a number (the major version), then optionally the string \"alpha\" or \"beta\" and another number (the minor version). These are sorted first by GA > beta > alpha (where GA is a version with no suffix such as beta or alpha), and then by comparing major version, then minor version. An example sorted list of versions: v10, v2, v1, v11beta2, v10beta3, v3beta1, v12alpha1, v11alpha2, foo1, foo10.".to_owned()),
..Default::default()
})),
instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::Integer))),
format: Some("int32".to_owned()),
..Default::default()
}),
),
].into(),
required: [
"groupPriorityMinimum".to_owned(),
"versionPriority".to_owned(),
].into(),
..Default::default()
})),
..Default::default()
})
}
}
|
deserialize
|
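APIServiceSpec differs from PersistentVolumeSpec in that groupPriorityMinimum and versionPriority are required: the Serialize impl counts them into the struct length up front (the leading "2 +") and always writes them, while the Deserialize impl falls back to unwrap_or_default() when they are absent. A hedged sketch of that field-counting logic, assuming a hypothetical MiniSpec with one required and one optional field:

```rust
// Sketch only: manual Serialize mirroring the field-count logic above.
// `MiniSpec` is a hypothetical stand-in, not part of the generated API.
use serde::ser::{Serialize, SerializeStruct, Serializer};

struct MiniSpec {
    group_priority_minimum: i32, // required: always serialized
    version: Option<String>,     // optional: skipped when None
}

impl Serialize for MiniSpec {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        // The declared length is the number of fields that will actually be
        // written: 1 for the required field plus 1 per present Option.
        let len = 1 + self.version.as_ref().map_or(0, |_| 1);
        let mut state = serializer.serialize_struct("MiniSpec", len)?;
        state.serialize_field("groupPriorityMinimum", &self.group_priority_minimum)?;
        if let Some(value) = &self.version {
            state.serialize_field("version", value)?;
        }
        state.end()
    }
}

fn main() {
    let spec = MiniSpec { group_priority_minimum: 18000, version: None };
    // With `version` absent, only the required key appears in the output.
    assert_eq!(serde_json::to_string(&spec).unwrap(), r#"{"groupPriorityMinimum":18000}"#);
}
```

The length passed to serialize_struct is only a hint of how many fields will follow; JSON ignores it, but length-prefixed formats rely on it, which is why the generated code keeps the count exact.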
session_test.go
|
package scs
import (
"context"
"errors"
"fmt"
"io/ioutil"
"net/http"
"net/http/cookiejar"
"net/http/httptest"
"reflect"
"sort"
"strconv"
"strings"
"testing"
"time"
)
type testServer struct {
*httptest.Server
}
func newTestServer(t *testing.T, h http.Handler) *testServer {
ts := httptest.NewTLSServer(h)
jar, err := cookiejar.New(nil)
if err != nil {
t.Fatal(err)
}
ts.Client().Jar = jar
ts.Client().CheckRedirect = func(req *http.Request, via []*http.Request) error {
return http.ErrUseLastResponse
}
return &testServer{ts}
}
func (ts *testServer) execute(t *testing.T, urlPath string) (http.Header, string) {
rs, err := ts.Client().Get(ts.URL + urlPath)
if err != nil {
t.Fatal(err)
}
defer rs.Body.Close()
body, err := ioutil.ReadAll(rs.Body)
if err != nil {
t.Fatal(err)
}
return rs.Header, string(body)
}
func extractTokenFromCookie(c string) string {
parts := strings.Split(c, ";")
return strings.SplitN(parts[0], "=", 2)[1]
}
func TestEnable(t *testing.T) {
t.Parallel()
sessionManager := New()
mux := http.NewServeMux()
mux.HandleFunc("/put", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
sessionManager.Put(r.Context(), "foo", "bar")
}))
mux.HandleFunc("/get", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
s := sessionManager.Get(r.Context(), "foo").(string)
w.Write([]byte(s))
}))
ts := newTestServer(t, sessionManager.LoadAndSave(mux))
defer ts.Close()
header, _ := ts.execute(t, "/put")
token1 := extractTokenFromCookie(header.Get("Set-Cookie"))
header, body := ts.execute(t, "/get")
if body != "bar" {
t.Errorf("want %q; got %q", "bar", body)
}
if header.Get("Set-Cookie") != "" {
t.Errorf("want %q; got %q", "", header.Get("Set-Cookie"))
}
header, _ = ts.execute(t, "/put")
token2 := extractTokenFromCookie(header.Get("Set-Cookie"))
if token1 != token2 {
t.Error("want tokens to be the same")
}
}
func TestLifetime(t *testing.T) {
t.Parallel()
sessionManager := New()
sessionManager.Lifetime = 500 * time.Millisecond
mux := http.NewServeMux()
mux.HandleFunc("/put", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
sessionManager.Put(r.Context(), "foo", "bar")
}))
mux.HandleFunc("/get", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
v := sessionManager.Get(r.Context(), "foo")
if v == nil {
http.Error(w, "foo does not exist in session", 500)
return
}
w.Write([]byte(v.(string)))
}))
ts := newTestServer(t, sessionManager.LoadAndSave(mux))
defer ts.Close()
ts.execute(t, "/put")
_, body := ts.execute(t, "/get")
if body != "bar" {
t.Errorf("want %q; got %q", "bar", body)
}
time.Sleep(time.Second)
_, body = ts.execute(t, "/get")
if body != "foo does not exist in session\n" {
t.Errorf("want %q; got %q", "foo does not exist in session\n", body)
}
}
func TestIdleTimeout(t *testing.T) {
t.Parallel()
sessionManager := New()
sessionManager.IdleTimeout = 200 * time.Millisecond
sessionManager.Lifetime = time.Second
mux := http.NewServeMux()
mux.HandleFunc("/put", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
sessionManager.Put(r.Context(), "foo", "bar")
}))
mux.HandleFunc("/get", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
v := sessionManager.Get(r.Context(), "foo")
if v == nil {
http.Error(w, "foo does not exist in session", 500)
return
}
w.Write([]byte(v.(string)))
}))
ts := newTestServer(t, sessionManager.LoadAndSave(mux))
defer ts.Close()
ts.execute(t, "/put")
time.Sleep(100 * time.Millisecond)
ts.execute(t, "/get")
time.Sleep(150 * time.Millisecond)
_, body := ts.execute(t, "/get")
if body != "bar" {
t.Errorf("want %q; got %q", "bar", body)
}
time.Sleep(200 * time.Millisecond)
_, body = ts.execute(t, "/get")
if body != "foo does not exist in session\n" {
t.Errorf("want %q; got %q", "foo does not exist in session\n", body)
}
}
func TestDestroy(t *testing.T)
|
func TestRenewToken(t *testing.T) {
t.Parallel()
sessionManager := New()
mux := http.NewServeMux()
mux.HandleFunc("/put", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
sessionManager.Put(r.Context(), "foo", "bar")
}))
mux.HandleFunc("/renew", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
err := sessionManager.RenewToken(r.Context())
if err != nil {
http.Error(w, err.Error(), 500)
return
}
}))
mux.HandleFunc("/get", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
v := sessionManager.Get(r.Context(), "foo")
if v == nil {
http.Error(w, "foo does not exist in session", 500)
return
}
w.Write([]byte(v.(string)))
}))
ts := newTestServer(t, sessionManager.LoadAndSave(mux))
defer ts.Close()
header, _ := ts.execute(t, "/put")
cookie := header.Get("Set-Cookie")
originalToken := extractTokenFromCookie(cookie)
header, _ = ts.execute(t, "/renew")
cookie = header.Get("Set-Cookie")
newToken := extractTokenFromCookie(cookie)
if newToken == originalToken {
t.Fatal("token has not changed")
}
_, body := ts.execute(t, "/get")
if body != "bar" {
t.Errorf("want %q; got %q", "bar", body)
}
}
func TestRememberMe(t *testing.T) {
t.Parallel()
sessionManager := New()
sessionManager.Cookie.Persist = false
mux := http.NewServeMux()
mux.HandleFunc("/put-normal", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
sessionManager.Put(r.Context(), "foo", "bar")
}))
mux.HandleFunc("/put-rememberMe-true", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
sessionManager.RememberMe(r.Context(), true)
sessionManager.Put(r.Context(), "foo", "bar")
}))
mux.HandleFunc("/put-rememberMe-false", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
sessionManager.RememberMe(r.Context(), false)
sessionManager.Put(r.Context(), "foo", "bar")
}))
ts := newTestServer(t, sessionManager.LoadAndSave(mux))
defer ts.Close()
header, _ := ts.execute(t, "/put-normal")
header.Get("Set-Cookie")
if strings.Contains(header.Get("Set-Cookie"), "Max-Age=") || strings.Contains(header.Get("Set-Cookie"), "Expires=") {
t.Errorf("want no Max-Age or Expires attributes; got %q", header.Get("Set-Cookie"))
}
header, _ = ts.execute(t, "/put-rememberMe-true")
header.Get("Set-Cookie")
if !strings.Contains(header.Get("Set-Cookie"), "Max-Age=") || !strings.Contains(header.Get("Set-Cookie"), "Expires=") {
t.Errorf("want Max-Age and Expires attributes; got %q", header.Get("Set-Cookie"))
}
header, _ = ts.execute(t, "/put-rememberMe-false")
header.Get("Set-Cookie")
if strings.Contains(header.Get("Set-Cookie"), "Max-Age=") || strings.Contains(header.Get("Set-Cookie"), "Expires=") {
t.Errorf("want no Max-Age or Expires attributes; got %q", header.Get("Set-Cookie"))
}
}
func TestIterate(t *testing.T) {
t.Parallel()
sessionManager := New()
mux := http.NewServeMux()
mux.HandleFunc("/put", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
sessionManager.Put(r.Context(), "foo", r.URL.Query().Get("foo"))
}))
for i := 0; i < 3; i++ {
ts := newTestServer(t, sessionManager.LoadAndSave(mux))
defer ts.Close()
ts.execute(t, "/put?foo="+strconv.Itoa(i))
}
results := []string{}
err := sessionManager.Iterate(context.Background(), func(ctx context.Context) error {
i := sessionManager.GetString(ctx, "foo")
results = append(results, i)
return nil
})
if err != nil {
t.Fatal(err)
}
sort.Strings(results)
if !reflect.DeepEqual(results, []string{"0", "1", "2"}) {
t.Fatalf("unexpected value: got %v", results)
}
err = sessionManager.Iterate(context.Background(), func(ctx context.Context) error {
return errors.New("expected error")
})
if err.Error() != "expected error" {
t.Fatal("didn't get expected error")
}
}
|
{
t.Parallel()
sessionManager := New()
mux := http.NewServeMux()
mux.HandleFunc("/put", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
sessionManager.Put(r.Context(), "foo", "bar")
}))
mux.HandleFunc("/destroy", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
err := sessionManager.Destroy(r.Context())
if err != nil {
http.Error(w, err.Error(), 500)
return
}
}))
mux.HandleFunc("/get", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
v := sessionManager.Get(r.Context(), "foo")
if v == nil {
http.Error(w, "foo does not exist in session", 500)
return
}
w.Write([]byte(v.(string)))
}))
ts := newTestServer(t, sessionManager.LoadAndSave(mux))
defer ts.Close()
ts.execute(t, "/put")
header, _ := ts.execute(t, "/destroy")
cookie := header.Get("Set-Cookie")
if strings.HasPrefix(cookie, fmt.Sprintf("%s=;", sessionManager.Cookie.Name)) == false {
t.Fatalf("got %q: expected prefix %q", cookie, fmt.Sprintf("%s=;", sessionManager.Cookie.Name))
}
if strings.Contains(cookie, "Expires=Thu, 01 Jan 1970 00:00:01 GMT") == false {
t.Fatalf("got %q: expected to contain %q", cookie, "Expires=Thu, 01 Jan 1970 00:00:01 GMT")
}
if strings.Contains(cookie, "Max-Age=0") == false {
t.Fatalf("got %q: expected to contain %q", cookie, "Max-Age=0")
}
_, body := ts.execute(t, "/get")
if body != "foo does not exist in session\n" {
t.Errorf("want %q; got %q", "foo does not exist in session\n", body)
}
}
|
dependencies.go
|
package install
import (
"fmt"
"github.com/thoas/go-funk"
"github.com/yourbasic/graph"
"github.com/kudobuilder/kudo/pkg/apis/kudo/v1beta1"
engtask "github.com/kudobuilder/kudo/pkg/engine/task"
"github.com/kudobuilder/kudo/pkg/kudoctl/clog"
"github.com/kudobuilder/kudo/pkg/kudoctl/packages"
pkgresolver "github.com/kudobuilder/kudo/pkg/kudoctl/packages/resolver"
)
// dependencyGraph is modeled after 'graph.Mutable' but allows vertices to be added.
type dependencyGraph struct {
edges []map[int]struct{}
}
// AddVertex adds a new vertex to the dependency graph.
func (g *dependencyGraph) AddVertex() {
g.edges = append(g.edges, map[int]struct{}{})
}
// AddEdge adds an edge from vertex v to w to the dependency graph.
func (g *dependencyGraph) AddEdge(v, w int) {
g.edges[v][w] = struct{}{}
}
// Order returns the number of vertices of the dependency graph.
func (g *dependencyGraph) Order() int {
return len(g.edges)
}
func (g *dependencyGraph) Visit(v int, do func(w int, c int64) bool) bool {
for w := range g.edges[v] {
if do(w, 1) {
return true
}
}
return false
}
type Dependency struct {
packages.Resources
PackageName string
}
// ResolveDependencies resolves all dependencies of a package.
// Dependencies are resolved recursively.
// Cyclic dependencies are detected and result in an error.
func ResolveDependencies(root packages.Resources, resolver pkgresolver.Resolver) ([]Dependency, error) {
dependencies := []Dependency{
{Resources: root},
}
// Each vertex in 'g' matches an index in 'dependencies'.
g := dependencyGraph{
edges: []map[int]struct{}{{}},
}
if err := dependencyWalk(&dependencies, &g, root, 0, resolver); err != nil
|
// Remove 'root' from the list of dependencies.
return dependencies[1:], nil
}
func dependencyWalk(
dependencies *[]Dependency,
g *dependencyGraph,
parent packages.Resources,
parentIndex int,
resolver pkgresolver.Resolver) error {
//nolint:errcheck
childrenTasks := funk.Filter(parent.OperatorVersion.Spec.Tasks, func(task v1beta1.Task) bool {
return task.Kind == engtask.KudoOperatorTaskKind
}).([]v1beta1.Task)
for _, childTask := range childrenTasks {
childPkg, err := resolver.Resolve(
childTask.Spec.KudoOperatorTaskSpec.Package,
childTask.Spec.KudoOperatorTaskSpec.AppVersion,
childTask.Spec.KudoOperatorTaskSpec.OperatorVersion)
if err != nil {
return fmt.Errorf(
"failed to resolve package %s, dependency of package %s: %v", fullyQualifiedName(childTask.Spec.KudoOperatorTaskSpec), parent.OperatorVersion.FullyQualifiedName(), err)
}
childDependency := Dependency{
Resources: *childPkg.Resources,
PackageName: childTask.Spec.KudoOperatorTaskSpec.Package,
}
newPackage := false
childIndex := indexOf(dependencies, &childDependency)
if childIndex == -1 {
clog.V(2).Printf("Adding new dependency %s", childPkg.Resources.OperatorVersion.FullyQualifiedName())
newPackage = true
*dependencies = append(*dependencies, childDependency)
childIndex = len(*dependencies) - 1
// The number of vertices in 'g' has to match the number of packages we're tracking.
g.AddVertex()
}
// This is a directed graph. The edge represents a dependency of the parent package on the current package.
g.AddEdge(parentIndex, childIndex)
if !graph.Acyclic(g) {
return fmt.Errorf(
"cyclic package dependency found when adding package %s -> %s", parent.OperatorVersion.FullyQualifiedName(), childPkg.Resources.OperatorVersion.FullyQualifiedName())
}
// We only need to walk the dependencies if the package is new
if newPackage {
if err := dependencyWalk(dependencies, g, *childPkg.Resources, childIndex, resolver); err != nil {
return err
}
}
}
return nil
}
// indexOf method searches for the dependency in dependencies that has the same
// OperatorVersion/AppVersion (using EqualOperatorVersion method) and returns
// its index or -1 if not found.
func indexOf(dependencies *[]Dependency, dependency *Dependency) int {
for i, d := range *dependencies {
if d.OperatorVersion.EqualOperatorVersion(dependency.OperatorVersion) {
return i
}
}
return -1
}
func fullyQualifiedName(kt v1beta1.KudoOperatorTaskSpec) string {
return fmt.Sprintf("%s-%s", v1beta1.OperatorVersionName(kt.Package, kt.OperatorVersion), kt.AppVersion)
}
|
{
return nil, err
}
|
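dependencyWalk above adds one vertex per resolved package and one edge per parent-to-child dependency, calling graph.Acyclic after every AddEdge so a cycle is reported at the exact edge that introduces it. The check itself is a depth-first search for back edges; a self-contained sketch of that idea (in Rust, with hypothetical Graph/add_edge names rather than the yourbasic/graph API used by the Go code):

```rust
// Sketch only: the cycle check dependencyWalk relies on (graph.Acyclic in Go),
// written as a plain DFS over an adjacency list. All names here are hypothetical.
struct Graph { edges: Vec<Vec<usize>> }

impl Graph {
    fn add_vertex(&mut self) -> usize { self.edges.push(Vec::new()); self.edges.len() - 1 }
    fn add_edge(&mut self, v: usize, w: usize) { self.edges[v].push(w); }

    // Returns true if no directed cycle exists.
    // Colors: 0 = unvisited, 1 = on the current DFS path, 2 = finished.
    fn acyclic(&self) -> bool {
        fn dfs(g: &Graph, v: usize, color: &mut Vec<u8>) -> bool {
            color[v] = 1;
            for &w in &g.edges[v] {
                if color[w] == 1 || (color[w] == 0 && !dfs(g, w, color)) {
                    return false; // back edge found => cycle
                }
            }
            color[v] = 2;
            true
        }
        let mut color = vec![0u8; self.edges.len()];
        (0..self.edges.len()).all(|v| color[v] != 0 || dfs(self, v, &mut color))
    }
}

fn main() {
    let mut g = Graph { edges: Vec::new() };
    let root = g.add_vertex();
    let child = g.add_vertex();
    g.add_edge(root, child);
    assert!(g.acyclic());
    g.add_edge(child, root); // introduces root -> child -> root
    assert!(!g.acyclic());   // dependencyWalk would reject this edge
}
```

Running the check after every edge insertion, rather than once at the end, is what lets the Go code name the exact parent/child pair that closed the cycle in its error message.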
successComposite.spec.ts
|
import { OkProcess } from '../../../src/utils/helpers/successComposite'
import { Ok } from '../../../src/utils/plugins/statuscodes/ok'
import { HttpResponse } from '../../../src/presentation/protocols'
const makeOkStub = () => {
return new Ok()
|
const response = sut.ok('any values')
return response
}
describe('OkProcess', () => {
test('ensure ok return body and statuscode', () => {
const sut = makeSut()
expect(sut).toEqual({ body: 'any values', statuscode: 200 })
})
})
|
}
const makeSut = (): HttpResponse => {
const sut = new OkProcess(makeOkStub())
|
mini.view.js
|
var view=function(e){"use strict";function t(e){return(t="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function n(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}function o(e,t){for(var n=0;n<t.length;n++){var o=t[n];o.enumerable=o.enumerable||!1,o.configurable=!0,"value"in o&&(o.writable=!0),Object.defineProperty(e,o.key,o)}}function r(e,t,n){return t&&o(e.prototype,t),n&&o(e,n),e}function c(e,t,n){for(;e.length;){var o=e[0];if(t.call(n,o,e))break}}function a(e,t,n){if(e)return n=n||e,Object.keys(e).every(function(o){var r=e[o];return!t.call(r,r,o,n)}),n}function i(e,t,n){if(e)if(e.hasOwnProperty("$index"))for(var o=e.$index;o<e.length;o++)t.call(n,e[o],o);else Object.keys(e).forEach(function(o){t.call(n,e[o],o)})}function s(e){return[].slice.call(e)}function l(e){return null==e||void 0==e||""==e}Object.assign(Array.prototype,{remove:function(e){var t=this.indexOf(e);return t>-1&&this.splice(t,1),this},replace:function(e,t){var n=this.indexOf(e);n>-1&&this.splice(n,1,t)},splices:function(e){this.splice.apply(this,e)},has:function(e){return this.indexOf(e)>-1},ones:function(e){this.has(e)||this.push(e)}});var u=/(@each|@when|\.when)\s*\((.*)\)\s*\{|\.when\s*\{|\{([^\{\}]*)\}|\}/g,d=/(@each|@when|\.when)\s*\((.*)\)\s*\{|\.when\s*\{/,h=/(@each)\s*\((.*)\)\s*\{/g,f=/(@when|\.when)\s*\((.*)\)\s*\{|\.when\s*\{/g,p=/\.when\s*\((.*)\)\s*\{|\.when\s*\{/g,v=/@when/g,m=/\{\s*@?([^\{\}]*)\}/,y=/\{([^\{\}]*)\}/g,w=/\{\s*@([^\{\}]*)\}/,g=/^\}$/,N=/(["'][^"']*["'])|(([_\$a-zA-Z]+\w?)((\.\w+)|(\[(.+)\]))*)/g,E=/^@(.*)/,b=new Map;function $(e,t,n){try{return L.$path=void 0,O(e=e.replace(m,"$1"),t,n)}catch(e){return void console.warn(e)}}function x(e,t,n){try{return L.$path=void 0,O(e="'".concat(e.replace(y,"'+($1)+'"),"'"),t,n)}catch(e){return void console.warn(e)}}function O(e,t,n){try{if(!n)return R(e,t);var o=Reflect.getPrototypeOf(n.filter);return Reflect.setPrototypeOf(o,t),R(e,n.filter)}catch(e){return void console.warn(e)}}function R(e,t){var n=b.get(e);return void 0==n&&b.set(e,n=e.replace(N,function(e){return e.match(/["']/)?e:"scope.".concat(e)})),new Function("scope","return ".concat(n,";"))(t)}function V(e,t,n,o){return{get:function(r,c){return t==c?Reflect.get(n,o):"".concat(t,"$")==c?Reflect.get(n,"".concat(o,"$")):"$target"==c?r:r.hasOwnProperty(c)?Reflect.get(r,c):Reflect.get(e,c)},set:function(r,c,a){return t==c?Reflect.set(n,o,a):r.hasOwnProperty(c)?Reflect.set(r,c,a):Reflect.set(e,c,a)}}}function C(e,t,n,o,r){function a(e,t){i(e.attributes,function(e){if(e){var n=function(e,t,n){return{node:e,clas:n,scope:t,children:[],childNodes:[]}}(e,t,e.cloneNode());if(":model"==n.clas.name)b(e,t);else if(new RegExp(y).test(e.nodeValue)){"value"==n.clas.name&&b(e,t);var o=e.nodeValue;e.nodeValue=x(o,t,r),N.attrExpress(e,t,n,o)}!function(e,t){e.name.replace(E,function(n){n=n.replace(E,"$1");var o=e.ownerElement,c=e.nodeValue.toString().match(/\(([^)]*)\)/);if(c){var a=e.nodeValue.toString().replace(c[0],""),i=$(a,r.action);o.on(n,i,t,c[1])}else{var s=$(e.nodeValue,r.action);o.on(n,s,t)}})}(e,t)}})}function u(e,t,n,o){var c;a(e,t),new RegExp(w).test(e.nodeValue)?(!function(e,t,n,o){var r=document.createComment("component");e.parentNode.replaceChild(r,e),n.scope=t,n.resolver="component",n.content=o,n.childNodes.push({node:r,content:n,children:[],childNodes:[]})}(e,t,n,o),P.component(n,r)):(c=new 
RegExp(y).exec(e.nodeValue))&&(e.nodeValue=$(c[1],t,r),N.express(e,t,n,c[1]))}function g(e){if(e)return new RegExp(p).test(e.clas.nodeValue)}var N={attrEach:function(e,t,n,o,c){n.resolver="each",n.content=o,n.scope=t,n.node=e,k(n,r,c)},each:function(e,t,n,o,c){n.resolver="each",n.content=o,n.scope=t,n.node=e,k(n,r,c)},when:function(e,t,n){var o=n.clas.nodeValue;new RegExp(f).exec(o)&&(n.resolver="when",n.scope=t,n.node=e,k(n,r,o))},express:function(e,t,n,o){n.resolver="express",n.scope=t,n.node=e,k(n,r,o)},attrExpress:function(e,t,n,o){n.resolver="express",n.scope=t,n.node=e,k(n,r,o)}};function b(e,t){var n=e.ownerElement;n._express=e.nodeValue.replace(m,"$1");var o="scope.".concat(n._express);(O[n.type]||O[n.localName]||O.other)(e,t,o)}var O={checkbox:function(e,t,n){try{var o=e.ownerElement;o.on("change",function(){var e=o.value.replace(/(\'|\")/g,"\\$1"),n=$(o._express,t);o.checked?n.ones(e):n.remove(e)},t);var r=$(o._express,t);Array.isArray(r)&&r.has(o.value)&&(o.checked=!0)}catch(e){console.error(e)}},radio:function(e,t,n){try{var o=e.ownerElement;o.on("change",function(){var e=o.value.replace(/(\'|\")/g,"\\$1"),r="".concat(n,"='").concat(e,"';");new Function("scope",r)(t)},t),$(o._express,t)==o.value&&(o.checked=!0),o.name=L.$path}catch(e){console.error(e)}},select:function(e,t,n){try{var o,r=e.ownerElement;r.on("change",o=function(){var e=r.value.replace(/(\'|\")/g,"\\$1"),o="".concat(n,"='").concat(e,"';");new Function("scope",o)(t)},t);var c=$(r._express,t);l(c)?o():r.value=c}catch(e){console.error(e)}},other:function(e,t,n){try{var o=e.ownerElement;o.on("change",function(){var e=o.value.replace(/(\'|\")/g,"\\$1"),r="".concat(n,"='").concat(e,"';");new Function("scope",r)(t)},t)}catch(e){console.error(e)}}};function R(e,t){return{node:e,clas:t.clas,children:t.children,scope:t.scope,childNodes:[]}}function C(e,t,n){var o=document.createComment("each:"+L.$path);return t.appendChild(o),{node:e,clas:n.clas,children:n.children,scope:n.scope,childNodes:[{node:o,clas:n.clas,scope:n.scope,children:[],childNodes:[]}]}}!function e(t,n,o,a){c(o,function(o,l){if(1==o.clas.nodeType)if(o.clas.hasAttribute("@each")){var p=(O=o.clas.getAttribute("@each").split(":")).shift().trim(),m=O.pop().trim(),y=O.shift(),w=$(m,n),E=C(null,t,o);a.childNodes.push(E),N.attrEach(null,n,E,a,m),i(w,function(r,c){var i=Object.create(n.$target);y&&(i[y.trim()]=c),i=new Proxy(i,V(n,p,w,c));var l=o.clas.cloneNode();l.removeAttribute("@each"),t.appendChild(l);var d=R(l,o);E.childNodes.push(d),e(l,i,s(o.children),d),u(l,i,d,a)})}else if(/(CODE|SCRIPT)/.test(o.clas.nodeName)){var b=o.clas.cloneNode(!0);t.appendChild(b);var x=R(b,o);a.childNodes.push(x)}else b=o.clas.cloneNode(),t.appendChild(b),x=R(b,o),a.childNodes.push(x),e(b,n,s(o.children),x),u(b,n,x,a);else if(h.test(o.clas.nodeValue)){var O;p=(O=o.clas.nodeValue.replace(h,"$2").split(":")).shift().trim(),m=O.pop().trim(),y=O.shift(),w=$(m,n),E=C(null,t,o),a.childNodes.push(E),N.each(null,n,E,a,m);var P=s(o.children);i(w,function(r,c){var a=Object.create(n.$target);y&&(a[y.trim()]=c),a=new Proxy(a,V(n,p,w,c));var i=R(null,o);E.childNodes.push(i),e(t,a,s(P),i)})}else{if(f.test(o.clas.nodeValue)){var j=$(o.clas.nodeValue.replace(f,"$2"),n,r);return(E=function(e,t,n,o,r){if(new RegExp(v).test(n.clas.nodeValue)){var c=document.createComment("when:"+L.$path);t.appendChild(c),o.childNodes.push(o={node:e,clas:n.clas,children:[],scope:n.scope,content:o,childNodes:[{node:c,clas:n.clas,scope:n.scope,children:[],childNodes:[]}]}),N.when(null,r,o)}return 
o}(null,t,o,a,n)).children.push(l.shift()),j?(N.when(null,n,E),c(l,function(e,t){if(!g(e))return!0;E.children.push(t.shift())}),c(s(o.children),function(o,r){if(1==o.clas.nodeType||d.test(o.clas.nodeValue))e(t,n,r,E);else{var c=o.clas.cloneNode();t.appendChild(c);var a=R(c,o);E.childNodes.push(a),u(c,n,a,E)}r.shift()})):void 0==j?(N.when(null,n,E),c(s(o.children),function(o,r){if(1==o.clas.nodeType||d.test(o.clas.nodeValue))e(t,n,r,E);else{var c=o.clas.cloneNode();t.appendChild(c);var a=R(c,o);E.childNodes.push(a),u(c,n,a,E)}r.shift()})):g(l[0])&&e(t,n,l,E),g(o)}b=o.clas.cloneNode(),t.appendChild(b),x=R(b,o),a.childNodes.push(x),u(b,n,x,a)}l.shift()})}(e,t,n,o)}var P={view:function(e,t,n,o,r){try{var c=document.createDocumentFragment();new C(c,n,s(t.children),o,r),o.children=t.children,o.clas=t.clas,e.reappend(c)}catch(e){console.error(e)}},component:function(e,t){try{var n=$(e.clas.nodeValue,e.scope,t);if(n.model=n.model.$target||n.model,l(n))return;Reflect.setPrototypeOf(n.model,e.scope);var o=_(e.childNodes),r=e.content.childNodes;S(e.childNodes);var c=new D({view:n.view,model:n.model,action:n.action});n.model=c.model;var a=function(e,t,n){var o=document.createComment("component:"+t.path);return e.before(o),n.content.node=n.view,{clas:t.clas,children:[n.node],scope:t.scope,resolver:t.resolver,content:t.content,childNodes:[{node:o,scope:t.scope,children:[],childNodes:[]},n.content]}}(o,e,c);k(a,t,e.clas.nodeValue),r.replace(e,a),o.parentNode&&o.parentNode.replaceChild(c.view,o)}catch(e){console.error(e)}},when:function(e,t){try{var n=_(e.childNodes),o=document.createDocumentFragment(),r=e.content.childNodes;S(e.childNodes),new C(o,e.scope,s(e.children),e.content,t),r.replace(e,r.pop()),n.parentNode&&n.parentNode.replaceChild(o,n)}catch(e){console.error(e)}},each:function(e,t){try{var n=_(e.childNodes),o=document.createDocumentFragment(),r=e.content.childNodes;S(e.childNodes),new C(o,e.scope,[e],e.content,t),r.replace(e,r.pop()),n.parentNode&&n.parentNode.replaceChild(o,n)}catch(e){console.error(e)}},arrayEach:function(e,t,n,o){try{var r=function e(t,n){try{return a(t,function(t){if(t.node&&t.node.parentNode)return n=t.node;if(t.childNodes.length){var o=t.childNodes[t.childNodes.length-1];if(o.node&&o.node.parentNode)return n=o.node;n=e([o])}}),n}catch(e){console.error(e)}}([e.childNodes[n]]),c=document.createDocumentFragment(),i={clas:e.clas,children:e.children,scope:e.scope},l={childNodes:[],children:[]};new C(c,e.scope,[i],l,t),c.removeChild(c.childNodes[0]);var u=s(l.childNodes[0].childNodes);u.splice(0,1,n+1,0),e.childNodes.splices(u),o.remove(l.childNodes[0]),r.parentNode&&r.after(c)}catch(e){console.error(e)}},express:function(e,t){try{e.node.nodeValue=x(e.clas.nodeValue,e.scope,t),k(e,t,e.clas.nodeValue),"value"==e.node.name&&(e.node.ownerElement.value=e.node.nodeValue)}catch(e){console.error(e)}},attribute:function(e,t){try{var n=document.createAttribute(x(e.clas.name,scope,t));k(e,t,e.clas.name),n.nodeValue=e.clas.nodeValue,e.node.ownerElement.setAttributeNode(n),e.node.ownerElement.removeAttributeNode(e.node)}catch(e){console.error(e)}}},j=function(e,t,n){e.forEach(function(o,r){o.forEach(function(c){try{A[c.resolver]?A[c.resolver](c,r,o,t,n):P[c.resolver](c,r,e)}catch(e){console.error(e)}})})},A={each:function(e,t,n,o,r){try{if(r>0)P.arrayEach(e,t,o,n);else S(e.childNodes.splice(o+1))}catch(e){console.error(e)}}};function k(e,t,n){n.replace(N,function(n){if(!n.match(/["']/)){var o=new Function("scope","return scope.".concat(n,"$;"))(e.scope);if(void 0!=o){var 
r=o.get(t);r?r.ones(e):o.set(t,[e])}}})}function _(e,t){try{return a(e,function(e){if(e.node&&e.node.parentNode)return t=e.node,e.node=null,t;t=_(e.childNodes)}),t}catch(e){console.error(e)}}function S(e){e.forEach(function(e){if(e.node&&e.node.parentNode)return e.node.parentNode.removeChild(e.node);e.childNodes&&S(e.childNodes)})}function T(e,n){if("object"!=t(e))return e;return e=new Proxy(e,function n(o){var r=new Map,c=new Map;return{get:function(a,i,s){if("$target"==i)return a;if(new String(i).endsWith("$")){var l=c.get(i);return void 0!=l?l:Reflect.get(a,i)}if(!a.hasOwnProperty(i)&&Reflect.has(a,i))return Reflect.get(a,i);var u=o?"".concat(o,".").concat(i):i;M.publish(e,"get",[u]);var d=r.get(i);return void 0!=d?d:(function(e){if(e instanceof Component)return;if(e instanceof Date)return;if("object"==t(e))return e}(d=Reflect.get(a,i))&&(d=new Proxy(d,n(u))),r.set(i,d),c.set("".concat(i,"$"),new Map),function(e,t){if(!Array.isArray(e))return;var n={shift:function(){var e=Array.prototype.shift,n=e.apply(this,arguments),o=this.length;return j(t,o),n},pop:function(){var e=Array.prototype.pop,n=e.apply(this,arguments),o=this.length;return j(t,o),n},splice:function(){var e=Array.prototype.splice;if(this.length){var n=this.length,o=e.apply(this,arguments);return arguments.length>2?this.$index=n:n=this.length,j(t,n,arguments.length-2),Reflect.deleteProperty(this,"$index"),o}},unshift:function(){var e=Array.prototype.unshift;if(arguments.length){var n=this.$index=this.length,o=e.apply(this,arguments);return j(t,n,arguments.length),Reflect.deleteProperty(this,"$index"),o}},push:function(){var e=Array.prototype.push;if(arguments.length){var n=this.$index=this.length,o=e.apply(this,arguments);return j(t,n,arguments.length),Reflect.deleteProperty(this,"$index"),o}},reverse:function(){var e=Array.prototype.reverse,t=e.apply(this,arguments);return t},sort:function(){var e=Array.prototype.sort,t=e.apply(this,arguments);return t}};Reflect.setPrototypeOf(n,Array.prototype),Reflect.setPrototypeOf(e,n)}(d,c.get("".concat(i,"$"))),d)},set:function(n,o,a,i){if(!n.hasOwnProperty(o)&&Reflect.has(n,o))return Reflect.set(n,o,a);var s=r.get(o),l=c.get("".concat(o,"$"));r.delete(o),c.delete("".concat(o,"$")),Reflect.set(n,o,a.$target||a);var u=i[o];return function n(o,r){if(o instanceof Component)return;"object"==t(o)&&"object"==t(r)&&Object.keys(r).forEach(function(c){var a=o[c],i=o["".concat(c,"$")],s=r[c],l=r["".concat(c,"$")];"object"!=t(a)&&"object"!=t(s)&&M.publish(e,"set",[l,i]),n(a,s)})}(u,s),M.publish(e,"set",[l,c.get("".concat(o,"$"))]),!0}}}()),Object.keys(n).forEach(function(t){return M.subscribe(e,t,n[t])}),e}var M=new(function(){function e(){n(this,e),this.map=new Map}return r(e,[{key:"publish",value:function(e,t,n){var o=this.map.get(e);if(o){var r=o.get(t);r?r.data.push(n):o.set(t,{data:[n],queue:[]})}else{var c=new Map;c.set(t,{data:[c],queue:[]}),this.map.set(e,c)}this.notify(o.get(t),e)}},{key:"notify",value:function(e,t){if(e)for(var n=function(){var n=e.data.shift();e.queue.forEach(function(e){e.apply(t,n)})};e.data.length;)n();else this.map.forEach(function(e){e.forEach(function(e){for(var n=function(){var n=e.data.shift();e.queue.forEach(function(e){e.apply(t,n)})};e.data.length;)n()})})}},{key:"subscribe",value:function(e,t,n){var o=this.map.get(e);if(o){var r=o.get(t);r?r.queue.push(n):o.set(t,{data:[],queue:[n]})}else{var c=new Map;c.set(t,{data:[],queue:[n]}),this.map.set(e,c)}}}]),e}());function F(e){try{return document.querySelectorAll(e)}catch(n){var 
t=document.createElement("div");return t.innerHTML=e.trim(),t.childNodes}}function q(e,t,n){this.addEventListener?this.addEventListener(e,function(e){t.forEach(function(t,o){t.forEach(function(t){var r=t?$("[".concat(t,"]"),n):[];r.push(e);var c=Reflect.getPrototypeOf(o),a=Object.assign({},c);Reflect.setPrototypeOf(a,n||o.$model),o.apply(a,r)})})},!1):this.attachEvent?this.attachEvent("on"+e,function(e){t.forEach(function(t,o){t.forEach(function(t){var r=t?$("[".concat(t,"]"),n):[];r.push(e);var c=Reflect.getPrototypeOf(o),a=Object.assign({},c);Reflect.setPrototypeOf(a,n||o.$model),o.apply(a,r)})})}):element["on"+e]=function(e){t.forEach(function(t,o){t.forEach(function(t){var r=t?$("[".concat(t,"]"),n):[];r.push(e);var c=Reflect.getPrototypeOf(o),a=Object.assign({},c);Reflect.setPrototypeOf(a,n||o.$model),o.apply(a,r)})})}}Object.assign(Node.prototype,{on:function(e,t,n,o){if(this._manager)if(this._manager.get(e)){var r=this._manager.get(e);r.get(t)?r.get(t).ones(o):r.set(t,[o])}else{var c=new Map;c.set(t,[o]),this._manager.set(e,c),q.call(this,e,c,n)}else{var a=new Map;a.set(t,[o]),this._manager=new Map,this._manager.set(e,a),q.call(this,e,a,n)}return this},off:function(e,t){if(this._manager){var n=this._manager.get(e);if(void 0==n)return;if(n.delete(t),n.size)return;this._manager.delete(e),function(e,t){this.addEventListener?this.removeEventListener(e,t,!1):this.detachEvent?this.detachEvent("on"+e,t):element["on"+e]=null}.call(this,e,t)}return this},reappend:function(e){return a(s(this.childNodes),function(e){e.parentNode.removeChild(e)}),this.appendChild(e),this},before:function(e){this.parentNode.insertBefore(e,this)},after:function(e){this.nextSibling?this.parentNode.insertBefore(e,this.nextSibling):this.parentNode.appendChild(e)}}),Object.assign(NodeList.prototype,{on:function(e,t){return a(this,function(n){n.on(e,t)}),this},off:function(e,t){return a(this,function(n){n.off(e,t)}),this}});var L={$path:void 0},D=function(){function e(t){n(this,e),this.model=T(t.model,B),this.action=t.action,this.watch=t.watch,this.filter=t.filter,this.creater(t)}return r(e,[{key:"creater",value:function(e){this.content={childNodes:[],children:[]},this.view=F(e.view)[0];var t=function e(t,n){var o=n||[];return c(t,function(n){if(t.shift(),new RegExp(g).test(n.nodeValue))return!0;var r={clas:n.cloneNode(!0),children:[]};3==n.nodeType&&""==n.nodeValue.trim()||o.push(r),1==n.nodeType?e(s(n.childNodes),r.children):new RegExp(d).test(n.nodeValue)&&e(t,r.children)}),o}(function e(t){return a(t,function(t){t.childNodes[0]&&!/(CODE|SCRIPT)/.test(t.nodeName)&&e(s(t.childNodes)),3==t.nodeType&&t.nodeValue.replace(u,function(e){var n=t.nodeValue.split(e);t.parentNode.insertBefore(document.createTextNode(n[0]),t),t.parentNode.insertBefore(document.createTextNode(e.trim()),t),t.nodeValue=t.nodeValue.replace(n[0],"").replace(e,"")})}),t}([this.view]))[0];!function(e){var t={$view:e.view,$model:e.model,$action:e.action,$watch:e.watch};e.action=e.action||{},Reflect.setPrototypeOf(t,Function.prototype),Object.values(e.action).forEach(function(e){return Reflect.setPrototypeOf(e,t)});var n=Object.assign({},t);e.filter=e.filter||{},Reflect.setPrototypeOf(e.filter,n)}(this),P.view(this.view,t,this.model,this.content,this)}}]),e}(),B={set:function(e,t){!function(e,t){e&&t?e.forEach(function(e,n){s(e).forEach(function(o){!function e(t,n){try{return t.every(function(t){if(t.node){var o=t.node.ownerElement||t.node;return 
n=document.body.contains(o),!1}n=e(t.childNodes)}),n}catch(e){console.error(e)}}([o])?e.remove(o):P[o.resolver](o,n,t)})}):e&&!t&&e.forEach(function(e,t){S(e)})}(e,t)},get:function(e){L.$path=e}},I=function(){function e(t){n(this,e),this.model=t.model,this.action=t.action,this.watch=t.watch,this.filter=t.filter,this.creater(t)}return r(e,[{key:"creater",value:function(e){this.content={childNodes:[],children:[]};var t=F(e.view)[0];t.parentNode.removeChild(t),this.view=t.outerHTML}}]),e}();return window.query=F,window.Router=function(e,t){var n,o,r,c=/^:/,a=/^\/(.+)/;this.redreact=d;var i,s=!((i=window.navigator.userAgent).indexOf("compatible")>-1&&i.indexOf("MSIE")>-1||i.indexOf("Trident")>-1||i.indexOf("Edge")>-1)&&window.history&&"pushState"in window.history;function l(e){for(r=Object.keys(t);r.length;){n=r.shift(),o={};var c=n.replace(a,"$1");if(u(c=c.split("/"),e.split("/")))return{component:t[n].component,router:t[n].router,action:t[n].action,after:t[n].after,params:o,path:e}}}function u(e,t){for(;t.length;){var n=e.shift(),r=t.shift();if(r!=n){if(!c.test(n))return!1;n=n.replace(c,""),o[n]=r}}return!0}function d(e){var t=window.location.pathname;window.location.href=t+"#"+e}function h(t){var n=l(window.location.hash.replace(/^#\/?/,""));n?(n.action(n.params),e.model[n.router]=n.component,n.after&&n.after()):void 0!=t&&"load"!=t.type||d("")}window.addEventListener("load",h,h()),window.addEventListener(s?"popstate":"hashchange",h,!1)},window.View=D,window.Component=I,e.Component=I,e.View=D,e.global=L,e}({});
| ||
test_events.py
|
import asyncio
from unittest import IsolatedAsyncioTestCase

from aiodiskdb import exceptions
from aiodiskdb.aiodiskdb import AioDiskDB
from aiodiskdb.local_types import EventsHandlers


class TestEventsHandlerStrictTyping(IsolatedAsyncioTestCase):
    def setUp(self) -> None:
        self.sut = EventsHandlers()
        self._dummy_ex = 0

    async def _dummy(self, *a, **kw):
        self._dummy_ex += 1

    async def test(self):
        with self.assertRaises(TypeError):
            self.sut.on_start = lambda w: ''
        self.sut.on_start = self._dummy
        await self.sut.on_start()
        self.sut.on_start = None
        self.assertEqual(1, self._dummy_ex)


class AioDiskDBTestCase(IsolatedAsyncioTestCase):
    _path = '/tmp/aiodiskdb_test'

    def setUp(self, max_file_size=128, max_buffer_size=16, overwrite=True):
        self.loop = asyncio.get_event_loop()
        self._overwrite = True
        self._max_file_size = max_file_size
        self._max_buffer_size = max_buffer_size
        self._setup_sut()
        self.sut.destroy_db()
        self._overwrite = overwrite
        self._setup_sut()

    def tearDown(self) -> None:
        self.sut.destroy_db()

    def _setup_sut(self):
        self.sut = AioDiskDB(
            self._path,
            create_if_not_exists=True,
            read_timeout=5,
            max_file_size=self._max_file_size,
            max_buffer_size=self._max_buffer_size,
            overwrite=self._overwrite
        )


def run_test_db(f):
    async def _decorator(self, *a, **kw):
|
    return _decorator
|
        try:
            self.loop.create_task(self.sut.run(), name='aiodiskdb_main_loop')
            while not self.sut.running:
                await asyncio.sleep(0.01)
            return await f(self, *a, **kw)
        finally:
            try:
                await self.sut.stop()
            except exceptions.NotRunningException:
                print('run_test_db requested to shutdown a not running database')
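# A minimal usage sketch: the prefix, suffix and middle cells above are a
# fill-in-the-middle split of the run_test_db decorator (the prefix ends at
# `async def _decorator(...):`, this middle cell is its try/finally body, and
# the suffix closes with `return _decorator`). Assuming the pieces are joined
# in that order, a test case can apply the decorator like this; `TestRunning`
# is a hypothetical name, and only attributes that already appear above
# (self.sut, self.sut.running) are used.
class TestRunning(AioDiskDBTestCase):
    @run_test_db
    async def test_running(self):
        # By the time the decorated body runs, run_test_db has started the
        # aiodiskdb main loop and waited for self.sut.running to become True.
        self.assertTrue(self.sut.running)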
|
codemirror.min.js
|
}function Gn(e,t,r,n,i){if(n&&n.shared)return Un(e,t,r,n,i);if(e.cm&&!e.cm.curOp)return Mr(e.cm,Gn)(e,t,r,n,i);var o=new ss(e,i),l=Nl(t,r);if(n&&Oo(n,o,!1),l>0||0==l&&o.clearWhenEmpty!==!1)return o;if(o.replacedWith&&(o.collapsed=!0,o.widgetNode=zo("span",[o.replacedWith],"CodeMirror-widget"),n.handleMouseEvents||o.widgetNode.setAttribute("cm-ignore-events","true"),n.insertLeft&&(o.widgetNode.insertLeft=!0)),o.collapsed){if(ci(e,t.line,t,r,o)||t.line!=r.line&&ci(e,r.line,t,r,o))throw new Error("Inserting collapsed marker partially overlapping an existing one");Ml=!0}o.addToHistory&&ro(e,{from:t,to:r,origin:"markText"},e.sel,0/0);var s,a=t.line,u=e.cm;if(e.iter(a,r.line+1,function(e){u&&o.collapsed&&!u.options.lineWrapping&&fi(e)==u.display.maxLine&&(s=!0),o.collapsed&&a!=t.line&&_i(e,0),$n(e,new Xn(o,a==t.line?t.ch:null,a==r.line?r.ch:null)),++a}),o.collapsed&&e.iter(t.line,r.line+1,function(t){gi(e,t)&&_i(t,0)}),o.clearOnEnter&&ws(o,"beforeCursorEnter",function(){o.clear()}),o.readOnly&&(Tl=!0,(e.history.done.length||e.history.undone.length)&&e.clearHistory()),o.collapsed&&(o.id=++ls,o.atomic=!0),u){if(s&&(u.curOp.updateMaxLine=!0),o.collapsed)Dr(u,t.line,r.line+1);else if(o.className||o.title||o.startStyle||o.endStyle||o.css)for(var c=t.line;c<=r.line;c++)Hr(u,c,"text");o.atomic&&At(u.doc),mo(u,"markerAdded",u,o)}return o}function Un(e,t,r,n,i){n=Oo(n),n.shared=!1;var o=[Gn(e,t,r,n,i)],l=o[0],s=n.widgetNode;return Ui(e,function(e){s&&(n.widgetNode=s.cloneNode(!0)),o.push(Gn(e,pt(e,t),pt(e,r),n,i));for(var a=0;a<e.linked.length;++a)if(e.linked[a].isParent)return;l=To(o)}),new as(o,l)}function Vn(e){return e.findMarks(Al(e.first,0),e.clipPos(Al(e.lastLine())),function(e){return e.parent})}function Kn(e,t){for(var r=0;r<t.length;r++){var n=t[r],i=n.find(),o=e.clipPos(i.from),l=e.clipPos(i.to);if(Nl(o,l)){var s=Gn(e,o,l,n.primary,n.primary.type);n.markers.push(s),s.parent=n}}}function jn(e){for(var t=0;t<e.length;t++){var r=e[t],n=[r.primary.doc];Ui(r.primary.doc,function(e){n.push(e)});for(var i=0;i<r.markers.length;i++){var o=r.markers[i];-1==Mo(n,o.doc)&&(o.parent=null,r.markers.splice(i--,1))}}}function Xn(e,t,r){this.marker=e,this.from=t,this.to=r}function _n(e,t){if(e)for(var r=0;r<e.length;++r){var n=e[r];if(n.marker==t)return n}}function Yn(e,t){for(var r,n=0;n<e.length;++n)e[n]!=t&&(r||(r=[])).push(e[n]);return r}function $n(e,t){e.markedSpans=e.markedSpans?e.markedSpans.concat([t]):[t],t.marker.attachLine(e)}function qn(e,t,r){if(e)for(var n,i=0;i<e.length;++i){var o=e[i],l=o.marker,s=null==o.from||(l.inclusiveLeft?o.from<=t:o.from<t);if(s||o.from==t&&"bookmark"==l.type&&(!r||!o.marker.insertLeft)){var a=null==o.to||(l.inclusiveRight?o.to>=t:o.to>t);(n||(n=[])).push(new Xn(l,o.from,a?null:o.to))}}return n}function Zn(e,t,r){if(e)for(var n,i=0;i<e.length;++i){var o=e[i],l=o.marker,s=null==o.to||(l.inclusiveRight?o.to>=t:o.to>t);if(s||o.from==t&&"bookmark"==l.type&&(!r||o.marker.insertLeft)){var a=null==o.from||(l.inclusiveLeft?o.from<=t:o.from<t);(n||(n=[])).push(new Xn(l,a?null:o.from-t,null==o.to?null:o.to-t))}}return n}function Qn(e,t){if(t.full)return null;var r=vt(e,t.from.line)&&Ki(e,t.from.line).markedSpans,n=vt(e,t.to.line)&&Ki(e,t.to.line).markedSpans;if(!r&&!n)return null;var i=t.from.ch,o=t.to.ch,l=0==Nl(t.from,t.to),s=qn(r,i,l),a=Zn(n,o,l),u=1==t.text.length,c=To(t.text).length+(u?i:0);if(s)for(var f=0;f<s.length;++f){var h=s[f];if(null==h.to){var d=_n(a,h.marker);d?u&&(h.to=null==d.to?null:d.to+c):h.to=i}}if(a)for(var f=0;f<a.length;++f){var 
h=a[f];if(null!=h.to&&(h.to+=c),null==h.from){var d=_n(s,h.marker);d||(h.from=c,u&&(s||(s=[])).push(h))}else h.from+=c,u&&(s||(s=[])).push(h)}s&&(s=Jn(s)),a&&a!=s&&(a=Jn(a));var p=[s];if(!u){var g,v=t.text.length-2;if(v>0&&s)for(var f=0;f<s.length;++f)null==s[f].to&&(g||(g=[])).push(new Xn(s[f].marker,null,null));for(var f=0;v>f;++f)p.push(g);p.push(a)}return p}function Jn(e){for(var t=0;t<e.length;++t){var r=e[t];null!=r.from&&r.from==r.to&&r.marker.clearWhenEmpty!==!1&&e.splice(t--,1)}return e.length?e:null}function ei(e,t){var r=ao(e,t),n=Qn(e,t);if(!r)return n;if(!n)return r;for(var i=0;i<r.length;++i){var o=r[i],l=n[i];if(o&&l)e:for(var s=0;s<l.length;++s){for(var a=l[s],u=0;u<o.length;++u)if(o[u].marker==a.marker)continue e;o.push(a)}else l&&(r[i]=l)}return r}function ti(e,t,r){var n=null;if(e.iter(t.line,r.line+1,function(e){if(e.markedSpans)for(var t=0;t<e.markedSpans.length;++t){var r=e.markedSpans[t].marker;!r.readOnly||n&&-1!=Mo(n,r)||(n||(n=[])).push(r)}}),!n)return null;for(var i=[{from:t,to:r}],o=0;o<n.length;++o)for(var l=n[o],s=l.find(0),a=0;a<i.length;++a){var u=i[a];if(!(Nl(u.to,s.from)<0||Nl(u.from,s.to)>0)){var c=[a,1],f=Nl(u.from,s.from),h=Nl(u.to,s.to);(0>f||!l.inclusiveLeft&&!f)&&c.push({from:u.from,to:s.from}),(h>0||!l.inclusiveRight&&!h)&&c.push({from:s.to,to:u.to}),i.splice.apply(i,c),a+=c.length-1}}return i}function ri(e){var t=e.markedSpans;if(t){for(var r=0;r<t.length;++r)t[r].marker.detachLine(e);e.markedSpans=null}}function ni(e,t){if(t){for(var r=0;r<t.length;++r)t[r].marker.attachLine(e);e.markedSpans=t}}function ii(e){return e.inclusiveLeft?-1:0}function oi(e){return e.inclusiveRight?1:0}function li(e,t){var r=e.lines.length-t.lines.length;if(0!=r)return r;var n=e.find(),i=t.find(),o=Nl(n.from,i.from)||ii(e)-ii(t);if(o)return-o;var l=Nl(n.to,i.to)||oi(e)-oi(t);return l?l:t.id-e.id}function si(e,t){var r,n=Ml&&e.markedSpans;if(n)for(var i,o=0;o<n.length;++o)i=n[o],i.marker.collapsed&&null==(t?i.from:i.to)&&(!r||li(r,i.marker)<0)&&(r=i.marker);return r}function ai(e){return si(e,!0)}function ui(e){return si(e,!1)}function ci(e,t,r,n,i){var o=Ki(e,t),l=Ml&&o.markedSpans;if(l)for(var s=0;s<l.length;++s){var a=l[s];if(a.marker.collapsed){var u=a.marker.find(0),c=Nl(u.from,r)||ii(a.marker)-ii(i),f=Nl(u.to,n)||oi(a.marker)-oi(i);if(!(c>=0&&0>=f||0>=c&&f>=0)&&(0>=c&&(Nl(u.to,r)>0||a.marker.inclusiveRight&&i.inclusiveLeft)||c>=0&&(Nl(u.from,n)<0||a.marker.inclusiveLeft&&i.inclusiveRight)))return!0}}}function fi(e){for(var t;t=ai(e);)e=t.find(-1,!0).line;return e}function hi(e){for(var t,r;t=ui(e);)e=t.find(1,!0).line,(r||(r=[])).push(e);return r}function di(e,t){var r=Ki(e,t),n=fi(r);return r==n?t:Yi(n)}function pi(e,t){if(t>e.lastLine())return t;var r,n=Ki(e,t);if(!gi(e,n))return t;for(;r=ui(n);)n=r.find(1,!0).line;return Yi(n)+1}function gi(e,t){var r=Ml&&t.markedSpans;if(r)for(var n,i=0;i<r.length;++i)if(n=r[i],n.marker.collapsed){if(null==n.from)return!0;if(!n.marker.widgetNode&&0==n.from&&n.marker.inclusiveLeft&&vi(e,t,n))return!0}}function vi(e,t,r){if(null==r.to){var n=r.marker.find(1,!0);return vi(e,n.line,_n(n.line.markedSpans,r.marker))}if(r.marker.inclusiveRight&&r.to==t.text.length)return!0;for(var i,o=0;o<t.markedSpans.length;++o)if(i=t.markedSpans[o],i.marker.collapsed&&!i.marker.widgetNode&&i.from==r.to&&(null==i.to||i.to!=r.from)&&(i.marker.inclusiveLeft||r.marker.inclusiveRight)&&vi(e,t,i))return!0}function mi(e,t,r){qi(t)<(e.curOp&&e.curOp.scrollTop||e.doc.scrollTop)&&Wn(e,null,r)}function yi(e){if(null!=e.height)return 
e.height;if(!zs(document.body,e.node)){var t="position: relative;";e.coverGutter&&(t+="margin-left: -"+e.cm.display.gutters.offsetWidth+"px;"),e.noHScroll&&(t+="width: "+e.cm.display.wrapper.clientWidth+"px;"),Fo(e.cm.display.measure,zo("div",[e.node],null,t))}return e.height=e.node.offsetHeight}function bi(e,t,r,n){var i=new us(e,r,n);return i.noHScroll&&(e.display.alignWidgets=!0),Pn(e.doc,t,"widget",function(t){var r=t.widgets||(t.widgets=[]);if(null==i.insertAt?r.push(i):r.splice(Math.min(r.length-1,Math.max(0,i.insertAt)),0,i),i.line=t,!gi(e.doc,t)){var n=qi(t)<e.doc.scrollTop;_i(t,t.height+yi(i)),n&&Wn(e,null,i.height),e.curOp.forceUpdate=!0}return!0}),i}function wi(e,t,r,n){e.text=t,e.stateAfter&&(e.stateAfter=null),e.styles&&(e.styles=null),null!=e.order&&(e.order=null),ri(e),ni(e,r);var i=n?n(e):1;i!=e.height&&_i(e,i)}function xi(e){e.parent=null,ri(e)}function Ci(e,t){if(e)for(;;){var r=e.match(/(?:^|\s+)line-(background-)?(\S+)/);if(!r)break;e=e.slice(0,r.index)+e.slice(r.index+r[0].length);var n=r[1]?"bgClass":"textClass";null==t[n]?t[n]=r[2]:new RegExp("(?:^|s)"+r[2]+"(?:$|s)").test(t[n])||(t[n]+=" "+r[2])}return e}function Si(t,r){if(t.blankLine)return t.blankLine(r);if(t.innerMode){var n=e.innerMode(t,r);return n.mode.blankLine?n.mode.blankLine(n.state):void 0}}function Li(t,r,n,i){for(var o=0;10>o;o++){i&&(i[0]=e.innerMode(t,n).mode);var l=t.token(r,n);if(r.pos>r.start)return l}throw new Error("Mode "+t.name+" failed to advance stream.")}function ki(e,t,r,n){function i(e){return{start:f.start,end:f.pos,string:f.current(),type:o||null,state:e?Ql(l.mode,c):c}}var o,l=e.doc,s=l.mode;t=pt(l,t);var a,u=Ki(l,t.line),c=Rt(e,t.line,r),f=new os(u.text,e.options.tabSize);for(n&&(a=[]);(n||f.pos<t.ch)&&!f.eol();)f.start=f.pos,o=Li(s,f,c),n&&a.push(i(!0));return n?a:i()}function Ti(e,t,r,n,i,o,l){var s=r.flattenSpans;null==s&&(s=e.options.flattenSpans);var a,u=0,c=null,f=new os(t,e.options.tabSize),h=e.options.addModeClass&&[null];for(""==t&&Ci(Si(r,n),o);!f.eol();){if(f.pos>e.options.maxHighlightLength?(s=!1,l&&Ni(e,t,n,f.pos),f.pos=t.length,a=null):a=Ci(Li(r,f,n,h),o),h){var d=h[0].name;d&&(a="m-"+(a?d+" "+a:d))}if(!s||c!=a){for(;u<f.start;)u=Math.min(f.start,u+5e4),i(u,c);c=a}f.start=f.pos}for(;u<f.pos;){var p=Math.min(f.pos,u+5e4);i(p,c),u=p}}function Mi(e,t,r,n){var i=[e.state.modeGen],o={};Ti(e,t.text,e.doc.mode,r,function(e,t){i.push(e,t)},o,n);for(var l=0;l<e.state.overlays.length;++l){var s=e.state.overlays[l],a=1,u=0;Ti(e,t.text,s.mode,!0,function(e,t){for(var r=a;e>u;){var n=i[a];n>e&&i.splice(a,1,e,i[a+1],n),a+=2,u=Math.min(e,n)}if(t)if(s.opaque)i.splice(r,a-r,e,"cm-overlay "+t),a=r+2;else for(;a>r;r+=2){var o=i[r+1];i[r+1]=(o?o+" ":"")+"cm-overlay "+t}},o)}return{styles:i,classes:o.bgClass||o.textClass?o:null}}function Ai(e,t,r){if(!t.styles||t.styles[0]!=e.state.modeGen){var n=Mi(e,t,t.stateAfter=Rt(e,Yi(t)));t.styles=n.styles,n.classes?t.styleClasses=n.classes:t.styleClasses&&(t.styleClasses=null),r===e.doc.frontier&&e.doc.frontier++}return t.styles}function Ni(e,t,r,n){var i=e.doc.mode,o=new os(t,e.options.tabSize);for(o.start=o.pos=n||0,""==t&&Si(i,r);!o.eol()&&o.pos<=e.options.maxHighlightLength;)Li(i,o,r),o.start=o.pos}function Wi(e,t){if(!e||/^\s*$/.test(e))return null;var r=t.addModeClass?hs:fs;return r[e]||(r[e]=e.replace(/\S+/g,"cm-$&"))}function Oi(e,t){var r=zo("span",null,null,hl?"padding-right: .1px":null),n={pre:zo("pre",[r]),content:r,col:0,pos:0,cm:e,splitSpaces:(cl||hl)&&e.getOption("lineWrapping")};t.measure={};for(var 
i=0;i<=(t.rest?t.rest.length:0);i++){var o,l=i?t.rest[i-1]:t.line;n.pos=0,n.addToken=Hi,Xo(e.display.measure)&&(o=Zi(l))&&(n.addToken=Ii(n.addToken,o)),n.map=[];var s=t!=e.display.externalMeasured&&Yi(l);Ei(l,n,Ai(e,l,s)),l.styleClasses&&(l.styleClasses.bgClass&&(n.bgClass=Go(l.styleClasses.bgClass,n.bgClass||"")),l.styleClasses.textClass&&(n.textClass=Go(l.styleClasses.textClass,n.textClass||""))),0==n.map.length&&n.map.push(0,0,n.content.appendChild(jo(e.display.measure))),0==i?(t.measure.map=n.map,t.measure.cache={}):((t.measure.maps||(t.measure.maps=[])).push(n.map),(t.measure.caches||(t.measure.caches=[])).push({}))}return hl&&/\bcm-tab\b/.test(n.content.lastChild.className)&&(n.content.className="cm-tab-wrap-hack"),Cs(e,"renderLine",e,t.line,n.pre),n.pre.className&&(n.textClass=Go(n.pre.className,n.textClass||"")),n}function Di(e){var t=zo("span","•","cm-invalidchar");return t.title="\\u"+e.charCodeAt(0).toString(16),t.setAttribute("aria-label",t.title),t}function Hi(e,t,r,n,i,o,l){if(t){var s=e.splitSpaces?t.replace(/ {3,}/g,Pi):t,a=e.cm.state.specialChars,u=!1;if(a.test(t))for(var c=document.createDocumentFragment(),f=0;;){a.lastIndex=f;var h=a.exec(t),d=h?h.index-f:t.length-f;if(d){var p=document.createTextNode(s.slice(f,f+d));c.appendChild(cl&&9>fl?zo("span",[p]):p),e.map.push(e.pos,e.pos+d,p),e.col+=d,e.pos+=d}if(!h)break;if(f+=d+1," "==h[0]){var g=e.cm.options.tabSize,v=g-e.col%g,p=c.appendChild(zo("span",ko(v),"cm-tab"));p.setAttribute("role","presentation"),p.setAttribute("cm-text"," "),e.col+=v}else{var p=e.cm.options.specialCharPlaceholder(h[0]);p.setAttribute("cm-text",h[0]),c.appendChild(cl&&9>fl?zo("span",[p]):p),e.col+=1}e.map.push(e.pos,e.pos+1,p),e.pos++}else{e.col+=t.length;var c=document.createTextNode(s);e.map.push(e.pos,e.pos+t.length,c),cl&&9>fl&&(u=!0),e.pos+=t.length}if(r||n||i||u||l){var m=r||"";n&&(m+=n),i&&(m+=i);var y=zo("span",[c],m,l);return o&&(y.title=o),e.content.appendChild(y)}e.content.appendChild(c)}}function Pi(e){for(var t=" ",r=0;r<e.length-2;++r)t+=r%2?" 
":" ";return t+=" "}function Ii(e,t){return function(r,n,i,o,l,s,a){i=i?i+" cm-force-border":"cm-force-border";for(var u=r.pos,c=u+n.length;;){for(var f=0;f<t.length;f++){var h=t[f];if(h.to>u&&h.from<=u)break}if(h.to>=c)return e(r,n,i,o,l,s,a);e(r,n.slice(0,h.to-u),i,o,null,s,a),o=null,n=n.slice(h.to-u),u=h.to}}}function zi(e,t,r,n){var i=!n&&r.widgetNode;i&&e.map.push(e.pos,e.pos+t,i),!n&&e.cm.display.input.needsContentAttribute&&(i||(i=e.content.appendChild(document.createElement("span"))),i.setAttribute("cm-marker",r.id)),i&&(e.cm.display.input.setUneditable(i),e.content.appendChild(i)),e.pos+=t}function Ei(e,t,r){var n=e.markedSpans,i=e.text,o=0;if(n)for(var l,s,a,u,c,f,h,d=i.length,p=0,g=1,v="",m=0;;){if(m==p){a=u=c=f=s="",h=null,m=1/0;for(var y=[],b=0;b<n.length;++b){var w=n[b],x=w.marker;w.from<=p&&(null==w.to||w.to>p)?(null!=w.to&&m>w.to&&(m=w.to,u=""),x.className&&(a+=" "+x.className),x.css&&(s=x.css),x.startStyle&&w.from==p&&(c+=" "+x.startStyle),x.endStyle&&w.to==m&&(u+=" "+x.endStyle),x.title&&!f&&(f=x.title),x.collapsed&&(!h||li(h.marker,x)<0)&&(h=w)):w.from>p&&m>w.from&&(m=w.from),"bookmark"==x.type&&w.from==p&&x.widgetNode&&y.push(x)}if(h&&(h.from||0)==p&&(zi(t,(null==h.to?d+1:h.to)-p,h.marker,null==h.from),null==h.to))return;if(!h&&y.length)for(var b=0;b<y.length;++b)zi(t,0,y[b])}if(p>=d)break;for(var C=Math.min(d,m);;){if(v){var S=p+v.length;if(!h){var L=S>C?v.slice(0,C-p):v;t.addToken(t,L,l?l+a:a,c,p+L.length==m?u:"",f,s)}if(S>=C){v=v.slice(C-p),p=C;break}p=S,c=""}v=i.slice(o,o=r[g++]),l=Wi(r[g++],t.cm.options)}}else for(var g=1;g<r.length;g+=2)t.addToken(t,i.slice(o,o=r[g]),Wi(r[g+1],t.cm.options))}function Fi(e,t){return 0==t.from.ch&&0==t.to.ch&&""==To(t.text)&&(!e.cm||e.cm.options.wholeLineUpdateBefore)}function Ri(e,t,r,n){function i(e){return r?r[e]:null}function o(e,r,i){wi(e,r,i,n),mo(e,"change",e,t)}function l(e,t){for(var r=e,o=[];t>r;++r)o.push(new cs(u[r],i(r),n));return o}var s=t.from,a=t.to,u=t.text,c=Ki(e,s.line),f=Ki(e,a.line),h=To(u),d=i(u.length-1),p=a.line-s.line;if(t.full)e.insert(0,l(0,u.length)),e.remove(u.length,e.size-u.length);else if(Fi(e,t)){var g=l(0,u.length-1);o(f,f.text,d),p&&e.remove(s.line,p),g.length&&e.insert(s.line,g)}else if(c==f)if(1==u.length)o(c,c.text.slice(0,s.ch)+h+c.text.slice(a.ch),d);else{var g=l(1,u.length-1);g.push(new cs(h+c.text.slice(a.ch),d,n)),o(c,c.text.slice(0,s.ch)+u[0],i(0)),e.insert(s.line+1,g)}else if(1==u.length)o(c,c.text.slice(0,s.ch)+u[0]+f.text.slice(a.ch),i(0)),e.remove(s.line+1,p);else{o(c,c.text.slice(0,s.ch)+u[0],i(0)),o(f,h+f.text.slice(a.ch),d);var g=l(1,u.length-1);p>1&&e.remove(s.line+1,p-1),e.insert(s.line+1,g)}mo(e,"change",e,t)}function Bi(e){this.lines=e,this.parent=null;for(var t=0,r=0;t<e.length;++t)e[t].parent=this,r+=e[t].height;this.height=r}function Gi(e){this.children=e;for(var t=0,r=0,n=0;n<e.length;++n){var i=e[n];t+=i.chunkSize(),r+=i.height,i.parent=this}this.size=t,this.height=r,this.parent=null}function Ui(e,t,r){function n(e,i,o){if(e.linked)for(var l=0;l<e.linked.length;++l){var s=e.linked[l];if(s.doc!=i){var a=o&&s.sharedHist;(!r||a)&&(t(s.doc,a),n(s.doc,e,a))}}}n(e,null,!0)}function Vi(e,t){if(t.cm)throw new Error("This document is already in use.");e.doc=t,t.cm=e,l(e),r(e),e.options.lineWrapping||h(e),e.options.mode=t.modeOption,Dr(e)}function Ki(e,t){if(t-=e.first,0>t||t>=e.size)throw new Error("There is no line "+(t+e.first)+" in the document.");for(var r=e;!r.lines;)for(var n=0;;++n){var i=r.children[n],o=i.chunkSize();if(o>t){r=i;break}t-=o}return r.lines[t]}function 
ji(e,t,r){var n=[],i=t.line;return e.iter(t.line,r.line+1,function(e){var o=e.text;i==r.line&&(o=o.slice(0,r.ch)),i==t.line&&(o=o.slice(t.ch)),n.push(o),++i}),n}function Xi(e,t,r){var n=[];return e.iter(t,r,function(e){n.push(e.text)}),n}function _i(e,t){var r=t-e.height;if(r)for(var n=e;n;n=n.parent)n.height+=r}function Yi(e){if(null==e.parent)return null;for(var t=e.parent,r=Mo(t.lines,e),n=t.parent;n;t=n,n=n.parent)for(var i=0;n.children[i]!=t;++i)r+=n.children[i].chunkSize();return r+t.first}function $i(e,t){var r=e.first;e:do{for(var n=0;n<e.children.length;++n){var i=e.children[n],o=i.height;if(o>t){e=i;continue e}t-=o,r+=i.chunkSize()}return r}while(!e.lines);for(var n=0;n<e.lines.length;++n){var l=e.lines[n],s=l.height;if(s>t)break;t-=s}return r+n}function qi(e){e=fi(e);for(var t=0,r=e.parent,n=0;n<r.lines.length;++n){var i=r.lines[n];if(i==e)break;t+=i.height}for(var o=r.parent;o;r=o,o=r.parent)for(var n=0;n<o.children.length;++n){var l=o.children[n];if(l==r)break;t+=l.height}return t}function Zi(e){var t=e.order;return null==t&&(t=e.order=$s(e.text)),t}function Qi(e){this.done=[],this.undone=[],this.undoDepth=1/0,this.lastModTime=this.lastSelTime=0,this.lastOp=this.lastSelOp=null,this.lastOrigin=this.lastSelOrigin=null,this.generation=this.maxGeneration=e||1}function Ji(e,t){var r={from:_(t.from),to:Vl(t),text:ji(e,t.from,t.to)};return lo(e,r,t.from.line,t.to.line+1),Ui(e,function(e){lo(e,r,t.from.line,t.to.line+1)},!0),r}function eo(e){for(;e.length;){var t=To(e);if(!t.ranges)break;e.pop()}}function to(e,t){return t?(eo(e.done),To(e.done)):e.done.length&&!To(e.done).ranges?To(e.done):e.done.length>1&&!e.done[e.done.length-2].ranges?(e.done.pop(),To(e.done)):void 0}function ro(e,t,r,n){var i=e.history;i.undone.length=0;var o,l=+new Date;if((i.lastOp==n||i.lastOrigin==t.origin&&t.origin&&("+"==t.origin.charAt(0)&&e.cm&&i.lastModTime>l-e.cm.options.historyEventDelay||"*"==t.origin.charAt(0)))&&(o=to(i,i.lastOp==n))){var s=To(o.changes);0==Nl(t.from,t.to)&&0==Nl(t.from,s.to)?s.to=Vl(t):o.changes.push(Ji(e,t))}else{var a=To(i.done);for(a&&a.ranges||oo(e.sel,i.done),o={changes:[Ji(e,t)],generation:i.generation},i.done.push(o);i.done.length>i.undoDepth;)i.done.shift(),i.done[0].ranges||i.done.shift()}i.done.push(r),i.generation=++i.maxGeneration,i.lastModTime=i.lastSelTime=l,i.lastOp=i.lastSelOp=n,i.lastOrigin=i.lastSelOrigin=t.origin,s||Cs(e,"historyAdded")}function no(e,t,r,n){var i=t.charAt(0);return"*"==i||"+"==i&&r.ranges.length==n.ranges.length&&r.somethingSelected()==n.somethingSelected()&&new Date-e.history.lastSelTime<=(e.cm?e.cm.options.historyEventDelay:500)}function io(e,t,r,n){var i=e.history,o=n&&n.origin;r==i.lastSelOp||o&&i.lastSelOrigin==o&&(i.lastModTime==i.lastSelTime&&i.lastOrigin==o||no(e,o,To(i.done),t))?i.done[i.done.length-1]=t:oo(t,i.done),i.lastSelTime=+new Date,i.lastSelOrigin=o,i.lastSelOp=r,n&&n.clearRedo!==!1&&eo(i.undone)}function oo(e,t){var r=To(t);r&&r.ranges&&r.equals(e)||t.push(e)}function lo(e,t,r,n){var i=t["spans_"+e.id],o=0;e.iter(Math.max(e.first,r),Math.min(e.first+e.size,n),function(r){r.markedSpans&&((i||(i=t["spans_"+e.id]={}))[o]=r.markedSpans),++o})}function so(e){if(!e)return null;for(var t,r=0;r<e.length;++r)e[r].marker.explicitlyCleared?t||(t=e.slice(0,r)):t&&t.push(e[r]);return t?t.length?t:null:e}function ao(e,t){var r=t["spans_"+e.id];if(!r)return null;for(var n=0,i=[];n<t.text.length;++n)i.push(so(r[n]));return i}function uo(e,t,r){for(var n=0,i=[];n<e.length;++n){var 
o=e[n];if(o.ranges)i.push(r?ut.prototype.deepCopy.call(o):o);else{var l=o.changes,s=[];i.push({changes:s});for(var a=0;a<l.length;++a){var u,c=l[a];if(s.push({from:c.from,to:c.to,text:c.text}),t)for(var f in c)(u=f.match(/^spans_(\d+)$/))&&Mo(t,Number(u[1]))>-1&&(To(s)[f]=c[f],delete c[f])}}}return i}function co(e,t,r,n){r<e.line?e.line+=n:t<e.line&&(e.line=t,e.ch=0)}function fo(e,t,r,n){for(var i=0;i<e.length;++i){var o=e[i],l=!0;if(o.ranges){o.copied||(o=e[i]=o.deepCopy(),o.copied=!0);for(var s=0;s<o.ranges.length;s++)co(o.ranges[s].anchor,t,r,n),co(o.ranges[s].head,t,r,n)}else{for(var s=0;s<o.changes.length;++s){var a=o.changes[s];if(r<a.from.line)a.from=Al(a.from.line+n,a.from.ch),a.to=Al(a.to.line+n,a.to.ch);else if(t<=a.to.line){l=!1;break}}l||(e.splice(0,i+1),i=0)}}}function ho(e,t){var r=t.from.line,n=t.to.line,i=t.text.length-(n-r)-1;fo(e.done,r,n,i),fo(e.undone,r,n,i)}function po(e){return null!=e.defaultPrevented?e.defaultPrevented:0==e.returnValue}function go(e){return e.target||e.srcElement}function vo(e){var t=e.which;return null==t&&(1&e.button?t=1:2&e.button?t=3:4&e.button&&(t=2)),xl&&e.ctrlKey&&1==t&&(t=3),t}function mo(e,t){function r(e){return function(){e.apply(null,o)}}var n=e._handlers&&e._handlers[t];if(n){var i,o=Array.prototype.slice.call(arguments,2);Il?i=Il.delayedCallbacks:Ss?i=Ss:(i=Ss=[],setTimeout(yo,0));for(var l=0;l<n.length;++l)i.push(r(n[l]))}}function yo(){var e=Ss;Ss=null;for(var t=0;t<e.length;++t)e[t]()}function bo(e,t,r){return"string"==typeof t&&(t={type:t,preventDefault:function(){this.defaultPrevented=!0}}),Cs(e,r||t.type,e,t),po(t)||t.codemirrorIgnore}function wo(e){var t=e._handlers&&e._handlers.cursorActivity;if(t)for(var r=e.curOp.cursorActivityHandlers||(e.curOp.cursorActivityHandlers=[]),n=0;n<t.length;++n)-1==Mo(r,t[n])&&r.push(t[n])}function xo(e,t){var r=e._handlers&&e._handlers[t];return r&&r.length>0}function Co(e){e.prototype.on=function(e,t){ws(this,e,t)},e.prototype.off=function(e,t){xs(this,e,t)}}function So(){this.id=null}function Lo(e,t,r){for(var n=0,i=0;;){var o=e.indexOf(" ",n);-1==o&&(o=e.length);var l=o-n;if(o==e.length||i+l>=t)return n+Math.min(l,t-i);if(i+=o-n,i+=r-i%r,n=o+1,i>=t)return n}}function ko(e){for(;Ws.length<=e;)Ws.push(To(Ws)+" ");return Ws[e]}function To(e){return e[e.length-1]}function Mo(e,t){for(var r=0;r<e.length;++r)if(e[r]==t)return r;return-1}function Ao(e,t){for(var r=[],n=0;n<e.length;n++)r[n]=t(e[n],n);return r}function No(){}function Wo(e,t){var r;return Object.create?r=Object.create(e):(No.prototype=e,r=new No),t&&Oo(t,r),r}function Oo(e,t,r){t||(t={});for(var n in e)!e.hasOwnProperty(n)||r===!1&&t.hasOwnProperty(n)||(t[n]=e[n]);return t}function Do(e){var t=Array.prototype.slice.call(arguments,1);return function(){return e.apply(null,t)}}function Ho(e,t){return t?t.source.indexOf("\\w")>-1&&Ps(e)?!0:t.test(e):Ps(e)}function Po(e){for(var t in e)if(e.hasOwnProperty(t)&&e[t])return!1;return!0}function Io(e){return e.charCodeAt(0)>=768&&Is.test(e)}function zo(e,t,r,n){var i=document.createElement(e);if(r&&(i.className=r),n&&(i.style.cssText=n),"string"==typeof t)i.appendChild(document.createTextNode(t));else if(t)for(var o=0;o<t.length;++o)i.appendChild(t[o]);return i}function Eo(e){for(var t=e.childNodes.length;t>0;--t)e.removeChild(e.firstChild);return e}function Fo(e,t){return Eo(e).appendChild(t)}function Ro(){return document.activeElement}function Bo(e){return new RegExp("(^|\\s)"+e+"(?:$|\\s)\\s*")}function Go(e,t){for(var r=e.split(" "),n=0;n<r.length;n++)r[n]&&!Bo(r[n]).test(t)&&(t+=" 
"+r[n]);return t}function Uo(e){if(document.body.getElementsByClassName)for(var t=document.body.getElementsByClassName("CodeMirror"),r=0;r<t.length;r++){var n=t[r].CodeMirror;n&&e(n)}}function Vo(){Gs||(Ko(),Gs=!0)}function Ko(){var e;ws(window,"resize",function(){null==e&&(e=setTimeout(function(){e=null,Uo(Br)},100))}),ws(window,"blur",function(){Uo(fn)})}function jo(e){if(null==Es){var t=zo("span","");Fo(e,zo("span",[t,document.createTextNode("x")])),0!=e.firstChild.offsetHeight&&(Es=t.offsetWidth<=1&&t.offsetHeight>2&&!(cl&&8>fl))}var r=Es?zo("span",""):zo("span"," ",null,"display: inline-block; width: 1px; margin-right: -1px");return r.setAttribute("cm-text",""),r}function Xo(e){if(null!=Fs)return Fs;var t=Fo(e,document.createTextNode("AخA")),r=Ds(t,0,1).getBoundingClientRect();if(!r||r.left==r.right)return!1;var n=Ds(t,1,2).getBoundingClientRect();return Fs=n.right-r.right<3}function _o(e){if(null!=Xs)return Xs;var t=Fo(e,zo("span","x")),r=t.getBoundingClientRect(),n=Ds(t,0,1).getBoundingClientRect();return Xs=Math.abs(r.left-n.left)>1}function Yo(e,t,r,n){if(!e)return n(t,r,"ltr");for(var i=!1,o=0;o<e.length;++o){var l=e[o];(l.from<r&&l.to>t||t==r&&l.to==t)&&(n(Math.max(l.from,t),Math.min(l.to,r),1==l.level?"rtl":"ltr"),i=!0)}i||n(t,r,"ltr")}function $o(e){return e.level%2?e.to:e.from}function qo(e){return e.level%2?e.from:e.to}function Zo(e){var t=Zi(e);return t?$o(t[0]):0}function Qo(e){var t=Zi(e);return t?qo(To(t)):e.text.length}function Jo(e,t){var r=Ki(e.doc,t),n=fi(r);n!=r&&(t=Yi(n));var i=Zi(n),o=i?i[0].level%2?Qo(n):Zo(n):0;return Al(t,o)}function el(e,t){for(var r,n=Ki(e.doc,t);r=ui(n);)n=r.find(1,!0).line,t=null;var i=Zi(n),o=i?i[0].level%2?Zo(n):Qo(n):n.text.length;return Al(null==t?Yi(n):t,o)}function tl(e,t){var r=Jo(e,t.line),n=Ki(e.doc,r.line),i=Zi(n);if(!i||0==i[0].level){var o=Math.max(0,n.text.search(/\S/)),l=t.line==r.line&&t.ch<=o&&t.ch;return Al(r.line,l?0:o)}return r}function rl(e,t,r){var n=e[0].level;return t==n?!0:r==n?!1:r>t}function nl(e,t){Ys=null;for(var r,n=0;n<e.length;++n){var i=e[n];if(i.from<t&&i.to>t)return n;if(i.from==t||i.to==t){if(null!=r)return rl(e,i.level,e[r].level)?(i.from!=i.to&&(Ys=r),n):(i.from!=i.to&&(Ys=n),r);r=n}}return r}function il(e,t,r,n){if(!n)return t+r;do t+=r;while(t>0&&Io(e.text.charAt(t)));return t}function ol(e,t,r,n){var i=Zi(e);if(!i)return ll(e,t,r,n);for(var o=nl(i,t),l=i[o],s=il(e,t,l.level%2?-r:r,n);;){if(s>l.from&&s<l.to)return s;if(s==l.from||s==l.to)return nl(i,s)==o?s:(l=i[o+=r],r>0==l.level%2?l.to:l.from);if(l=i[o+=r],!l)return null;s=r>0==l.level%2?il(e,l.to,-1,n):il(e,l.from,1,n)}}function ll(e,t,r,n){var i=t+r;if(n)for(;i>0&&Io(e.text.charAt(i));)i+=r;return 0>i||i>e.text.length?null:i}var sl=/gecko\/\d/i.test(navigator.userAgent),al=/MSIE \d/.test(navigator.userAgent),ul=/Trident\/(?:[7-9]|\d{2,})\..*rv:(\d+)/.exec(navigator.userAgent),cl=al||ul,fl=cl&&(al?document.documentMode||6:ul[1]),hl=/WebKit\//.test(navigator.userAgent),dl=hl&&/Qt\/\d+\.\d+/.test(navigator.userAgent),pl=/Chrome\//.test(navigator.userAgent),gl=/Opera\//.test(navigator.userAgent),vl=/Apple Computer/.test(navigator.vendor),ml=/Mac OS X 1\d\D([8-9]|\d\d)\D/.test(navigator.userAgent),yl=/PhantomJS/.test(navigator.userAgent),bl=/AppleWebKit/.test(navigator.userAgent)&&/Mobile\/\w+/.test(navigator.userAgent),wl=bl||/Android|webOS|BlackBerry|Opera Mini|Opera 
Mobi|IEMobile/i.test(navigator.userAgent),xl=bl||/Mac/.test(navigator.platform),Cl=/win/i.test(navigator.platform),Sl=gl&&navigator.userAgent.match(/Version\/(\d*\.\d*)/);Sl&&(Sl=Number(Sl[1])),Sl&&Sl>=15&&(gl=!1,hl=!0);var Ll=xl&&(dl||gl&&(null==Sl||12.11>Sl)),kl=sl||cl&&fl>=9,Tl=!1,Ml=!1;g.prototype=Oo({update:function(e){var t=e.scrollWidth>e.clientWidth+1,r=e.scrollHeight>e.clientHeight+1,n=e.nativeBarWidth;if(r){this.vert.style.display="block",this.vert.style.bottom=t?n+"px":"0";var i=e.viewHeight-(t?n:0);this.vert.firstChild.style.height=Math.max(0,e.scrollHeight-e.clientHeight+i)+"px"}else this.vert.style.display="",this.vert.firstChild.style.height="0";if(t){this.horiz.style.display="block",this.horiz.style.right=r?n+"px":"0",this.horiz.style.left=e.barLeft+"px";var o=e.viewWidth-e.barLeft-(r?n:0);this.horiz.firstChild.style.width=e.scrollWidth-e.clientWidth+o+"px"}else this.horiz.style.display="",this.horiz.firstChild.style.width="0";return!this.checkedOverlay&&e.clientHeight>0&&(0==n&&this.overlayHack(),this.checkedOverlay=!0),{right:r?n:0,bottom:t?n:0}},setScrollLeft:function(e){this.horiz.scrollLeft!=e&&(this.horiz.scrollLeft=e)},setScrollTop:function(e){this.vert.scrollTop!=e&&(this.vert.scrollTop=e)},overlayHack:function(){var e=xl&&!ml?"12px":"18px";this.horiz.style.minHeight=this.vert.style.minWidth=e;var t=this,r=function(e){go(e)!=t.vert&&go(e)!=t.horiz&&Mr(t.cm,Vr)(e)};ws(this.vert,"mousedown",r),ws(this.horiz,"mousedown",r)},clear:function(){var e=this.horiz.parentNode;e.removeChild(this.horiz),e.removeChild(this.vert)}},g.prototype),v.prototype=Oo({update:function(){return{bottom:0,right:0}},setScrollLeft:function(){},setScrollTop:function(){},clear:function(){}},v.prototype),e.scrollbarModel={"native":g,"null":v},k.prototype.signal=function(e,t){xo(e,t)&&this.events.push(arguments)},k.prototype.finish=function(){for(var e=0;e<this.events.length;e++)Cs.apply(null,this.events[e])};var Al=e.Pos=function(e,t){return this instanceof Al?(this.line=e,void(this.ch=t)):new Al(e,t)},Nl=e.cmpPos=function(e,t){return e.line-t.line||e.ch-t.ch},Wl=null;tt.prototype=Oo({init:function(e){function t(e){if(n.somethingSelected())Wl=n.getSelections(),r.inaccurateSelection&&(r.prevInput="",r.inaccurateSelection=!1,o.value=Wl.join("\n"),Os(o));else{var t=J(n);Wl=t.text,"cut"==e.type?n.setSelections(t.ranges,null,Ts):(r.prevInput="",o.value=t.text.join("\n"),Os(o))}"cut"==e.type&&(n.state.cutIncoming=!0)}var r=this,n=this.cm,i=this.wrapper=rt(),o=this.textarea=i.firstChild;e.wrapper.insertBefore(i,e.wrapper.firstChild),bl&&(o.style.width="0px"),ws(o,"input",function(){cl&&fl>=9&&r.hasSelection&&(r.hasSelection=null),r.poll()}),ws(o,"paste",function(){if(hl&&!n.state.fakedLastChar&&!(new Date-n.state.lastMiddleDown<200)){var e=o.selectionStart,t=o.selectionEnd;o.value+="$",o.selectionEnd=t,o.selectionStart=e,n.state.fakedLastChar=!0}n.state.pasteIncoming=!0,r.fastPoll()}),ws(o,"cut",t),ws(o,"copy",t),ws(e.scroller,"paste",function(t){Gr(e,t)||(n.state.pasteIncoming=!0,r.focus())}),ws(e.lineSpace,"selectstart",function(t){Gr(e,t)||ms(t)})},prepareSelection:function(){var e=this.cm,t=e.display,r=e.doc,n=Dt(e);if(e.options.moveInputWithCursor){var i=cr(e,r.sel.primary().head,"div"),o=t.wrapper.getBoundingClientRect(),l=t.lineDiv.getBoundingClientRect();n.teTop=Math.max(0,Math.min(t.wrapper.clientHeight-10,i.top+l.top-o.top)),n.teLeft=Math.max(0,Math.min(t.wrapper.clientWidth-10,i.left+l.left-o.left))}return n},showSelection:function(e){var 
t=this.cm,r=t.display;Fo(r.cursorDiv,e.cursors),Fo(r.selectionDiv,e.selection),null!=e.teTop&&(this.wrapper.style.top=e.teTop+"px",this.wrapper.style.left=e.teLeft+"px")},reset:function(e){if(!this.contextMenuPending){var t,r,n=this.cm,i=n.doc;if(n.somethingSelected()){this.prevInput="";var o=i.sel.primary();t=js&&(o.to().line-o.from().line>100||(r=n.getSelection()).length>1e3);var l=t?"-":r||n.getSelection();this.textarea.value=l,n.state.focused&&Os(this.textarea),cl&&fl>=9&&(this.hasSelection=l)}else e||(this.prevInput=this.textarea.value="",cl&&fl>=9&&(this.hasSelection=null));this.inaccurateSelection=t}},getField:function(){return this.textarea},supportsTouch:function(){return!1},focus:function(){if("nocursor"!=this.cm.options.readOnly&&(!wl||Ro()!=this.textarea))try{this.textarea.focus()}catch(e){}},blur:function(){this.textarea.blur()},resetPosition:function(){this.wrapper.style.top=this.wrapper.style.left=0},receivedFocus:function(){this.slowPoll()},slowPoll:function(){var e=this;e.pollingFast||e.polling.set(this.cm.options.pollInterval,function(){e.poll(),e.cm.state.focused&&e.slowPoll()})},fastPoll:function(){function e(){var n=r.poll();n||t?(r.pollingFast=!1,r.slowPoll()):(t=!0,r.polling.set(60,e))}var t=!1,r=this;r.pollingFast=!0,r.polling.set(20,e)},poll:function(){var e=this.cm,t=this.textarea,r=this.prevInput;if(!e.state.focused||Ks(t)&&!r||Z(e)||e.options.disableInput||e.state.keySeq)return!1;e.state.pasteIncoming&&e.state.fakedLastChar&&(t.value=t.value.substring(0,t.value.length-1),e.state.fakedLastChar=!1);var n=t.value;if(n==r&&!e.somethingSelected())return!1;if(cl&&fl>=9&&this.hasSelection===n||xl&&/[\uf700-\uf7ff]/.test(n))return e.display.input.reset(),!1;8203!=n.charCodeAt(0)||e.doc.sel!=e.display.selForContextMenu||r||(r="");for(var i=0,o=Math.min(r.length,n.length);o>i&&r.charCodeAt(i)==n.charCodeAt(i);)++i;var l=this;return Tr(e,function(){Q(e,n.slice(i),r.length-i),n.length>1e3||n.indexOf("\n")>-1?t.value=l.prevInput="":l.prevInput=n
}),!0},ensurePolled:function(){this.pollingFast&&this.poll()&&(this.pollingFast=!1)},onKeyPress:function(){cl&&fl>=9&&(this.hasSelection=null),this.fastPoll()},onContextMenu:function(e){function t(){if(null!=l.selectionStart){var e=i.somethingSelected(),t=l.value=""+(e?l.value:"");n.prevInput=e?"":"",l.selectionStart=1,l.selectionEnd=t.length,o.selForContextMenu=i.doc.sel}}function r(){if(n.contextMenuPending=!1,n.wrapper.style.position="relative",l.style.cssText=c,cl&&9>fl&&o.scrollbars.setScrollTop(o.scroller.scrollTop=a),null!=l.selectionStart){(!cl||cl&&9>fl)&&t();var e=0,r=function(){o.selForContextMenu==i.doc.sel&&0==l.selectionStart?Mr(i,es.selectAll)(i):e++<10?o.detectingSelectAll=setTimeout(r,500):o.input.reset()};o.detectingSelectAll=setTimeout(r,200)}}var n=this,i=n.cm,o=i.display,l=n.textarea,s=Ur(i,e),a=o.scroller.scrollTop;if(s&&!gl){var u=i.options.resetSelectionOnContextMenu;u&&-1==i.doc.sel.contains(s)&&Mr(i,kt)(i.doc,ht(s),Ts);var c=l.style.cssText;if(n.wrapper.style.position="absolute",l.style.cssText="position: fixed; width: 30px; height: 30px; top: "+(e.clientY-5)+"px; left: "+(e.clientX-5)+"px; z-index: 1000; background: "+(cl?"rgba(255, 255, 255, .05)":"transparent")+"; outline: none; border-width: 0; outline: none; overflow: hidden; opacity: .05; filter: alpha(opacity=5);",hl)var f=window.scrollY;if(o.input.focus(),hl&&window.scrollTo(null,f),o.input.reset(),i.somethingSelected()||(l.value=n.prevInput=" "),n.contextMenuPending=!0,o.selForContextMenu=i.doc.sel,clearTimeout(o.detectingSelectAll),cl&&fl>=9&&t(),kl){bs(e);var h=function(){xs(window,"mouseup",h),setTimeout(r,20)};ws(window,"mouseup",h)}else setTimeout(r,50)}},setUneditable:No,needsContentAttribute:!1},tt.prototype),nt.prototype=Oo({init:function(e){function t(e){if(n.somethingSelected())Wl=n.getSelections(),"cut"==e.type&&n.replaceSelection("",null,"cut");else{var t=J(n);Wl=t.text,"cut"==e.type&&n.operation(function(){n.setSelections(t.ranges,0,Ts),n.replaceSelection("",null,"cut")})}if(e.clipboardData&&!bl)e.preventDefault(),e.clipboardData.clearData(),e.clipboardData.setData("text/plain",Wl.join("\n"));else{var r=rt(),i=r.firstChild;n.display.lineSpace.insertBefore(r,n.display.lineSpace.firstChild),i.value=Wl.join("\n");var o=document.activeElement;Os(i),setTimeout(function(){n.display.lineSpace.removeChild(r),o.focus()},50)}}var r=this,n=r.cm,i=r.div=e.lineDiv;i.contentEditable="true",et(i),ws(i,"paste",function(e){var t=e.clipboardData&&e.clipboardData.getData("text/plain");t&&(e.preventDefault(),n.replaceSelection(t,null,"paste"))}),ws(i,"compositionstart",function(e){var t=e.data;if(r.composing={sel:n.doc.sel,data:t,startData:t},t){var i=n.doc.sel.primary(),o=n.getLine(i.head.line),l=o.indexOf(t,Math.max(0,i.head.ch-t.length));l>-1&&l<=i.head.ch&&(r.composing.sel=ht(Al(i.head.line,l),Al(i.head.line,l+t.length)))}}),ws(i,"compositionupdate",function(e){r.composing.data=e.data}),ws(i,"compositionend",function(e){var t=r.composing;t&&(e.data==t.startData||/\u200b/.test(e.data)||(t.data=e.data),setTimeout(function(){t.handled||r.applyComposition(t),r.composing==t&&(r.composing=null)},50))}),ws(i,"touchstart",function(){r.forceCompositionEnd()}),ws(i,"input",function(){r.composing||r.pollContent()||Tr(r.cm,function(){Dr(n)})}),ws(i,"copy",t),ws(i,"cut",t)},prepareSelection:function(){var e=Dt(this.cm,!1);return 
e.focus=this.cm.state.focused,e},showSelection:function(e){e&&this.cm.display.view.length&&(e.focus&&this.showPrimarySelection(),this.showMultipleSelections(e))},showPrimarySelection:function(){var e=window.getSelection(),t=this.cm.doc.sel.primary(),r=lt(this.cm,e.anchorNode,e.anchorOffset),n=lt(this.cm,e.focusNode,e.focusOffset);if(!r||r.bad||!n||n.bad||0!=Nl($(r,n),t.from())||0!=Nl(Y(r,n),t.to())){var i=it(this.cm,t.from()),o=it(this.cm,t.to());if(i||o){var l=this.cm.display.view,s=e.rangeCount&&e.getRangeAt(0);if(i){if(!o){var a=l[l.length-1].measure,u=a.maps?a.maps[a.maps.length-1]:a.map;o={node:u[u.length-1],offset:u[u.length-2]-u[u.length-3]}}}else i={node:l[0].measure.map[2],offset:0};try{var c=Ds(i.node,i.offset,o.offset,o.node)}catch(f){}c&&(e.removeAllRanges(),e.addRange(c),s&&null==e.anchorNode?e.addRange(s):sl&&this.startGracePeriod()),this.rememberSelection()}}},startGracePeriod:function(){var e=this;clearTimeout(this.gracePeriod),this.gracePeriod=setTimeout(function(){e.gracePeriod=!1,e.selectionChanged()&&e.cm.operation(function(){e.cm.curOp.selectionChanged=!0})},20)},showMultipleSelections:function(e){Fo(this.cm.display.cursorDiv,e.cursors),Fo(this.cm.display.selectionDiv,e.selection)},rememberSelection:function(){var e=window.getSelection();this.lastAnchorNode=e.anchorNode,this.lastAnchorOffset=e.anchorOffset,this.lastFocusNode=e.focusNode,this.lastFocusOffset=e.focusOffset},selectionInEditor:function(){var e=window.getSelection();if(!e.rangeCount)return!1;var t=e.getRangeAt(0).commonAncestorContainer;return zs(this.div,t)},focus:function(){"nocursor"!=this.cm.options.readOnly&&this.div.focus()},blur:function(){this.div.blur()},getField:function(){return this.div},supportsTouch:function(){return!0},receivedFocus:function(){function e(){t.cm.state.focused&&(t.pollSelection(),t.polling.set(t.cm.options.pollInterval,e))}var t=this;this.selectionInEditor()?this.pollSelection():Tr(this.cm,function(){t.cm.curOp.selectionChanged=!0}),this.polling.set(this.cm.options.pollInterval,e)},selectionChanged:function(){var e=window.getSelection();return e.anchorNode!=this.lastAnchorNode||e.anchorOffset!=this.lastAnchorOffset||e.focusNode!=this.lastFocusNode||e.focusOffset!=this.lastFocusOffset},pollSelection:function(){if(!this.composing&&!this.gracePeriod&&this.selectionChanged()){var e=window.getSelection(),t=this.cm;this.rememberSelection();var r=lt(t,e.anchorNode,e.anchorOffset),n=lt(t,e.focusNode,e.focusOffset);r&&n&&Tr(t,function(){kt(t.doc,ht(r,n),Ts),(r.bad||n.bad)&&(t.curOp.selectionChanged=!0)})}},pollContent:function(){var e=this.cm,t=e.display,r=e.doc.sel.primary(),n=r.from(),i=r.to();if(n.line<t.viewFrom||i.line>t.viewTo-1)return!1;var o;if(n.line==t.viewFrom||0==(o=Ir(e,n.line)))var l=Yi(t.view[0].line),s=t.view[0].node;else var l=Yi(t.view[o].line),s=t.view[o-1].node.nextSibling;var a=Ir(e,i.line);if(a==t.view.length-1)var u=t.viewTo-1,c=t.view[a].node;else var u=Yi(t.view[a+1].line)-1,c=t.view[a+1].node.previousSibling;for(var f=Vs(at(e,s,c,l,u)),h=ji(e.doc,Al(l,0),Al(u,Ki(e.doc,u).text.length));f.length>1&&h.length>1;)if(To(f)==To(h))f.pop(),h.pop(),u--;else{if(f[0]!=h[0])break;f.shift(),h.shift(),l++}for(var d=0,p=0,g=f[0],v=h[0],m=Math.min(g.length,v.length);m>d&&g.charCodeAt(d)==v.charCodeAt(d);)++d;for(var y=To(f),b=To(h),w=Math.min(y.length-(1==f.length?d:0),b.length-(1==h.length?d:0));w>p&&y.charCodeAt(y.length-p-1)==b.charCodeAt(b.length-p-1);)++p;f[f.length-1]=y.slice(0,y.length-p),f[0]=f[0].slice(d);var x=Al(l,d),C=Al(u,h.length?To(h).length-p:0);return 
f.length>1||f[0]||Nl(x,C)?(kn(e.doc,f,x,C,"+input"),!0):void 0},ensurePolled:function(){this.forceCompositionEnd()},reset:function(){this.forceCompositionEnd()},forceCompositionEnd:function(){this.composing&&!this.composing.handled&&(this.applyComposition(this.composing),this.composing.handled=!0,this.div.blur(),this.div.focus())},applyComposition:function(e){e.data&&e.data!=e.startData&&Mr(this.cm,Q)(this.cm,e.data,0,e.sel)},setUneditable:function(e){e.setAttribute("contenteditable","false")},onKeyPress:function(e){e.preventDefault(),Mr(this.cm,Q)(this.cm,String.fromCharCode(null==e.charCode?e.keyCode:e.charCode),0)},onContextMenu:No,resetPosition:No,needsContentAttribute:!0},nt.prototype),e.inputStyles={textarea:tt,contenteditable:nt},ut.prototype={primary:function(){return this.ranges[this.primIndex]},equals:function(e){if(e==this)return!0;if(e.primIndex!=this.primIndex||e.ranges.length!=this.ranges.length)return!1;for(var t=0;t<this.ranges.length;t++){var r=this.ranges[t],n=e.ranges[t];if(0!=Nl(r.anchor,n.anchor)||0!=Nl(r.head,n.head))return!1}return!0},deepCopy:function(){for(var e=[],t=0;t<this.ranges.length;t++)e[t]=new ct(_(this.ranges[t].anchor),_(this.ranges[t].head));return new ut(e,this.primIndex)},somethingSelected:function(){for(var e=0;e<this.ranges.length;e++)if(!this.ranges[e].empty())return!0;return!1},contains:function(e,t){t||(t=e);for(var r=0;r<this.ranges.length;r++){var n=this.ranges[r];if(Nl(t,n.from())>=0&&Nl(e,n.to())<=0)return r}return-1}},ct.prototype={from:function(){return $(this.anchor,this.head)},to:function(){return Y(this.anchor,this.head)},empty:function(){return this.head.line==this.anchor.line&&this.head.ch==this.anchor.ch}};var Ol,Dl,Hl,Pl={left:0,right:0,top:0,bottom:0},Il=null,zl=0,El=0,Fl=0,Rl=null;cl?Rl=-.53:sl?Rl=15:pl?Rl=-.7:vl&&(Rl=-1/3);var Bl=function(e){var t=e.wheelDeltaX,r=e.wheelDeltaY;return null==t&&e.detail&&e.axis==e.HORIZONTAL_AXIS&&(t=e.detail),null==r&&e.detail&&e.axis==e.VERTICAL_AXIS?r=e.detail:null==r&&(r=e.wheelDelta),{x:t,y:r}};e.wheelEventPixels=function(e){var t=Bl(e);return t.x*=Rl,t.y*=Rl,t};var Gl=new So,Ul=null,Vl=e.changeEnd=function(e){return e.text?Al(e.from.line+e.text.length-1,To(e.text).length+(1==e.text.length?e.from.ch:0)):e.to};e.prototype={constructor:e,focus:function(){window.focus(),this.display.input.focus()},setOption:function(e,t){var r=this.options,n=r[e];(r[e]!=t||"mode"==e)&&(r[e]=t,jl.hasOwnProperty(e)&&Mr(this,jl[e])(this,t,n))},getOption:function(e){return this.options[e]},getDoc:function(){return this.doc},addKeyMap:function(e,t){this.state.keyMaps[t?"push":"unshift"](Bn(e))},removeKeyMap:function(e){for(var t=this.state.keyMaps,r=0;r<t.length;++r)if(t[r]==e||t[r].name==e)return t.splice(r,1),!0},addOverlay:Ar(function(t,r){var n=t.token?t:e.getMode(this.options,t);if(n.startState)throw new Error("Overlays may not be stateful.");this.state.overlays.push({mode:n,modeSpec:t,opaque:r&&r.opaque}),this.state.modeGen++,Dr(this)}),removeOverlay:Ar(function(e){for(var t=this.state.overlays,r=0;r<t.length;++r){var n=t[r].modeSpec;if(n==e||"string"==typeof e&&n.name==e)return t.splice(r,1),this.state.modeGen++,void Dr(this)}}),indentLine:Ar(function(e,t,r){"string"!=typeof t&&"number"!=typeof t&&(t=null==t?this.options.smartIndent?"smart":"prev":t?"add":"subtract"),vt(this.doc,e)&&Hn(this,e,t,r)}),indentSelection:Ar(function(e){for(var t=this.doc.sel.ranges,r=-1,n=0;n<t.length;n++){var i=t[n];if(i.empty())i.head.line>r&&(Hn(this,i.head.line,e,!0),r=i.head.line,n==this.doc.sel.primIndex&&On(this));else{var 
o=i.from(),l=i.to(),s=Math.max(r,o.line);r=Math.min(this.lastLine(),l.line-(l.ch?0:1))+1;for(var a=s;r>a;++a)Hn(this,a,e);var u=this.doc.sel.ranges;0==o.ch&&t.length==u.length&&u[n].from().ch>0&&xt(this.doc,n,new ct(o,u[n].to()),Ts)}}}),getTokenAt:function(e,t){return ki(this,e,t)},getLineTokens:function(e,t){return ki(this,Al(e),t,!0)},getTokenTypeAt:function(e){e=pt(this.doc,e);var t,r=Ai(this,Ki(this.doc,e.line)),n=0,i=(r.length-1)/2,o=e.ch;if(0==o)t=r[2];else for(;;){var l=n+i>>1;if((l?r[2*l-1]:0)>=o)i=l;else{if(!(r[2*l+1]<o)){t=r[2*l+2];break}n=l+1}}var s=t?t.indexOf("cm-overlay "):-1;return 0>s?t:0==s?null:t.slice(0,s-1)},getModeAt:function(t){var r=this.doc.mode;return r.innerMode?e.innerMode(r,this.getTokenAt(t).state).mode:r},getHelper:function(e,t){return this.getHelpers(e,t)[0]},getHelpers:function(e,t){var r=[];if(!Zl.hasOwnProperty(t))return r;var n=Zl[t],i=this.getModeAt(e);if("string"==typeof i[t])n[i[t]]&&r.push(n[i[t]]);else if(i[t])for(var o=0;o<i[t].length;o++){var l=n[i[t][o]];l&&r.push(l)}else i.helperType&&n[i.helperType]?r.push(n[i.helperType]):n[i.name]&&r.push(n[i.name]);for(var o=0;o<n._global.length;o++){var s=n._global[o];s.pred(i,this)&&-1==Mo(r,s.val)&&r.push(s.val)}return r},getStateAfter:function(e,t){var r=this.doc;return e=dt(r,null==e?r.first+r.size-1:e),Rt(this,e+1,t)},cursorCoords:function(e,t){var r,n=this.doc.sel.primary();return r=null==e?n.head:"object"==typeof e?pt(this.doc,e):e?n.from():n.to(),cr(this,r,t||"page")},charCoords:function(e,t){return ur(this,pt(this.doc,e),t||"page")},coordsChar:function(e,t){return e=ar(this,e,t||"page"),dr(this,e.left,e.top)},lineAtHeight:function(e,t){return e=ar(this,{top:e,left:0},t||"page").top,$i(this.doc,e+this.display.viewOffset)},heightAtLine:function(e,t){var r=!1,n=this.doc.first+this.doc.size-1;e<this.doc.first?e=this.doc.first:e>n&&(e=n,r=!0);var i=Ki(this.doc,e);return sr(this,i,{top:0,left:0},t||"page").top+(r?this.doc.height-qi(i):0)},defaultTextHeight:function(){return gr(this.display)},defaultCharWidth:function(){return vr(this.display)},setGutterMarker:Ar(function(e,t,r){return Pn(this.doc,e,"gutter",function(e){var n=e.gutterMarkers||(e.gutterMarkers={});return n[t]=r,!r&&Po(n)&&(e.gutterMarkers=null),!0})}),clearGutter:Ar(function(e){var t=this,r=t.doc,n=r.first;r.iter(function(r){r.gutterMarkers&&r.gutterMarkers[e]&&(r.gutterMarkers[e]=null,Hr(t,n,"gutter"),Po(r.gutterMarkers)&&(r.gutterMarkers=null)),++n})}),addLineWidget:Ar(function(e,t,r){return bi(this,e,t,r)}),removeLineWidget:function(e){e.clear()},lineInfo:function(e){if("number"==typeof e){if(!vt(this.doc,e))return null;var t=e;if(e=Ki(this.doc,e),!e)return null}else{var t=Yi(e);if(null==t)return null}return{line:t,handle:e,text:e.text,gutterMarkers:e.gutterMarkers,textClass:e.textClass,bgClass:e.bgClass,wrapClass:e.wrapClass,widgets:e.widgets}},getViewport:function(){return{from:this.display.viewFrom,to:this.display.viewTo}},addWidget:function(e,t,r,n,i){var o=this.display;e=cr(this,pt(this.doc,e));var l=e.bottom,s=e.left;if(t.style.position="absolute",t.setAttribute("cm-ignore-events","true"),this.display.input.setUneditable(t),o.sizer.appendChild(t),"over"==n)l=e.top;else if("above"==n||"near"==n){var 
a=Math.max(o.wrapper.clientHeight,this.doc.height),u=Math.max(o.sizer.clientWidth,o.lineSpace.clientWidth);("above"==n||e.bottom+t.offsetHeight>a)&&e.top>t.offsetHeight?l=e.top-t.offsetHeight:e.bottom+t.offsetHeight<=a&&(l=e.bottom),s+t.offsetWidth>u&&(s=u-t.offsetWidth)}t.style.top=l+"px",t.style.left=t.style.right="","right"==i?(s=o.sizer.clientWidth-t.offsetWidth,t.style.right="0px"):("left"==i?s=0:"middle"==i&&(s=(o.sizer.clientWidth-t.offsetWidth)/2),t.style.left=s+"px"),r&&An(this,s,l,s+t.offsetWidth,l+t.offsetHeight)},triggerOnKeyDown:Ar(ln),triggerOnKeyPress:Ar(un),triggerOnKeyUp:an,execCommand:function(e){return es.hasOwnProperty(e)?es[e](this):void 0},findPosH:function(e,t,r,n){var i=1;0>t&&(i=-1,t=-t);for(var o=0,l=pt(this.doc,e);t>o&&(l=zn(this.doc,l,i,r,n),!l.hitSide);++o);return l},moveH:Ar(function(e,t){var r=this;r.extendSelectionsBy(function(n){return r.display.shift||r.doc.extend||n.empty()?zn(r.doc,n.head,e,t,r.options.rtlMoveVisually):0>e?n.from():n.to()},As)}),deleteH:Ar(function(e,t){var r=this.doc.sel,n=this.doc;r.somethingSelected()?n.replaceSelection("",null,"+delete"):In(this,function(r){var i=zn(n,r.head,e,t,!1);return 0>e?{from:i,to:r.head}:{from:r.head,to:i}})}),findPosV:function(e,t,r,n){var i=1,o=n;0>t&&(i=-1,t=-t);for(var l=0,s=pt(this.doc,e);t>l;++l){var a=cr(this,s,"div");if(null==o?o=a.left:a.left=o,s=En(this,a,i,r),s.hitSide)break}return s},moveV:Ar(function(e,t){var r=this,n=this.doc,i=[],o=!r.display.shift&&!n.extend&&n.sel.somethingSelected();if(n.extendSelectionsBy(function(l){if(o)return 0>e?l.from():l.to();var s=cr(r,l.head,"div");null!=l.goalColumn&&(s.left=l.goalColumn),i.push(s.left);var a=En(r,s,e,t);return"page"==t&&l==n.sel.primary()&&Wn(r,null,ur(r,a,"div").top-s.top),a},As),i.length)for(var l=0;l<n.sel.ranges.length;l++)n.sel.ranges[l].goalColumn=i[l]}),findWordAt:function(e){var t=this.doc,r=Ki(t,e.line).text,n=e.ch,i=e.ch;if(r){var o=this.getHelper(e,"wordChars");(e.xRel<0||i==r.length)&&n?--n:++i;for(var l=r.charAt(n),s=Ho(l,o)?function(e){return Ho(e,o)}:/\s/.test(l)?function(e){return/\s/.test(e)}:function(e){return!/\s/.test(e)&&!Ho(e)};n>0&&s(r.charAt(n-1));)--n;for(;i<r.length&&s(r.charAt(i));)++i}return new ct(Al(e.line,n),Al(e.line,i))},toggleOverwrite:function(e){(null==e||e!=this.state.overwrite)&&((this.state.overwrite=!this.state.overwrite)?Bs(this.display.cursorDiv,"CodeMirror-overwrite"):Rs(this.display.cursorDiv,"CodeMirror-overwrite"),Cs(this,"overwriteToggle",this,this.state.overwrite))},hasFocus:function(){return this.display.input.getField()==Ro()},scrollTo:Ar(function(e,t){(null!=e||null!=t)&&Dn(this),null!=e&&(this.curOp.scrollLeft=e),null!=t&&(this.curOp.scrollTop=t)}),getScrollInfo:function(){var e=this.display.scroller;return{left:e.scrollLeft,top:e.scrollTop,height:e.scrollHeight-Vt(this)-this.display.barHeight,width:e.scrollWidth-Vt(this)-this.display.barWidth,clientHeight:jt(this),clientWidth:Kt(this)}},scrollIntoView:Ar(function(e,t){if(null==e?(e={from:this.doc.sel.primary().head,to:null},null==t&&(t=this.options.cursorScrollMargin)):"number"==typeof e?e={from:Al(e,0),to:null}:null==e.from&&(e={from:e,to:null}),e.to||(e.to=e.from),e.margin=t||0,null!=e.from.line)Dn(this),this.curOp.scrollToPos=e;else{var r=Nn(this,Math.min(e.from.left,e.to.left),Math.min(e.from.top,e.to.top)-e.margin,Math.max(e.from.right,e.to.right),Math.max(e.from.bottom,e.to.bottom)+e.margin);this.scrollTo(r.scrollLeft,r.scrollTop)}}),setSize:Ar(function(e,t){function r(e){return"number"==typeof e||/^\d+$/.test(String(e))?e+"px":e}var 
}function tr(e,t){if(!window.screen||null==screen.logicalXDPI||screen.logicalXDPI==screen.deviceXDPI||!_o(e))return t;var r=screen.logicalXDPI/screen.deviceXDPI,n=screen.logicalYDPI/screen.deviceYDPI;return{left:t.left*r,right:t.right*r,top:t.top*n,bottom:t.bottom*n}}function rr(e){if(e.measure&&(e.measure.cache={},e.measure.heights=null,e.rest))for(var t=0;t<e.rest.length;t++)e.measure.caches[t]={}}function nr(e){e.display.externalMeasure=null,Eo(e.display.lineMeasure);for(var t=0;t<e.display.view.length;t++)rr(e.display.view[t])}function ir(e){nr(e),e.display.cachedCharWidth=e.display.cachedTextHeight=e.display.cachedPaddingH=null,e.options.lineWrapping||(e.display.maxLineChanged=!0),e.display.lineNumChars=null}function or(){return window.pageXOffset||(document.documentElement||document.body).scrollLeft}function lr(){return window.pageYOffset||(document.documentElement||document.body).scrollTop}function sr(e,t,r,n){if(t.widgets)for(var i=0;i<t.widgets.length;++i)if(t.widgets[i].above){var o=yi(t.widgets[i]);r.top+=o,r.bottom+=o}if("line"==n)return r;n||(n="local");var l=qi(t);if("local"==n?l+=Bt(e.display):l-=e.display.viewOffset,"page"==n||"window"==n){var s=e.display.lineSpace.getBoundingClientRect();l+=s.top+("window"==n?0:lr());var a=s.left+("window"==n?0:or());r.left+=a,r.right+=a}return r.top+=l,r.bottom+=l,r}function ar(e,t,r){if("div"==r)return t;var n=t.left,i=t.top;if("page"==r)n-=or(),i-=lr();else if("local"==r||!r){var o=e.display.sizer.getBoundingClientRect();n+=o.left,i+=o.top}var l=e.display.lineSpace.getBoundingClientRect();return{left:n-l.left,top:i-l.top}}function ur(e,t,r,n,i){return n||(n=Ki(e.doc,t.line)),sr(e,n,$t(e,n,t.ch,i),r)}function cr(e,t,r,n,i,o){function l(t,l){var s=Qt(e,i,t,l?"right":"left",o);return l?s.left=s.right:s.right=s.left,sr(e,n,s,r)}function s(e,t){var r=a[t],n=r.level%2;return e==$o(r)&&t&&r.level<a[t-1].level?(r=a[--t],e=qo(r)-(r.level%2?0:1),n=!0):e==qo(r)&&t<a.length-1&&r.level<a[t+1].level&&(r=a[++t],e=$o(r)-r.level%2,n=!1),n&&e==r.to&&e>r.from?l(e-1):l(e,n)}n=n||Ki(e.doc,t.line),i||(i=Zt(e,n));var a=Zi(n),u=t.ch;if(!a)return l(u);var c=nl(a,u),f=s(u,c);return null!=Ys&&(f.other=s(u,Ys)),f}function fr(e,t){var r=0,t=pt(e.doc,t);e.options.lineWrapping||(r=vr(e.display)*t.ch);var n=Ki(e.doc,t.line),i=qi(n)+Bt(e.display);return{left:r,right:r,top:i,bottom:i+n.height}}function hr(e,t,r,n){var i=Al(e,t);return i.xRel=n,r&&(i.outside=!0),i}function dr(e,t,r){var n=e.doc;if(r+=e.display.viewOffset,0>r)return hr(n.first,0,!0,-1);var i=$i(n,r),o=n.first+n.size-1;if(i>o)return hr(n.first+n.size-1,Ki(n,o).text.length,!0,1);0>t&&(t=0);for(var l=Ki(n,i);;){var s=pr(e,l,i,t,r),a=ui(l),u=a&&a.find(0,!0);if(!a||!(s.ch>u.from.ch||s.ch==u.from.ch&&s.xRel>0))return s;i=Yi(l=u.to.line)}}function pr(e,t,r,n,i){function o(n){var i=cr(e,Al(r,n),"line",t,u);return s=!0,l>i.bottom?i.left-a:l<i.top?i.left+a:(s=!1,i.left)}var l=i-qi(t),s=!1,a=2*e.display.wrapper.clientWidth,u=Zt(e,t),c=Zi(t),f=t.text.length,h=Zo(t),d=Qo(t),p=o(h),g=s,v=o(d),m=s;if(n>v)return hr(r,d,m,1);for(;;){if(c?d==h||d==ol(t,h,1):1>=d-h){for(var y=p>n||v-n>=n-p?h:d,b=n-(y==h?p:v);Io(t.text.charAt(y));)++y;var w=hr(r,y,y==h?g:m,-1>b?-1:b>1?1:0);return w}var x=Math.ceil(f/2),C=h+x;if(c){C=h;for(var S=0;x>S;++S)C=ol(t,C,1)}var L=o(C);L>n?(d=C,v=L,(m=s)&&(v+=1e3),f=x):(h=C,p=L,g=s,f-=x)}}function gr(e){if(null!=e.cachedTextHeight)return e.cachedTextHeight;if(null==Ol){Ol=zo("pre");for(var 
t=0;49>t;++t)Ol.appendChild(document.createTextNode("x")),Ol.appendChild(zo("br"));Ol.appendChild(document.createTextNode("x"))}Fo(e.measure,Ol);var r=Ol.offsetHeight/50;return r>3&&(e.cachedTextHeight=r),Eo(e.measure),r||1}function vr(e){if(null!=e.cachedCharWidth)return e.cachedCharWidth;var t=zo("span","xxxxxxxxxx"),r=zo("pre",[t]);Fo(e.measure,r);var n=t.getBoundingClientRect(),i=(n.right-n.left)/10;return i>2&&(e.cachedCharWidth=i),i||10}function mr(e){e.curOp={cm:e,viewChanged:!1,startHeight:e.doc.height,forceUpdate:!1,updateInput:null,typing:!1,changeObjs:null,cursorActivityHandlers:null,cursorActivityCalled:0,selectionChanged:!1,updateMaxLine:!1,scrollLeft:null,scrollTop:null,scrollToPos:null,id:++zl},Il?Il.ops.push(e.curOp):e.curOp.ownsGroup=Il={ops:[e.curOp],delayedCallbacks:[]}}function yr(e){var t=e.delayedCallbacks,r=0;do{for(;r<t.length;r++)t[r]();for(var n=0;n<e.ops.length;n++){var i=e.ops[n];if(i.cursorActivityHandlers)for(;i.cursorActivityCalled<i.cursorActivityHandlers.length;)i.cursorActivityHandlers[i.cursorActivityCalled++](i.cm)}}while(r<t.length)}function br(e){var t=e.curOp,r=t.ownsGroup;if(r)try{yr(r)}finally{Il=null;for(var n=0;n<r.ops.length;n++)r.ops[n].cm.curOp=null;wr(r)}}function wr(e){for(var t=e.ops,r=0;r<t.length;r++)xr(t[r]);for(var r=0;r<t.length;r++)Cr(t[r]);for(var r=0;r<t.length;r++)Sr(t[r]);for(var r=0;r<t.length;r++)Lr(t[r]);for(var r=0;r<t.length;r++)kr(t[r])}function xr(e){var t=e.cm,r=t.display;T(t),e.updateMaxLine&&h(t),e.mustUpdate=e.viewChanged||e.forceUpdate||null!=e.scrollTop||e.scrollToPos&&(e.scrollToPos.from.line<r.viewFrom||e.scrollToPos.to.line>=r.viewTo)||r.maxLineChanged&&t.options.lineWrapping,e.update=e.mustUpdate&&new k(t,e.mustUpdate&&{top:e.scrollTop,ensure:e.scrollToPos},e.forceUpdate)}function Cr(e){e.updatedDisplay=e.mustUpdate&&M(e.cm,e.update)}function Sr(e){var t=e.cm,r=t.display;e.updatedDisplay&&O(t),e.barMeasure=p(t),r.maxLineChanged&&!t.options.lineWrapping&&(e.adjustWidthTo=$t(t,r.maxLine,r.maxLine.text.length).left+3,t.display.sizerWidth=e.adjustWidthTo,e.barMeasure.scrollWidth=Math.max(r.scroller.clientWidth,r.sizer.offsetLeft+e.adjustWidthTo+Vt(t)+t.display.barWidth),e.maxScrollLeft=Math.max(0,r.sizer.offsetLeft+e.adjustWidthTo-Kt(t))),(e.updatedDisplay||e.selectionChanged)&&(e.preparedSelection=r.input.prepareSelection())}function Lr(e){var t=e.cm;null!=e.adjustWidthTo&&(t.display.sizer.style.minWidth=e.adjustWidthTo+"px",e.maxScrollLeft<t.doc.scrollLeft&&Qr(t,Math.min(t.display.scroller.scrollLeft,e.maxScrollLeft),!0),t.display.maxLineChanged=!1),e.preparedSelection&&t.display.input.showSelection(e.preparedSelection),e.updatedDisplay&&W(t,e.barMeasure),(e.updatedDisplay||e.startHeight!=t.doc.height)&&y(t,e.barMeasure),e.selectionChanged&&It(t),t.state.focused&&e.updateInput&&t.display.input.reset(e.typing)}function kr(e){var t=e.cm,r=t.display,n=t.doc;if(e.updatedDisplay&&A(t,e.update),null==r.wheelStartX||null==e.scrollTop&&null==e.scrollLeft&&!e.scrollToPos||(r.wheelStartX=r.wheelStartY=null),null==e.scrollTop||r.scroller.scrollTop==e.scrollTop&&!e.forceScroll||(n.scrollTop=Math.max(0,Math.min(r.scroller.scrollHeight-r.scroller.clientHeight,e.scrollTop)),r.scrollbars.setScrollTop(n.scrollTop),r.scroller.scrollTop=n.scrollTop),null==e.scrollLeft||r.scroller.scrollLeft==e.scrollLeft&&!e.forceScroll||(n.scrollLeft=Math.max(0,Math.min(r.scroller.scrollWidth-Kt(t),e.scrollLeft)),r.scrollbars.setScrollLeft(n.scrollLeft),r.scroller.scrollLeft=n.scrollLeft,x(t)),e.scrollToPos){var 
i=Mn(t,pt(n,e.scrollToPos.from),pt(n,e.scrollToPos.to),e.scrollToPos.margin);e.scrollToPos.isCursor&&t.state.focused&&Tn(t,i)}var o=e.maybeHiddenMarkers,l=e.maybeUnhiddenMarkers;if(o)for(var s=0;s<o.length;++s)o[s].lines.length||Cs(o[s],"hide");if(l)for(var s=0;s<l.length;++s)l[s].lines.length&&Cs(l[s],"unhide");r.wrapper.offsetHeight&&(n.scrollTop=t.display.scroller.scrollTop),e.changeObjs&&Cs(t,"changes",t,e.changeObjs),e.update&&e.update.finish()}function Tr(e,t){if(e.curOp)return t();mr(e);try{return t()}finally{br(e)}}function Mr(e,t){return function(){if(e.curOp)return t.apply(e,arguments);mr(e);try{return t.apply(e,arguments)}finally{br(e)}}}function Ar(e){return function(){if(this.curOp)return e.apply(this,arguments);mr(this);try{return e.apply(this,arguments)}finally{br(this)}}}function Nr(e){return function(){var t=this.cm;if(!t||t.curOp)return e.apply(this,arguments);mr(t);try{return e.apply(this,arguments)}finally{br(t)}}}function Wr(e,t,r){this.line=t,this.rest=hi(t),this.size=this.rest?Yi(To(this.rest))-r+1:1,this.node=this.text=null,this.hidden=gi(e,t)}function Or(e,t,r){for(var n,i=[],o=t;r>o;o=n){var l=new Wr(e.doc,Ki(e.doc,o),o);n=o+l.size,i.push(l)}return i}function Dr(e,t,r,n){null==t&&(t=e.doc.first),null==r&&(r=e.doc.first+e.doc.size),n||(n=0);var i=e.display;if(n&&r<i.viewTo&&(null==i.updateLineNumbers||i.updateLineNumbers>t)&&(i.updateLineNumbers=t),e.curOp.viewChanged=!0,t>=i.viewTo)Ml&&di(e.doc,t)<i.viewTo&&Pr(e);else if(r<=i.viewFrom)Ml&&pi(e.doc,r+n)>i.viewFrom?Pr(e):(i.viewFrom+=n,i.viewTo+=n);else if(t<=i.viewFrom&&r>=i.viewTo)Pr(e);else if(t<=i.viewFrom){var o=zr(e,r,r+n,1);o?(i.view=i.view.slice(o.index),i.viewFrom=o.lineN,i.viewTo+=n):Pr(e)}else if(r>=i.viewTo){var o=zr(e,t,t,-1);o?(i.view=i.view.slice(0,o.index),i.viewTo=o.lineN):Pr(e)}else{var l=zr(e,t,t,-1),s=zr(e,r,r+n,1);l&&s?(i.view=i.view.slice(0,l.index).concat(Or(e,l.lineN,s.lineN)).concat(i.view.slice(s.index)),i.viewTo+=n):Pr(e)}var a=i.externalMeasured;a&&(r<a.lineN?a.lineN+=n:t<a.lineN+a.size&&(i.externalMeasured=null))}function Hr(e,t,r){e.curOp.viewChanged=!0;var n=e.display,i=e.display.externalMeasured;if(i&&t>=i.lineN&&t<i.lineN+i.size&&(n.externalMeasured=null),!(t<n.viewFrom||t>=n.viewTo)){var o=n.view[Ir(e,t)];if(null!=o.node){var l=o.changes||(o.changes=[]);-1==Mo(l,r)&&l.push(r)}}}function Pr(e){e.display.viewFrom=e.display.viewTo=e.doc.first,e.display.view=[],e.display.viewOffset=0}function Ir(e,t){if(t>=e.display.viewTo)return null;if(t-=e.display.viewFrom,0>t)return null;for(var r=e.display.view,n=0;n<r.length;n++)if(t-=r[n].size,0>t)return n}function zr(e,t,r,n){var i,o=Ir(e,t),l=e.display.view;if(!Ml||r==e.doc.first+e.doc.size)return{index:o,lineN:r};for(var s=0,a=e.display.viewFrom;o>s;s++)a+=l[s].size;if(a!=t){if(n>0){if(o==l.length-1)return null;i=a+l[o].size-t,o++}else i=a-t;t+=i,r+=i}for(;di(e.doc,r)!=r;){if(o==(0>n?0:l.length-1))return null;r+=n*l[o-(0>n?1:0)].size,o+=n}return{index:o,lineN:r}}function Er(e,t,r){var n=e.display,i=n.view;0==i.length||t>=n.viewTo||r<=n.viewFrom?(n.view=Or(e,t,r),n.viewFrom=t):(n.viewFrom>t?n.view=Or(e,t,n.viewFrom).concat(n.view):n.viewFrom<t&&(n.view=n.view.slice(Ir(e,t))),n.viewFrom=t,n.viewTo<r?n.view=n.view.concat(Or(e,n.viewTo,r)):n.viewTo>r&&(n.view=n.view.slice(0,Ir(e,r)))),n.viewTo=r}function Fr(e){for(var t=e.display.view,r=0,n=0;n<t.length;n++){var i=t[n];i.hidden||i.node&&!i.changes||++r}return r}function Rr(e){function t(){o.activeTouch&&(l=setTimeout(function(){o.activeTouch=null},1e3),s=o.activeTouch,s.end=+new Date)}function 
r(e){if(1!=e.touches.length)return!1;var t=e.touches[0];return t.radiusX<=1&&t.radiusY<=1}function n(e,t){if(null==t.left)return!0;var r=t.left-e.left,n=t.top-e.top;return r*r+n*n>400}function i(t){bo(e,t)||bs(t)}var o=e.display;ws(o.scroller,"mousedown",Mr(e,Vr)),cl&&11>fl?ws(o.scroller,"dblclick",Mr(e,function(t){if(!bo(e,t)){var r=Ur(e,t);if(r&&!Yr(e,t)&&!Gr(e.display,t)){ms(t);var n=e.findWordAt(r);bt(e.doc,n.anchor,n.head)}}})):ws(o.scroller,"dblclick",function(t){bo(e,t)||ms(t)}),kl||ws(o.scroller,"contextmenu",function(t){hn(e,t)});var l,s={end:0};ws(o.scroller,"touchstart",function(e){if(!r(e)){clearTimeout(l);var t=+new Date;o.activeTouch={start:t,moved:!1,prev:t-s.end<=300?s:null},1==e.touches.length&&(o.activeTouch.left=e.touches[0].pageX,o.activeTouch.top=e.touches[0].pageY)}}),ws(o.scroller,"touchmove",function(){o.activeTouch&&(o.activeTouch.moved=!0)}),ws(o.scroller,"touchend",function(r){var i=o.activeTouch;if(i&&!Gr(o,r)&&null!=i.left&&!i.moved&&new Date-i.start<300){var l,s=e.coordsChar(o.activeTouch,"page");l=!i.prev||n(i,i.prev)?new ct(s,s):!i.prev.prev||n(i,i.prev.prev)?e.findWordAt(s):new ct(Al(s.line,0),pt(e.doc,Al(s.line+1,0))),e.setSelection(l.anchor,l.head),e.focus(),ms(r)}t()}),ws(o.scroller,"touchcancel",t),ws(o.scroller,"scroll",function(){o.scroller.clientHeight&&(Zr(e,o.scroller.scrollTop),Qr(e,o.scroller.scrollLeft,!0),Cs(e,"scroll",e))}),ws(o.scroller,"mousewheel",function(t){Jr(e,t)}),ws(o.scroller,"DOMMouseScroll",function(t){Jr(e,t)}),ws(o.wrapper,"scroll",function(){o.wrapper.scrollTop=o.wrapper.scrollLeft=0}),e.options.dragDrop&&(ws(o.scroller,"dragstart",function(t){qr(e,t)}),ws(o.scroller,"dragenter",i),ws(o.scroller,"dragover",i),ws(o.scroller,"drop",Mr(e,$r)));var a=o.input.getField();ws(a,"keyup",function(t){an.call(e,t)}),ws(a,"keydown",Mr(e,ln)),ws(a,"keypress",Mr(e,un)),ws(a,"focus",Do(cn,e)),ws(a,"blur",Do(fn,e))}function Br(e){var t=e.display;(t.lastWrapHeight!=t.wrapper.clientHeight||t.lastWrapWidth!=t.wrapper.clientWidth)&&(t.cachedCharWidth=t.cachedTextHeight=t.cachedPaddingH=null,t.scrollbarsClipped=!1,e.setSize())}function Gr(e,t){for(var r=go(t);r!=e.wrapper;r=r.parentNode)if(!r||1==r.nodeType&&"true"==r.getAttribute("cm-ignore-events")||r.parentNode==e.sizer&&r!=e.mover)return!0}function Ur(e,t,r,n){var i=e.display;if(!r&&"true"==go(t).getAttribute("cm-not-content"))return null;var o,l,s=i.lineSpace.getBoundingClientRect();try{o=t.clientX-s.left,l=t.clientY-s.top}catch(t){return null}var a,u=dr(e,o,l);if(n&&1==u.xRel&&(a=Ki(e.doc,u.line).text).length==u.ch){var c=Ns(a,a.length,e.options.tabSize)-a.length;u=Al(u.line,Math.max(0,Math.round((o-Ut(e.display).left)/vr(e.display))-c))}return u}function Vr(e){var t=this,r=t.display;if(!(r.activeTouch&&r.input.supportsTouch()||bo(t,e))){if(r.shift=e.shiftKey,Gr(r,e))return void(hl||(r.scroller.draggable=!1,setTimeout(function(){r.scroller.draggable=!0},100)));if(!Yr(t,e)){var n=Ur(t,e);switch(window.focus(),vo(e)){case 1:n?Kr(t,e,n):go(e)==r.scroller&&ms(e);break;case 2:hl&&(t.state.lastMiddleDown=+new Date),n&&bt(t.doc,n),setTimeout(function(){r.input.focus()},20),ms(e);break;case 3:kl&&hn(t,e)}}}}function Kr(e,t,r){cl?setTimeout(Do(q,e),0):q(e);var n,i=+new Date;Hl&&Hl.time>i-400&&0==Nl(Hl.pos,r)?n="triple":Dl&&Dl.time>i-400&&0==Nl(Dl.pos,r)?(n="double",Hl={time:i,pos:r}):(n="single",Dl={time:i,pos:r});var o,l=e.doc.sel,s=xl?t.metaKey:t.ctrlKey;e.options.dragDrop&&Us&&!Z(e)&&"single"==n&&(o=l.contains(r))>-1&&!l.ranges[o].empty()?jr(e,t,r,s):Xr(e,t,r,n,s)}function jr(e,t,r,n){var 
i=e.display,o=Mr(e,function(l){hl&&(i.scroller.draggable=!1),e.state.draggingText=!1,xs(document,"mouseup",o),xs(i.scroller,"drop",o),Math.abs(t.clientX-l.clientX)+Math.abs(t.clientY-l.clientY)<10&&(ms(l),n||bt(e.doc,r),hl||cl&&9==fl?setTimeout(function(){document.body.focus(),i.input.focus()},20):i.input.focus())});hl&&(i.scroller.draggable=!0),e.state.draggingText=o,i.scroller.dragDrop&&i.scroller.dragDrop(),ws(document,"mouseup",o),ws(i.scroller,"drop",o)}function Xr(e,t,r,n,i){function o(t){if(0!=Nl(v,t))if(v=t,"rect"==n){for(var i=[],o=e.options.tabSize,l=Ns(Ki(u,r.line).text,r.ch,o),s=Ns(Ki(u,t.line).text,t.ch,o),a=Math.min(l,s),d=Math.max(l,s),p=Math.min(r.line,t.line),g=Math.min(e.lastLine(),Math.max(r.line,t.line));g>=p;p++){var m=Ki(u,p).text,y=Lo(m,a,o);a==d?i.push(new ct(Al(p,y),Al(p,y))):m.length>y&&i.push(new ct(Al(p,y),Al(p,Lo(m,d,o))))}i.length||i.push(new ct(r,r)),kt(u,ft(h.ranges.slice(0,f).concat(i),f),{origin:"*mouse",scroll:!1}),e.scrollIntoView(t)}else{var b=c,w=b.anchor,x=t;if("single"!=n){if("double"==n)var C=e.findWordAt(t);else var C=new ct(Al(t.line,0),pt(u,Al(t.line+1,0)));Nl(C.anchor,w)>0?(x=C.head,w=$(b.from(),C.anchor)):(x=C.anchor,w=Y(b.to(),C.head))}var i=h.ranges.slice(0);i[f]=new ct(pt(u,w),x),kt(u,ft(i,f),Ms)}}function l(t){var r=++y,i=Ur(e,t,!0,"rect"==n);if(i)if(0!=Nl(i,v)){q(e),o(i);var s=w(a,u);(i.line>=s.to||i.line<s.from)&&setTimeout(Mr(e,function(){y==r&&l(t)}),150)}else{var c=t.clientY<m.top?-20:t.clientY>m.bottom?20:0;c&&setTimeout(Mr(e,function(){y==r&&(a.scroller.scrollTop+=c,l(t))}),50)}}function s(e){y=1/0,ms(e),a.input.focus(),xs(document,"mousemove",b),xs(document,"mouseup",x),u.history.lastSelOrigin=null}var a=e.display,u=e.doc;ms(t);var c,f,h=u.sel,d=h.ranges;if(i&&!t.shiftKey?(f=u.sel.contains(r),c=f>-1?d[f]:new ct(r,r)):c=u.sel.primary(),t.altKey)n="rect",i||(c=new ct(r,r)),r=Ur(e,t,!0,!0),f=-1;else if("double"==n){var p=e.findWordAt(r);c=e.display.shift||u.extend?yt(u,c,p.anchor,p.head):p}else if("triple"==n){var g=new ct(Al(r.line,0),pt(u,Al(r.line+1,0)));c=e.display.shift||u.extend?yt(u,c,g.anchor,g.head):g}else c=yt(u,c,r);i?-1==f?(f=d.length,kt(u,ft(d.concat([c]),f),{scroll:!1,origin:"*mouse"})):d.length>1&&d[f].empty()&&"single"==n?(kt(u,ft(d.slice(0,f).concat(d.slice(f+1)),0)),h=u.sel):xt(u,f,c,Ms):(f=0,kt(u,new ut([c],0),Ms),h=u.sel);var v=r,m=a.wrapper.getBoundingClientRect(),y=0,b=Mr(e,function(e){vo(e)?l(e):s(e)}),x=Mr(e,s);ws(document,"mousemove",b),ws(document,"mouseup",x)}function _r(e,t,r,n,i){try{var o=t.clientX,l=t.clientY}catch(t){return!1}if(o>=Math.floor(e.display.gutters.getBoundingClientRect().right))return!1;n&&ms(t);var s=e.display,a=s.lineDiv.getBoundingClientRect();if(l>a.bottom||!xo(e,r))return po(t);l-=a.top-s.viewOffset;for(var u=0;u<e.options.gutters.length;++u){var c=s.gutters.childNodes[u];if(c&&c.getBoundingClientRect().right>=o){var f=$i(e.doc,l),h=e.options.gutters[u];return i(e,r,e,f,h,t),po(t)}}}function Yr(e,t){return _r(e,t,"gutterClick",!0,mo)}function $r(e){var t=this;if(!bo(t,e)&&!Gr(t.display,e)){ms(e),cl&&(El=+new Date);var r=Ur(t,e,!0),n=e.dataTransfer.files;if(r&&!Z(t))if(n&&n.length&&window.FileReader&&window.File)for(var i=n.length,o=Array(i),l=0,s=function(e,n){var s=new FileReader;s.onload=Mr(t,function(){if(o[n]=s.result,++l==i){r=pt(t.doc,r);var e={from:r,to:r,text:Vs(o.join("\n")),origin:"paste"};bn(t.doc,e),Lt(t.doc,ht(r,Vl(e)))}}),s.readAsText(e)},a=0;i>a;++a)s(n[a],a);else{if(t.state.draggingText&&t.doc.sel.contains(r)>-1)return t.state.draggingText(e),void 
setTimeout(function(){t.display.input.focus()},20);try{var o=e.dataTransfer.getData("Text");if(o){if(t.state.draggingText&&!(xl?e.metaKey:e.ctrlKey))var u=t.listSelections();if(Tt(t.doc,ht(r,r)),u)for(var a=0;a<u.length;++a)kn(t.doc,"",u[a].anchor,u[a].head,"drag");t.replaceSelection(o,"around","paste"),t.display.input.focus()}}catch(e){}}}}function qr(e,t){if(cl&&(!e.state.draggingText||+new Date-El<100))return void bs(t);if(!bo(e,t)&&!Gr(e.display,t)&&(t.dataTransfer.setData("Text",e.getSelection()),t.dataTransfer.setDragImage&&!vl)){var r=zo("img",null,null,"position: fixed; left: 0; top: 0;");r.src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==",gl&&(r.width=r.height=1,e.display.wrapper.appendChild(r),r._top=r.offsetTop),t.dataTransfer.setDragImage(r,0,0),gl&&r.parentNode.removeChild(r)}}function Zr(e,t){Math.abs(e.doc.scrollTop-t)<2||(e.doc.scrollTop=t,sl||N(e,{top:t}),e.display.scroller.scrollTop!=t&&(e.display.scroller.scrollTop=t),e.display.scrollbars.setScrollTop(t),sl&&N(e),zt(e,100))}function Qr(e,t,r){(r?t==e.doc.scrollLeft:Math.abs(e.doc.scrollLeft-t)<2)||(t=Math.min(t,e.display.scroller.scrollWidth-e.display.scroller.clientWidth),e.doc.scrollLeft=t,x(e),e.display.scroller.scrollLeft!=t&&(e.display.scroller.scrollLeft=t),e.display.scrollbars.setScrollLeft(t))}function Jr(e,t){var r=Bl(t),n=r.x,i=r.y,o=e.display,l=o.scroller;if(n&&l.scrollWidth>l.clientWidth||i&&l.scrollHeight>l.clientHeight){if(i&&xl&&hl)e:for(var s=t.target,a=o.view;s!=l;s=s.parentNode)for(var u=0;u<a.length;u++)if(a[u].node==s){e.display.currentWheelTarget=s;break e}if(n&&!sl&&!gl&&null!=Rl)return i&&Zr(e,Math.max(0,Math.min(l.scrollTop+i*Rl,l.scrollHeight-l.clientHeight))),Qr(e,Math.max(0,Math.min(l.scrollLeft+n*Rl,l.scrollWidth-l.clientWidth))),ms(t),void(o.wheelStartX=null);if(i&&null!=Rl){var c=i*Rl,f=e.doc.scrollTop,h=f+o.wrapper.clientHeight;0>c?f=Math.max(0,f+c-50):h=Math.min(e.doc.height,h+c+50),N(e,{top:f,bottom:h})}20>Fl&&(null==o.wheelStartX?(o.wheelStartX=l.scrollLeft,o.wheelStartY=l.scrollTop,o.wheelDX=n,o.wheelDY=i,setTimeout(function(){if(null!=o.wheelStartX){var e=l.scrollLeft-o.wheelStartX,t=l.scrollTop-o.wheelStartY,r=t&&o.wheelDY&&t/o.wheelDY||e&&o.wheelDX&&e/o.wheelDX;o.wheelStartX=o.wheelStartY=null,r&&(Rl=(Rl*Fl+r)/(Fl+1),++Fl)}},200)):(o.wheelDX+=n,o.wheelDY+=i))}}function en(e,t,r){if("string"==typeof t&&(t=es[t],!t))return!1;e.display.input.ensurePolled();var n=e.display.shift,i=!1;try{Z(e)&&(e.state.suppressEdits=!0),r&&(e.display.shift=!1),i=t(e)!=ks}finally{e.display.shift=n,e.state.suppressEdits=!1}return i}function tn(e,t,r){for(var n=0;n<e.state.keyMaps.length;n++){var i=rs(t,e.state.keyMaps[n],r,e);if(i)return i}return e.options.extraKeys&&rs(t,e.options.extraKeys,r,e)||rs(t,e.options.keyMap,r,e)}function rn(e,t,r,n){var i=e.state.keySeq;if(i){if(ns(t))return"handled";Gl.set(50,function(){e.state.keySeq==i&&(e.state.keySeq=null,e.display.input.reset())}),t=i+" "+t}var o=tn(e,t,n);return"multi"==o&&(e.state.keySeq=t),"handled"==o&&mo(e,"keyHandled",e,t,r),("handled"==o||"multi"==o)&&(ms(r),It(e)),i&&!o&&/\'$/.test(t)?(ms(r),!0):!!o}function nn(e,t){var r=is(t,!0);return r?t.shiftKey&&!e.state.keySeq?rn(e,"Shift-"+r,t,function(t){return en(e,t,!0)})||rn(e,r,t,function(t){return("string"==typeof t?/^go[A-Z]/.test(t):t.motion)?en(e,t):void 0}):rn(e,r,t,function(t){return en(e,t)}):!1}function on(e,t,r){return rn(e,"'"+r+"'",t,function(t){return en(e,t,!0)})}function ln(e){var t=this;if(q(t),!bo(t,e)){cl&&11>fl&&27==e.keyCode&&(e.returnValue=!1);var 
r=e.keyCode;t.display.shift=16==r||e.shiftKey;var n=nn(t,e);gl&&(Ul=n?r:null,!n&&88==r&&!js&&(xl?e.metaKey:e.ctrlKey)&&t.replaceSelection("",null,"cut")),18!=r||/\bCodeMirror-crosshair\b/.test(t.display.lineDiv.className)||sn(t)}}function sn(e){function t(e){18!=e.keyCode&&e.altKey||(Rs(r,"CodeMirror-crosshair"),xs(document,"keyup",t),xs(document,"mouseover",t))}var r=e.display.lineDiv;Bs(r,"CodeMirror-crosshair"),ws(document,"keyup",t),ws(document,"mouseover",t)}function an(e){16==e.keyCode&&(this.doc.sel.shift=!1),bo(this,e)}function un(e){var t=this;if(!(Gr(t.display,e)||bo(t,e)||e.ctrlKey&&!e.altKey||xl&&e.metaKey)){var r=e.keyCode,n=e.charCode;if(gl&&r==Ul)return Ul=null,void ms(e);if(!gl||e.which&&!(e.which<10)||!nn(t,e)){var i=String.fromCharCode(null==n?r:n);on(t,e,i)||t.display.input.onKeyPress(e)}}}function cn(e){"nocursor"!=e.options.readOnly&&(e.state.focused||(Cs(e,"focus",e),e.state.focused=!0,Bs(e.display.wrapper,"CodeMirror-focused"),e.curOp||e.display.selForContextMenu==e.doc.sel||(e.display.input.reset(),hl&&setTimeout(function(){e.display.input.reset(!0)},20)),e.display.input.receivedFocus()),It(e))}function fn(e){e.state.focused&&(Cs(e,"blur",e),e.state.focused=!1,Rs(e.display.wrapper,"CodeMirror-focused")),clearInterval(e.display.blinker),setTimeout(function(){e.state.focused||(e.display.shift=!1)},150)}function hn(e,t){Gr(e.display,t)||dn(e,t)||e.display.input.onContextMenu(t)}function dn(e,t){return xo(e,"gutterContextMenu")?_r(e,t,"gutterContextMenu",!1,Cs):!1}function pn(e,t){if(Nl(e,t.from)<0)return e;if(Nl(e,t.to)<=0)return Vl(t);var r=e.line+t.text.length-(t.to.line-t.from.line)-1,n=e.ch;return e.line==t.to.line&&(n+=Vl(t).ch-t.to.ch),Al(r,n)}function gn(e,t){for(var r=[],n=0;n<e.sel.ranges.length;n++){var i=e.sel.ranges[n];r.push(new ct(pn(i.anchor,t),pn(i.head,t)))}return ft(r,e.sel.primIndex)}function vn(e,t,r){return e.line==t.line?Al(r.line,e.ch-t.ch+r.ch):Al(r.line+(e.line-t.line),e.ch)}function mn(e,t,r){for(var n=[],i=Al(e.first,0),o=i,l=0;l<t.length;l++){var s=t[l],a=vn(s.from,i,o),u=vn(Vl(s),i,o);if(i=s.to,o=u,"around"==r){var c=e.sel.ranges[l],f=Nl(c.head,c.anchor)<0;n[l]=new ct(f?u:a,f?a:u)}else n[l]=new ct(a,a)}return new ut(n,e.sel.primIndex)}function yn(e,t,r){var n={canceled:!1,from:t.from,to:t.to,text:t.text,origin:t.origin,cancel:function(){this.canceled=!0}};return r&&(n.update=function(t,r,n,i){t&&(this.from=pt(e,t)),r&&(this.to=pt(e,r)),n&&(this.text=n),void 0!==i&&(this.origin=i)}),Cs(e,"beforeChange",e,n),e.cm&&Cs(e.cm,"beforeChange",e.cm,n),n.canceled?null:{from:n.from,to:n.to,text:n.text,origin:n.origin}}function bn(e,t,r){if(e.cm){if(!e.cm.curOp)return Mr(e.cm,bn)(e,t,r);if(e.cm.state.suppressEdits)return}if(!(xo(e,"beforeChange")||e.cm&&xo(e.cm,"beforeChange"))||(t=yn(e,t,!0))){var n=Tl&&!r&&ti(e,t.from,t.to);if(n)for(var i=n.length-1;i>=0;--i)wn(e,{from:n[i].from,to:n[i].to,text:i?[""]:t.text});else wn(e,t)}}function wn(e,t){if(1!=t.text.length||""!=t.text[0]||0!=Nl(t.from,t.to)){var r=gn(e,t);ro(e,t,r,e.cm?e.cm.curOp.id:0/0),Sn(e,t,r,Qn(e,t));var n=[];Ui(e,function(e,r){r||-1!=Mo(n,e.history)||(ho(e.history,t),n.push(e.history)),Sn(e,t,null,Qn(e,t))})}}function xn(e,t,r){if(!e.cm||!e.cm.state.suppressEdits){for(var n,i=e.history,o=e.sel,l="undo"==t?i.done:i.undone,s="undo"==t?i.undone:i.done,a=0;a<l.length&&(n=l[a],r?!n.ranges||n.equals(e.sel):n.ranges);a++);if(a!=l.length){for(i.lastOrigin=i.lastSelOrigin=null;n=l.pop(),n.ranges;){if(oo(n,s),r&&!n.equals(e.sel))return void kt(e,n,{clearRedo:!1});o=n}var 
u=[];oo(o,s),s.push({changes:u,generation:i.generation}),i.generation=n.generation||++i.maxGeneration;for(var c=xo(e,"beforeChange")||e.cm&&xo(e.cm,"beforeChange"),a=n.changes.length-1;a>=0;--a){var f=n.changes[a];if(f.origin=t,c&&!yn(e,f,!1))return void(l.length=0);u.push(Ji(e,f));var h=a?gn(e,f):To(l);Sn(e,f,h,ei(e,f)),!a&&e.cm&&e.cm.scrollIntoView({from:f.from,to:Vl(f)});var d=[];Ui(e,function(e,t){t||-1!=Mo(d,e.history)||(ho(e.history,f),d.push(e.history)),Sn(e,f,null,ei(e,f))})}}}}function Cn(e,t){if(0!=t&&(e.first+=t,e.sel=new ut(Ao(e.sel.ranges,function(e){return new ct(Al(e.anchor.line+t,e.anchor.ch),Al(e.head.line+t,e.head.ch))}),e.sel.primIndex),e.cm)){Dr(e.cm,e.first,e.first-t,t);for(var r=e.cm.display,n=r.viewFrom;n<r.viewTo;n++)Hr(e.cm,n,"gutter")}}function Sn(e,t,r,n){if(e.cm&&!e.cm.curOp)return Mr(e.cm,Sn)(e,t,r,n);if(t.to.line<e.first)return void Cn(e,t.text.length-1-(t.to.line-t.from.line));if(!(t.from.line>e.lastLine())){if(t.from.line<e.first){var i=t.text.length-1-(e.first-t.from.line);Cn(e,i),t={from:Al(e.first,0),to:Al(t.to.line+i,t.to.ch),text:[To(t.text)],origin:t.origin}}var o=e.lastLine();t.to.line>o&&(t={from:t.from,to:Al(o,Ki(e,o).text.length),text:[t.text[0]],origin:t.origin}),t.removed=ji(e,t.from,t.to),r||(r=gn(e,t)),e.cm?Ln(e.cm,t,n):Ri(e,t,n),Tt(e,r,Ts)}}function Ln(e,t,r){var n=e.doc,i=e.display,l=t.from,s=t.to,a=!1,u=l.line;e.options.lineWrapping||(u=Yi(fi(Ki(n,l.line))),n.iter(u,s.line+1,function(e){return e==i.maxLine?(a=!0,!0):void 0})),n.sel.contains(t.from,t.to)>-1&&wo(e),Ri(n,t,r,o(e)),e.options.lineWrapping||(n.iter(u,l.line+t.text.length,function(e){var t=f(e);t>i.maxLineLength&&(i.maxLine=e,i.maxLineLength=t,i.maxLineChanged=!0,a=!1)}),a&&(e.curOp.updateMaxLine=!0)),n.frontier=Math.min(n.frontier,l.line),zt(e,400);var c=t.text.length-(s.line-l.line)-1;t.full?Dr(e):l.line!=s.line||1!=t.text.length||Fi(e.doc,t)?Dr(e,l.line,s.line+1,c):Hr(e,l.line,"text");var h=xo(e,"changes"),d=xo(e,"change");if(d||h){var p={from:l,to:s,text:t.text,removed:t.removed,origin:t.origin};d&&mo(e,"change",e,p),h&&(e.curOp.changeObjs||(e.curOp.changeObjs=[])).push(p)}e.display.selForContextMenu=null}function kn(e,t,r,n,i){if(n||(n=r),Nl(n,r)<0){var o=n;n=r,r=o}"string"==typeof t&&(t=Vs(t)),bn(e,{from:r,to:n,text:t,origin:i})}function Tn(e,t){if(!bo(e,"scrollCursorIntoView")){var r=e.display,n=r.sizer.getBoundingClientRect(),i=null;if(t.top+n.top<0?i=!0:t.bottom+n.top>(window.innerHeight||document.documentElement.clientHeight)&&(i=!1),null!=i&&!yl){var o=zo("div","",null,"position: absolute; top: "+(t.top-r.viewOffset-Bt(e.display))+"px; height: "+(t.bottom-t.top+Vt(e)+r.barHeight)+"px; left: "+t.left+"px; width: 2px;");e.display.lineSpace.appendChild(o),o.scrollIntoView(i),e.display.lineSpace.removeChild(o)}}}function Mn(e,t,r,n){null==n&&(n=0);for(var i=0;5>i;i++){var o=!1,l=cr(e,t),s=r&&r!=t?cr(e,r):l,a=Nn(e,Math.min(l.left,s.left),Math.min(l.top,s.top)-n,Math.max(l.left,s.left),Math.max(l.bottom,s.bottom)+n),u=e.doc.scrollTop,c=e.doc.scrollLeft;if(null!=a.scrollTop&&(Zr(e,a.scrollTop),Math.abs(e.doc.scrollTop-u)>1&&(o=!0)),null!=a.scrollLeft&&(Qr(e,a.scrollLeft),Math.abs(e.doc.scrollLeft-c)>1&&(o=!0)),!o)break}return l}function An(e,t,r,n,i){var o=Nn(e,t,r,n,i);null!=o.scrollTop&&Zr(e,o.scrollTop),null!=o.scrollLeft&&Qr(e,o.scrollLeft)}function Nn(e,t,r,n,i){var o=e.display,l=gr(e.display);0>r&&(r=0);var s=e.curOp&&null!=e.curOp.scrollTop?e.curOp.scrollTop:o.scroller.scrollTop,a=jt(e),u={};i-r>a&&(i=r+a);var 
c=e.doc.height+Gt(o),f=l>r,h=i>c-l;if(s>r)u.scrollTop=f?0:r;else if(i>s+a){var d=Math.min(r,(h?c:i)-a);d!=s&&(u.scrollTop=d)}var p=e.curOp&&null!=e.curOp.scrollLeft?e.curOp.scrollLeft:o.scroller.scrollLeft,g=Kt(e)-(e.options.fixedGutter?o.gutters.offsetWidth:0),v=n-t>g;return v&&(n=t+g),10>t?u.scrollLeft=0:p>t?u.scrollLeft=Math.max(0,t-(v?0:10)):n>g+p-3&&(u.scrollLeft=n+(v?0:10)-g),u}function Wn(e,t,r){(null!=t||null!=r)&&Dn(e),null!=t&&(e.curOp.scrollLeft=(null==e.curOp.scrollLeft?e.doc.scrollLeft:e.curOp.scrollLeft)+t),null!=r&&(e.curOp.scrollTop=(null==e.curOp.scrollTop?e.doc.scrollTop:e.curOp.scrollTop)+r)}function On(e){Dn(e);var t=e.getCursor(),r=t,n=t;e.options.lineWrapping||(r=t.ch?Al(t.line,t.ch-1):t,n=Al(t.line,t.ch+1)),e.curOp.scrollToPos={from:r,to:n,margin:e.options.cursorScrollMargin,isCursor:!0}}function Dn(e){var t=e.curOp.scrollToPos;if(t){e.curOp.scrollToPos=null;var r=fr(e,t.from),n=fr(e,t.to),i=Nn(e,Math.min(r.left,n.left),Math.min(r.top,n.top)-t.margin,Math.max(r.right,n.right),Math.max(r.bottom,n.bottom)+t.margin);e.scrollTo(i.scrollLeft,i.scrollTop)}}function Hn(e,t,r,n){var i,o=e.doc;null==r&&(r="add"),"smart"==r&&(o.mode.indent?i=Rt(e,t):r="prev");var l=e.options.tabSize,s=Ki(o,t),a=Ns(s.text,null,l);s.stateAfter&&(s.stateAfter=null);var u,c=s.text.match(/^\s*/)[0];if(n||/\S/.test(s.text)){if("smart"==r&&(u=o.mode.indent(i,s.text.slice(c.length),s.text),u==ks||u>150)){if(!n)return;r="prev"}}else u=0,r="not";"prev"==r?u=t>o.first?Ns(Ki(o,t-1).text,null,l):0:"add"==r?u=a+e.options.indentUnit:"subtract"==r?u=a-e.options.indentUnit:"number"==typeof r&&(u=a+r),u=Math.max(0,u);var f="",h=0;if(e.options.indentWithTabs)for(var d=Math.floor(u/l);d;--d)h+=l,f+=" ";if(u>h&&(f+=ko(u-h)),f!=c)kn(o,f,Al(t,0),Al(t,c.length),"+input");else for(var d=0;d<o.sel.ranges.length;d++){var p=o.sel.ranges[d];if(p.head.line==t&&p.head.ch<c.length){var h=Al(t,c.length);xt(o,d,new ct(h,h));break}}s.stateAfter=null}function Pn(e,t,r,n){var i=t,o=t;return"number"==typeof t?o=Ki(e,dt(e,t)):i=Yi(t),null==i?null:(n(o,i)&&e.cm&&Hr(e.cm,i,r),o)}function In(e,t){for(var r=e.doc.sel.ranges,n=[],i=0;i<r.length;i++){for(var o=t(r[i]);n.length&&Nl(o.from,To(n).to)<=0;){var l=n.pop();if(Nl(l.from,o.from)<0){o.from=l.from;break}}n.push(o)}Tr(e,function(){for(var t=n.length-1;t>=0;t--)kn(e.doc,"",n[t].from,n[t].to,"+delete");On(e)})}function zn(e,t,r,n,i){function o(){var t=s+r;return t<e.first||t>=e.first+e.size?f=!1:(s=t,c=Ki(e,t))}function l(e){var t=(i?ol:ll)(c,a,r,!0);if(null==t){if(e||!o())return f=!1;a=i?(0>r?Qo:Zo)(c):0>r?c.text.length:0}else a=t;return!0}var s=t.line,a=t.ch,u=r,c=Ki(e,s),f=!0;if("char"==n)l();else if("column"==n)l(!0);else if("word"==n||"group"==n)for(var h=null,d="group"==n,p=e.cm&&e.cm.getHelper(t,"wordChars"),g=!0;!(0>r)||l(!g);g=!1){var v=c.text.charAt(a)||"\n",m=Ho(v,p)?"w":d&&"\n"==v?"n":!d||/\s/.test(v)?null:"p";if(!d||g||m||(m="s"),h&&h!=m){0>r&&(r=1,l());break}if(m&&(h=m),r>0&&!l(!g))break}var y=Wt(e,Al(s,a),u,!0);return f||(y.hitSide=!0),y}function En(e,t,r,n){var i,o=e.doc,l=t.left;if("page"==n){var s=Math.min(e.display.wrapper.clientHeight,window.innerHeight||document.documentElement.clientHeight);i=t.top+r*(s-(0>r?1.5:.5)*gr(e.display))}else"line"==n&&(i=r>0?t.bottom+3:t.top-3);for(;;){var a=dr(e,l,i);if(!a.outside)break;if(0>r?0>=i:i>=o.height){a.hitSide=!0;break}i+=5*r}return a}function Fn(t,r,n,i){e.defaults[t]=r,n&&(jl[t]=i?function(e,t,r){r!=Xl&&n(e,t,r)}:n)}function Rn(e){for(var t,r,n,i,o=e.split(/-(?!$)/),e=o[o.length-1],l=0;l<o.length-1;l++){var 
s=o[l];if(/^(cmd|meta|m)$/i.test(s))i=!0;else if(/^a(lt)?$/i.test(s))t=!0;else if(/^(c|ctrl|control)$/i.test(s))r=!0;else{if(!/^s(hift)$/i.test(s))throw new Error("Unrecognized modifier name: "+s);n=!0}}return t&&(e="Alt-"+e),r&&(e="Ctrl-"+e),i&&(e="Cmd-"+e),n&&(e="Shift-"+e),e}function Bn(e){return"string"==typeof e?ts[e]:e
|
|
urls.py
|
from django.conf.urls import url
from django.contrib import admin
|
from mixpanel_django_graphos.views import ReportActivityView
admin.site.index_template = 'admin/index.html'
admin.autodiscover()
def get_admin_urls(urls):
"""
    Extend the admin site to include additional custom URLs.
"""
def get_urls():
my_urls = [url(r'^activity-report/$', admin.site.admin_view(
ReportActivityView.as_view()), name='activity-report')]
return my_urls + urls
return get_urls
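# Monkey-patch admin.site.get_urls so the custom activity-report view is served alongside the default admin URLs.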
admin_urls = get_admin_urls(admin.site.get_urls())
admin.site.get_urls = admin_urls
urlpatterns = [
url(r'^admin/', admin.site.urls),
]
| |
issue-21974.rs
|
// Test that (for now) we report an ambiguity error here, because
// specific trait relationships are ignored for the purposes of trait
// matching. This behavior should likely be improved such that this
// test passes. See #21974 for more details.
trait Foo {
fn foo(self);
}
fn
|
<'a,'b,T>(x: &'a T, y: &'b T)
where &'a T : Foo, //~ ERROR type annotations needed
&'b T : Foo
{
x.foo();
y.foo();
}
fn main() { }
|
foo
|
validator.ts
|
/**
* Copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// This file is generated by generate_channels.js, do not edit manually.
import { Validator, ValidationError, tOptional, tObject, tBoolean, tNumber, tString, tAny, tEnum, tArray, tBinary } from './validatorPrimitives';
export { Validator, ValidationError } from './validatorPrimitives';
type Scheme = { [key: string]: Validator };
export function createScheme(tChannel: (name: string) => Validator): Scheme {
const scheme: Scheme = {};
const tType = (name: string): Validator => {
return (arg: any, path: string) => {
const v = scheme[name];
if (!v)
throw new ValidationError(path + ': unknown type "' + name + '"');
return v(arg, path);
};
};
scheme.StackFrame = tObject({
file: tString,
line: tOptional(tNumber),
column: tOptional(tNumber),
function: tOptional(tString),
});
scheme.Metadata = tObject({
stack: tOptional(tArray(tType('StackFrame'))),
apiName: tOptional(tString),
internal: tOptional(tBoolean),
});
scheme.Point = tObject({
x: tNumber,
y: tNumber,
});
scheme.Rect = tObject({
x: tNumber,
y: tNumber,
width: tNumber,
height: tNumber,
});
scheme.SerializedValue = tObject({
n: tOptional(tNumber),
b: tOptional(tBoolean),
s: tOptional(tString),
v: tOptional(tEnum(['null', 'undefined', 'NaN', 'Infinity', '-Infinity', '-0'])),
d: tOptional(tString),
r: tOptional(tObject({
p: tString,
f: tString,
})),
a: tOptional(tArray(tType('SerializedValue'))),
o: tOptional(tArray(tObject({
k: tString,
v: tType('SerializedValue'),
}))),
h: tOptional(tNumber),
});
scheme.SerializedArgument = tObject({
value: tType('SerializedValue'),
handles: tArray(tChannel('*')),
});
scheme.ExpectedTextValue = tObject({
string: tOptional(tString),
regexSource: tOptional(tString),
regexFlags: tOptional(tString),
matchSubstring: tOptional(tBoolean),
normalizeWhiteSpace: tOptional(tBoolean),
});
scheme.AXNode = tObject({
role: tString,
name: tString,
valueString: tOptional(tString),
valueNumber: tOptional(tNumber),
description: tOptional(tString),
keyshortcuts: tOptional(tString),
roledescription: tOptional(tString),
valuetext: tOptional(tString),
disabled: tOptional(tBoolean),
expanded: tOptional(tBoolean),
focused: tOptional(tBoolean),
modal: tOptional(tBoolean),
multiline: tOptional(tBoolean),
multiselectable: tOptional(tBoolean),
readonly: tOptional(tBoolean),
required: tOptional(tBoolean),
selected: tOptional(tBoolean),
checked: tOptional(tEnum(['checked', 'unchecked', 'mixed'])),
pressed: tOptional(tEnum(['pressed', 'released', 'mixed'])),
level: tOptional(tNumber),
valuemin: tOptional(tNumber),
valuemax: tOptional(tNumber),
autocomplete: tOptional(tString),
haspopup: tOptional(tString),
invalid: tOptional(tString),
orientation: tOptional(tString),
children: tOptional(tArray(tType('AXNode'))),
});
scheme.SetNetworkCookie = tObject({
name: tString,
value: tString,
url: tOptional(tString),
domain: tOptional(tString),
path: tOptional(tString),
expires: tOptional(tNumber),
httpOnly: tOptional(tBoolean),
secure: tOptional(tBoolean),
sameSite: tOptional(tEnum(['Strict', 'Lax', 'None'])),
});
scheme.NetworkCookie = tObject({
name: tString,
value: tString,
domain: tString,
path: tString,
expires: tNumber,
httpOnly: tBoolean,
secure: tBoolean,
sameSite: tEnum(['Strict', 'Lax', 'None']),
});
scheme.NameValue = tObject({
name: tString,
value: tString,
});
scheme.OriginStorage = tObject({
origin: tString,
localStorage: tArray(tType('NameValue')),
});
scheme.SerializedError = tObject({
error: tOptional(tObject({
message: tString,
name: tString,
stack: tOptional(tString),
})),
value: tOptional(tType('SerializedValue')),
});
scheme.FormField = tObject({
name: tString,
value: tOptional(tString),
file: tOptional(tObject({
name: tString,
mimeType: tString,
buffer: tBinary,
})),
});
scheme.FetchRequestFetchParams = tObject({
url: tString,
params: tOptional(tArray(tType('NameValue'))),
method: tOptional(tString),
headers: tOptional(tArray(tType('NameValue'))),
postData: tOptional(tBinary),
jsonData: tOptional(tAny),
formData: tOptional(tArray(tType('NameValue'))),
multipartData: tOptional(tArray(tType('FormField'))),
timeout: tOptional(tNumber),
failOnStatusCode: tOptional(tBoolean),
ignoreHTTPSErrors: tOptional(tBoolean),
});
scheme.FetchRequestFetchResponseBodyParams = tObject({
fetchUid: tString,
});
scheme.FetchRequestStorageStateParams = tOptional(tObject({}));
scheme.FetchRequestDisposeFetchResponseParams = tObject({
fetchUid: tString,
});
scheme.FetchRequestDisposeParams = tOptional(tObject({}));
scheme.FetchResponse = tObject({
fetchUid: tString,
url: tString,
status: tNumber,
statusText: tString,
headers: tArray(tType('NameValue')),
});
scheme.LifecycleEvent = tEnum(['load', 'domcontentloaded', 'networkidle', 'commit']);
scheme.RootInitializeParams = tObject({
sdkLanguage: tString,
});
scheme.PlaywrightSocksConnectedParams = tObject({
uid: tString,
host: tString,
port: tNumber,
});
scheme.PlaywrightSocksFailedParams = tObject({
uid: tString,
errorCode: tString,
});
scheme.PlaywrightSocksDataParams = tObject({
uid: tString,
data: tBinary,
});
scheme.PlaywrightSocksErrorParams = tObject({
uid: tString,
error: tString,
});
scheme.PlaywrightSocksEndParams = tObject({
uid: tString,
});
scheme.PlaywrightNewRequestParams = tObject({
baseURL: tOptional(tString),
userAgent: tOptional(tString),
ignoreHTTPSErrors: tOptional(tBoolean),
extraHTTPHeaders: tOptional(tArray(tType('NameValue'))),
httpCredentials: tOptional(tObject({
username: tString,
password: tString,
})),
proxy: tOptional(tObject({
server: tString,
bypass: tOptional(tString),
username: tOptional(tString),
password: tOptional(tString),
})),
timeout: tOptional(tNumber),
storageState: tOptional(tObject({
cookies: tArray(tType('NetworkCookie')),
origins: tArray(tType('OriginStorage')),
})),
});
scheme.SelectorsRegisterParams = tObject({
name: tString,
source: tString,
contentScript: tOptional(tBoolean),
});
scheme.BrowserTypeConnectParams = tObject({
wsEndpoint: tString,
headers: tOptional(tAny),
slowMo: tOptional(tNumber),
timeout: tOptional(tNumber),
});
scheme.BrowserTypeLaunchParams = tObject({
channel: tOptional(tString),
executablePath: tOptional(tString),
args: tOptional(tArray(tString)),
ignoreAllDefaultArgs: tOptional(tBoolean),
ignoreDefaultArgs: tOptional(tArray(tString)),
handleSIGINT: tOptional(tBoolean),
handleSIGTERM: tOptional(tBoolean),
handleSIGHUP: tOptional(tBoolean),
timeout: tOptional(tNumber),
env: tOptional(tArray(tType('NameValue'))),
headless: tOptional(tBoolean),
devtools: tOptional(tBoolean),
proxy: tOptional(tObject({
server: tString,
bypass: tOptional(tString),
username: tOptional(tString),
password: tOptional(tString),
})),
downloadsPath: tOptional(tString),
tracesDir: tOptional(tString),
chromiumSandbox: tOptional(tBoolean),
firefoxUserPrefs: tOptional(tAny),
slowMo: tOptional(tNumber),
});
scheme.BrowserTypeLaunchPersistentContextParams = tObject({
channel: tOptional(tString),
executablePath: tOptional(tString),
args: tOptional(tArray(tString)),
ignoreAllDefaultArgs: tOptional(tBoolean),
ignoreDefaultArgs: tOptional(tArray(tString)),
handleSIGINT: tOptional(tBoolean),
handleSIGTERM: tOptional(tBoolean),
handleSIGHUP: tOptional(tBoolean),
timeout: tOptional(tNumber),
env: tOptional(tArray(tType('NameValue'))),
headless: tOptional(tBoolean),
devtools: tOptional(tBoolean),
proxy: tOptional(tObject({
server: tString,
bypass: tOptional(tString),
username: tOptional(tString),
password: tOptional(tString),
})),
downloadsPath: tOptional(tString),
tracesDir: tOptional(tString),
chromiumSandbox: tOptional(tBoolean),
noDefaultViewport: tOptional(tBoolean),
viewport: tOptional(tObject({
width: tNumber,
height: tNumber,
})),
screen: tOptional(tObject({
width: tNumber,
height: tNumber,
})),
ignoreHTTPSErrors: tOptional(tBoolean),
javaScriptEnabled: tOptional(tBoolean),
bypassCSP: tOptional(tBoolean),
userAgent: tOptional(tString),
locale: tOptional(tString),
timezoneId: tOptional(tString),
geolocation: tOptional(tObject({
longitude: tNumber,
latitude: tNumber,
accuracy: tOptional(tNumber),
})),
permissions: tOptional(tArray(tString)),
extraHTTPHeaders: tOptional(tArray(tType('NameValue'))),
offline: tOptional(tBoolean),
httpCredentials: tOptional(tObject({
username: tString,
password: tString,
})),
deviceScaleFactor: tOptional(tNumber),
isMobile: tOptional(tBoolean),
hasTouch: tOptional(tBoolean),
colorScheme: tOptional(tEnum(['dark', 'light', 'no-preference'])),
reducedMotion: tOptional(tEnum(['reduce', 'no-preference'])),
forcedColors: tOptional(tEnum(['active', 'none'])),
acceptDownloads: tOptional(tBoolean),
baseURL: tOptional(tString),
recordVideo: tOptional(tObject({
dir: tString,
size: tOptional(tObject({
width: tNumber,
height: tNumber,
})),
})),
recordHar: tOptional(tObject({
omitContent: tOptional(tBoolean),
path: tString,
})),
strictSelectors: tOptional(tBoolean),
userDataDir: tString,
slowMo: tOptional(tNumber),
});
scheme.BrowserTypeConnectOverCDPParams = tObject({
endpointURL: tString,
headers: tOptional(tArray(tType('NameValue'))),
slowMo: tOptional(tNumber),
timeout: tOptional(tNumber),
});
scheme.BrowserCloseParams = tOptional(tObject({}));
scheme.BrowserKillForTestsParams = tOptional(tObject({}));
scheme.BrowserNewContextParams = tObject({
noDefaultViewport: tOptional(tBoolean),
viewport: tOptional(tObject({
width: tNumber,
height: tNumber,
})),
screen: tOptional(tObject({
width: tNumber,
height: tNumber,
})),
ignoreHTTPSErrors: tOptional(tBoolean),
javaScriptEnabled: tOptional(tBoolean),
bypassCSP: tOptional(tBoolean),
userAgent: tOptional(tString),
locale: tOptional(tString),
timezoneId: tOptional(tString),
geolocation: tOptional(tObject({
longitude: tNumber,
latitude: tNumber,
accuracy: tOptional(tNumber),
})),
permissions: tOptional(tArray(tString)),
extraHTTPHeaders: tOptional(tArray(tType('NameValue'))),
offline: tOptional(tBoolean),
httpCredentials: tOptional(tObject({
username: tString,
password: tString,
})),
deviceScaleFactor: tOptional(tNumber),
isMobile: tOptional(tBoolean),
hasTouch: tOptional(tBoolean),
colorScheme: tOptional(tEnum(['dark', 'light', 'no-preference'])),
reducedMotion: tOptional(tEnum(['reduce', 'no-preference'])),
forcedColors: tOptional(tEnum(['active', 'none'])),
acceptDownloads: tOptional(tBoolean),
baseURL: tOptional(tString),
recordVideo: tOptional(tObject({
dir: tString,
size: tOptional(tObject({
width: tNumber,
height: tNumber,
})),
})),
recordHar: tOptional(tObject({
omitContent: tOptional(tBoolean),
path: tString,
})),
strictSelectors: tOptional(tBoolean),
proxy: tOptional(tObject({
server: tString,
bypass: tOptional(tString),
username: tOptional(tString),
password: tOptional(tString),
})),
storageState: tOptional(tObject({
cookies: tOptional(tArray(tType('SetNetworkCookie'))),
origins: tOptional(tArray(tType('OriginStorage'))),
})),
});
scheme.BrowserNewBrowserCDPSessionParams = tOptional(tObject({}));
scheme.BrowserStartTracingParams = tObject({
page: tOptional(tChannel('Page')),
path: tOptional(tString),
screenshots: tOptional(tBoolean),
categories: tOptional(tArray(tString)),
});
scheme.BrowserStopTracingParams = tOptional(tObject({}));
scheme.EventTargetWaitForEventInfoParams = tObject({
info: tObject({
waitId: tString,
phase: tEnum(['before', 'after', 'log']),
event: tOptional(tString),
message: tOptional(tString),
error: tOptional(tString),
}),
});
scheme.BrowserContextWaitForEventInfoParams = tType('EventTargetWaitForEventInfoParams');
scheme.PageWaitForEventInfoParams = tType('EventTargetWaitForEventInfoParams');
scheme.WebSocketWaitForEventInfoParams = tType('EventTargetWaitForEventInfoParams');
scheme.ElectronApplicationWaitForEventInfoParams = tType('EventTargetWaitForEventInfoParams');
scheme.AndroidDeviceWaitForEventInfoParams = tType('EventTargetWaitForEventInfoParams');
scheme.BrowserContextAddCookiesParams = tObject({
cookies: tArray(tType('SetNetworkCookie')),
});
scheme.BrowserContextAddInitScriptParams = tObject({
source: tString,
});
scheme.BrowserContextClearCookiesParams = tOptional(tObject({}));
scheme.BrowserContextClearPermissionsParams = tOptional(tObject({}));
scheme.BrowserContextCloseParams = tOptional(tObject({}));
scheme.BrowserContextCookiesParams = tObject({
urls: tArray(tString),
});
scheme.BrowserContextExposeBindingParams = tObject({
name: tString,
needsHandle: tOptional(tBoolean),
});
scheme.BrowserContextGrantPermissionsParams = tObject({
permissions: tArray(tString),
origin: tOptional(tString),
});
scheme.BrowserContextNewPageParams = tOptional(tObject({}));
scheme.BrowserContextSetDefaultNavigationTimeoutNoReplyParams = tObject({
timeout: tOptional(tNumber),
});
scheme.BrowserContextSetDefaultTimeoutNoReplyParams = tObject({
timeout: tOptional(tNumber),
});
scheme.BrowserContextSetExtraHTTPHeadersParams = tObject({
headers: tArray(tType('NameValue')),
});
scheme.BrowserContextSetGeolocationParams = tObject({
geolocation: tOptional(tObject({
longitude: tNumber,
latitude: tNumber,
accuracy: tOptional(tNumber),
})),
|
scheme.BrowserContextSetHTTPCredentialsParams = tObject({
httpCredentials: tOptional(tObject({
username: tString,
password: tString,
})),
});
scheme.BrowserContextSetNetworkInterceptionEnabledParams = tObject({
enabled: tBoolean,
});
scheme.BrowserContextSetOfflineParams = tObject({
offline: tBoolean,
});
scheme.BrowserContextStorageStateParams = tOptional(tObject({}));
scheme.BrowserContextPauseParams = tOptional(tObject({}));
scheme.BrowserContextRecorderSupplementEnableParams = tObject({
language: tOptional(tString),
startRecording: tOptional(tBoolean),
pauseOnNextStatement: tOptional(tBoolean),
launchOptions: tOptional(tAny),
contextOptions: tOptional(tAny),
device: tOptional(tString),
saveStorage: tOptional(tString),
outputFile: tOptional(tString),
});
scheme.BrowserContextNewCDPSessionParams = tObject({
page: tOptional(tChannel('Page')),
frame: tOptional(tChannel('Frame')),
});
scheme.BrowserContextTracingStartParams = tObject({
name: tOptional(tString),
snapshots: tOptional(tBoolean),
screenshots: tOptional(tBoolean),
});
scheme.BrowserContextTracingStartChunkParams = tObject({
title: tOptional(tString),
});
scheme.BrowserContextTracingStopChunkParams = tObject({
save: tBoolean,
skipCompress: tBoolean,
});
scheme.BrowserContextTracingStopParams = tOptional(tObject({}));
scheme.BrowserContextHarExportParams = tOptional(tObject({}));
scheme.PageSetDefaultNavigationTimeoutNoReplyParams = tObject({
timeout: tOptional(tNumber),
});
scheme.PageSetDefaultTimeoutNoReplyParams = tObject({
timeout: tOptional(tNumber),
});
scheme.PageSetFileChooserInterceptedNoReplyParams = tObject({
intercepted: tBoolean,
});
scheme.PageAddInitScriptParams = tObject({
source: tString,
});
scheme.PageCloseParams = tObject({
runBeforeUnload: tOptional(tBoolean),
});
scheme.PageEmulateMediaParams = tObject({
media: tOptional(tEnum(['screen', 'print', 'null'])),
colorScheme: tOptional(tEnum(['dark', 'light', 'no-preference', 'null'])),
reducedMotion: tOptional(tEnum(['reduce', 'no-preference', 'null'])),
forcedColors: tOptional(tEnum(['active', 'none', 'null'])),
});
scheme.PageExposeBindingParams = tObject({
name: tString,
needsHandle: tOptional(tBoolean),
});
scheme.PageGoBackParams = tObject({
timeout: tOptional(tNumber),
waitUntil: tOptional(tType('LifecycleEvent')),
});
scheme.PageGoForwardParams = tObject({
timeout: tOptional(tNumber),
waitUntil: tOptional(tType('LifecycleEvent')),
});
scheme.PageReloadParams = tObject({
timeout: tOptional(tNumber),
waitUntil: tOptional(tType('LifecycleEvent')),
});
scheme.PageScreenshotParams = tObject({
timeout: tOptional(tNumber),
type: tOptional(tEnum(['png', 'jpeg'])),
quality: tOptional(tNumber),
omitBackground: tOptional(tBoolean),
fullPage: tOptional(tBoolean),
clip: tOptional(tType('Rect')),
});
scheme.PageSetExtraHTTPHeadersParams = tObject({
headers: tArray(tType('NameValue')),
});
scheme.PageSetNetworkInterceptionEnabledParams = tObject({
enabled: tBoolean,
});
scheme.PageSetViewportSizeParams = tObject({
viewportSize: tObject({
width: tNumber,
height: tNumber,
}),
});
scheme.PageKeyboardDownParams = tObject({
key: tString,
});
scheme.PageKeyboardUpParams = tObject({
key: tString,
});
scheme.PageKeyboardInsertTextParams = tObject({
text: tString,
});
scheme.PageKeyboardTypeParams = tObject({
text: tString,
delay: tOptional(tNumber),
});
scheme.PageKeyboardPressParams = tObject({
key: tString,
delay: tOptional(tNumber),
});
scheme.PageMouseMoveParams = tObject({
x: tNumber,
y: tNumber,
steps: tOptional(tNumber),
});
scheme.PageMouseDownParams = tObject({
button: tOptional(tEnum(['left', 'right', 'middle'])),
clickCount: tOptional(tNumber),
});
scheme.PageMouseUpParams = tObject({
button: tOptional(tEnum(['left', 'right', 'middle'])),
clickCount: tOptional(tNumber),
});
scheme.PageMouseClickParams = tObject({
x: tNumber,
y: tNumber,
delay: tOptional(tNumber),
button: tOptional(tEnum(['left', 'right', 'middle'])),
clickCount: tOptional(tNumber),
});
scheme.PageMouseWheelParams = tObject({
deltaX: tNumber,
deltaY: tNumber,
});
scheme.PageTouchscreenTapParams = tObject({
x: tNumber,
y: tNumber,
});
scheme.PageAccessibilitySnapshotParams = tObject({
interestingOnly: tOptional(tBoolean),
root: tOptional(tChannel('ElementHandle')),
});
scheme.PagePdfParams = tObject({
scale: tOptional(tNumber),
displayHeaderFooter: tOptional(tBoolean),
headerTemplate: tOptional(tString),
footerTemplate: tOptional(tString),
printBackground: tOptional(tBoolean),
landscape: tOptional(tBoolean),
pageRanges: tOptional(tString),
format: tOptional(tString),
width: tOptional(tString),
height: tOptional(tString),
preferCSSPageSize: tOptional(tBoolean),
margin: tOptional(tObject({
top: tOptional(tString),
bottom: tOptional(tString),
left: tOptional(tString),
right: tOptional(tString),
})),
});
scheme.PageStartJSCoverageParams = tObject({
resetOnNavigation: tOptional(tBoolean),
reportAnonymousScripts: tOptional(tBoolean),
});
scheme.PageStopJSCoverageParams = tOptional(tObject({}));
scheme.PageStartCSSCoverageParams = tObject({
resetOnNavigation: tOptional(tBoolean),
});
scheme.PageStopCSSCoverageParams = tOptional(tObject({}));
scheme.PageBringToFrontParams = tOptional(tObject({}));
scheme.FrameEvalOnSelectorParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
expression: tString,
isFunction: tOptional(tBoolean),
arg: tType('SerializedArgument'),
});
scheme.FrameEvalOnSelectorAllParams = tObject({
selector: tString,
expression: tString,
isFunction: tOptional(tBoolean),
arg: tType('SerializedArgument'),
});
scheme.FrameAddScriptTagParams = tObject({
url: tOptional(tString),
content: tOptional(tString),
type: tOptional(tString),
});
scheme.FrameAddStyleTagParams = tObject({
url: tOptional(tString),
content: tOptional(tString),
});
scheme.FrameCheckParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
force: tOptional(tBoolean),
noWaitAfter: tOptional(tBoolean),
position: tOptional(tType('Point')),
timeout: tOptional(tNumber),
trial: tOptional(tBoolean),
});
scheme.FrameClickParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
force: tOptional(tBoolean),
noWaitAfter: tOptional(tBoolean),
modifiers: tOptional(tArray(tEnum(['Alt', 'Control', 'Meta', 'Shift']))),
position: tOptional(tType('Point')),
delay: tOptional(tNumber),
button: tOptional(tEnum(['left', 'right', 'middle'])),
clickCount: tOptional(tNumber),
timeout: tOptional(tNumber),
trial: tOptional(tBoolean),
});
scheme.FrameContentParams = tOptional(tObject({}));
scheme.FrameDragAndDropParams = tObject({
source: tString,
target: tString,
force: tOptional(tBoolean),
noWaitAfter: tOptional(tBoolean),
timeout: tOptional(tNumber),
trial: tOptional(tBoolean),
sourcePosition: tOptional(tType('Point')),
targetPosition: tOptional(tType('Point')),
});
scheme.FrameDblclickParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
force: tOptional(tBoolean),
noWaitAfter: tOptional(tBoolean),
modifiers: tOptional(tArray(tEnum(['Alt', 'Control', 'Meta', 'Shift']))),
position: tOptional(tType('Point')),
delay: tOptional(tNumber),
button: tOptional(tEnum(['left', 'right', 'middle'])),
timeout: tOptional(tNumber),
trial: tOptional(tBoolean),
});
scheme.FrameDispatchEventParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
type: tString,
eventInit: tType('SerializedArgument'),
timeout: tOptional(tNumber),
});
scheme.FrameEvaluateExpressionParams = tObject({
expression: tString,
isFunction: tOptional(tBoolean),
arg: tType('SerializedArgument'),
});
scheme.FrameEvaluateExpressionHandleParams = tObject({
expression: tString,
isFunction: tOptional(tBoolean),
arg: tType('SerializedArgument'),
});
scheme.FrameFillParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
value: tString,
force: tOptional(tBoolean),
timeout: tOptional(tNumber),
noWaitAfter: tOptional(tBoolean),
});
scheme.FrameFocusParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
timeout: tOptional(tNumber),
});
scheme.FrameFrameElementParams = tOptional(tObject({}));
scheme.FrameGetAttributeParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
name: tString,
timeout: tOptional(tNumber),
});
scheme.FrameGotoParams = tObject({
url: tString,
timeout: tOptional(tNumber),
waitUntil: tOptional(tType('LifecycleEvent')),
referer: tOptional(tString),
});
scheme.FrameHoverParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
force: tOptional(tBoolean),
modifiers: tOptional(tArray(tEnum(['Alt', 'Control', 'Meta', 'Shift']))),
position: tOptional(tType('Point')),
timeout: tOptional(tNumber),
trial: tOptional(tBoolean),
});
scheme.FrameInnerHTMLParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
timeout: tOptional(tNumber),
});
scheme.FrameInnerTextParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
timeout: tOptional(tNumber),
});
scheme.FrameInputValueParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
timeout: tOptional(tNumber),
});
scheme.FrameIsCheckedParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
timeout: tOptional(tNumber),
});
scheme.FrameIsDisabledParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
timeout: tOptional(tNumber),
});
scheme.FrameIsEnabledParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
timeout: tOptional(tNumber),
});
scheme.FrameIsHiddenParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
});
scheme.FrameIsVisibleParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
});
scheme.FrameIsEditableParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
timeout: tOptional(tNumber),
});
scheme.FramePressParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
key: tString,
delay: tOptional(tNumber),
noWaitAfter: tOptional(tBoolean),
timeout: tOptional(tNumber),
});
scheme.FrameQuerySelectorParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
});
scheme.FrameQuerySelectorAllParams = tObject({
selector: tString,
});
scheme.FrameSelectOptionParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
elements: tOptional(tArray(tChannel('ElementHandle'))),
options: tOptional(tArray(tObject({
value: tOptional(tString),
label: tOptional(tString),
index: tOptional(tNumber),
}))),
force: tOptional(tBoolean),
timeout: tOptional(tNumber),
noWaitAfter: tOptional(tBoolean),
});
scheme.FrameSetContentParams = tObject({
html: tString,
timeout: tOptional(tNumber),
waitUntil: tOptional(tType('LifecycleEvent')),
});
scheme.FrameSetInputFilesParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
files: tArray(tObject({
name: tString,
mimeType: tOptional(tString),
buffer: tBinary,
})),
timeout: tOptional(tNumber),
noWaitAfter: tOptional(tBoolean),
});
scheme.FrameTapParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
force: tOptional(tBoolean),
noWaitAfter: tOptional(tBoolean),
modifiers: tOptional(tArray(tEnum(['Alt', 'Control', 'Meta', 'Shift']))),
position: tOptional(tType('Point')),
timeout: tOptional(tNumber),
trial: tOptional(tBoolean),
});
scheme.FrameTextContentParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
timeout: tOptional(tNumber),
});
scheme.FrameTitleParams = tOptional(tObject({}));
scheme.FrameTypeParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
text: tString,
delay: tOptional(tNumber),
noWaitAfter: tOptional(tBoolean),
timeout: tOptional(tNumber),
});
scheme.FrameUncheckParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
force: tOptional(tBoolean),
noWaitAfter: tOptional(tBoolean),
position: tOptional(tType('Point')),
timeout: tOptional(tNumber),
trial: tOptional(tBoolean),
});
scheme.FrameWaitForTimeoutParams = tObject({
timeout: tNumber,
});
scheme.FrameWaitForFunctionParams = tObject({
expression: tString,
isFunction: tOptional(tBoolean),
arg: tType('SerializedArgument'),
timeout: tOptional(tNumber),
pollingInterval: tOptional(tNumber),
});
scheme.FrameWaitForSelectorParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
timeout: tOptional(tNumber),
state: tOptional(tEnum(['attached', 'detached', 'visible', 'hidden'])),
omitReturnValue: tOptional(tBoolean),
});
scheme.FrameExpectParams = tObject({
selector: tString,
expression: tString,
expressionArg: tOptional(tAny),
expectedText: tOptional(tArray(tType('ExpectedTextValue'))),
expectedNumber: tOptional(tNumber),
expectedValue: tOptional(tType('SerializedArgument')),
useInnerText: tOptional(tBoolean),
isNot: tBoolean,
timeout: tOptional(tNumber),
});
scheme.WorkerEvaluateExpressionParams = tObject({
expression: tString,
isFunction: tOptional(tBoolean),
arg: tType('SerializedArgument'),
});
scheme.WorkerEvaluateExpressionHandleParams = tObject({
expression: tString,
isFunction: tOptional(tBoolean),
arg: tType('SerializedArgument'),
});
scheme.JSHandleDisposeParams = tOptional(tObject({}));
scheme.ElementHandleDisposeParams = tType('JSHandleDisposeParams');
scheme.JSHandleEvaluateExpressionParams = tObject({
expression: tString,
isFunction: tOptional(tBoolean),
arg: tType('SerializedArgument'),
});
scheme.ElementHandleEvaluateExpressionParams = tType('JSHandleEvaluateExpressionParams');
scheme.JSHandleEvaluateExpressionHandleParams = tObject({
expression: tString,
isFunction: tOptional(tBoolean),
arg: tType('SerializedArgument'),
});
scheme.ElementHandleEvaluateExpressionHandleParams = tType('JSHandleEvaluateExpressionHandleParams');
scheme.JSHandleGetPropertyListParams = tOptional(tObject({}));
scheme.ElementHandleGetPropertyListParams = tType('JSHandleGetPropertyListParams');
scheme.JSHandleGetPropertyParams = tObject({
name: tString,
});
scheme.ElementHandleGetPropertyParams = tType('JSHandleGetPropertyParams');
scheme.JSHandleJsonValueParams = tOptional(tObject({}));
scheme.ElementHandleJsonValueParams = tType('JSHandleJsonValueParams');
scheme.ElementHandleEvalOnSelectorParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
expression: tString,
isFunction: tOptional(tBoolean),
arg: tType('SerializedArgument'),
});
scheme.ElementHandleEvalOnSelectorAllParams = tObject({
selector: tString,
expression: tString,
isFunction: tOptional(tBoolean),
arg: tType('SerializedArgument'),
});
scheme.ElementHandleBoundingBoxParams = tOptional(tObject({}));
scheme.ElementHandleCheckParams = tObject({
force: tOptional(tBoolean),
noWaitAfter: tOptional(tBoolean),
position: tOptional(tType('Point')),
timeout: tOptional(tNumber),
trial: tOptional(tBoolean),
});
scheme.ElementHandleClickParams = tObject({
force: tOptional(tBoolean),
noWaitAfter: tOptional(tBoolean),
modifiers: tOptional(tArray(tEnum(['Alt', 'Control', 'Meta', 'Shift']))),
position: tOptional(tType('Point')),
delay: tOptional(tNumber),
button: tOptional(tEnum(['left', 'right', 'middle'])),
clickCount: tOptional(tNumber),
timeout: tOptional(tNumber),
trial: tOptional(tBoolean),
});
scheme.ElementHandleContentFrameParams = tOptional(tObject({}));
scheme.ElementHandleDblclickParams = tObject({
force: tOptional(tBoolean),
noWaitAfter: tOptional(tBoolean),
modifiers: tOptional(tArray(tEnum(['Alt', 'Control', 'Meta', 'Shift']))),
position: tOptional(tType('Point')),
delay: tOptional(tNumber),
button: tOptional(tEnum(['left', 'right', 'middle'])),
timeout: tOptional(tNumber),
trial: tOptional(tBoolean),
});
scheme.ElementHandleDispatchEventParams = tObject({
type: tString,
eventInit: tType('SerializedArgument'),
});
scheme.ElementHandleFillParams = tObject({
value: tString,
force: tOptional(tBoolean),
timeout: tOptional(tNumber),
noWaitAfter: tOptional(tBoolean),
});
scheme.ElementHandleFocusParams = tOptional(tObject({}));
scheme.ElementHandleGetAttributeParams = tObject({
name: tString,
});
scheme.ElementHandleHoverParams = tObject({
force: tOptional(tBoolean),
modifiers: tOptional(tArray(tEnum(['Alt', 'Control', 'Meta', 'Shift']))),
position: tOptional(tType('Point')),
timeout: tOptional(tNumber),
trial: tOptional(tBoolean),
});
scheme.ElementHandleInnerHTMLParams = tOptional(tObject({}));
scheme.ElementHandleInnerTextParams = tOptional(tObject({}));
scheme.ElementHandleInputValueParams = tOptional(tObject({}));
scheme.ElementHandleIsCheckedParams = tOptional(tObject({}));
scheme.ElementHandleIsDisabledParams = tOptional(tObject({}));
scheme.ElementHandleIsEditableParams = tOptional(tObject({}));
scheme.ElementHandleIsEnabledParams = tOptional(tObject({}));
scheme.ElementHandleIsHiddenParams = tOptional(tObject({}));
scheme.ElementHandleIsVisibleParams = tOptional(tObject({}));
scheme.ElementHandleOwnerFrameParams = tOptional(tObject({}));
scheme.ElementHandlePressParams = tObject({
key: tString,
delay: tOptional(tNumber),
timeout: tOptional(tNumber),
noWaitAfter: tOptional(tBoolean),
});
scheme.ElementHandleQuerySelectorParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
});
scheme.ElementHandleQuerySelectorAllParams = tObject({
selector: tString,
});
scheme.ElementHandleScreenshotParams = tObject({
timeout: tOptional(tNumber),
type: tOptional(tEnum(['png', 'jpeg'])),
quality: tOptional(tNumber),
omitBackground: tOptional(tBoolean),
});
scheme.ElementHandleScrollIntoViewIfNeededParams = tObject({
timeout: tOptional(tNumber),
});
scheme.ElementHandleSelectOptionParams = tObject({
elements: tOptional(tArray(tChannel('ElementHandle'))),
options: tOptional(tArray(tObject({
value: tOptional(tString),
label: tOptional(tString),
index: tOptional(tNumber),
}))),
force: tOptional(tBoolean),
timeout: tOptional(tNumber),
noWaitAfter: tOptional(tBoolean),
});
scheme.ElementHandleSelectTextParams = tObject({
force: tOptional(tBoolean),
timeout: tOptional(tNumber),
});
scheme.ElementHandleSetInputFilesParams = tObject({
files: tArray(tObject({
name: tString,
mimeType: tOptional(tString),
buffer: tBinary,
})),
timeout: tOptional(tNumber),
noWaitAfter: tOptional(tBoolean),
});
scheme.ElementHandleTapParams = tObject({
force: tOptional(tBoolean),
noWaitAfter: tOptional(tBoolean),
modifiers: tOptional(tArray(tEnum(['Alt', 'Control', 'Meta', 'Shift']))),
position: tOptional(tType('Point')),
timeout: tOptional(tNumber),
trial: tOptional(tBoolean),
});
scheme.ElementHandleTextContentParams = tOptional(tObject({}));
scheme.ElementHandleTypeParams = tObject({
text: tString,
delay: tOptional(tNumber),
noWaitAfter: tOptional(tBoolean),
timeout: tOptional(tNumber),
});
scheme.ElementHandleUncheckParams = tObject({
force: tOptional(tBoolean),
noWaitAfter: tOptional(tBoolean),
position: tOptional(tType('Point')),
timeout: tOptional(tNumber),
trial: tOptional(tBoolean),
});
scheme.ElementHandleWaitForElementStateParams = tObject({
state: tEnum(['visible', 'hidden', 'stable', 'enabled', 'disabled', 'editable']),
timeout: tOptional(tNumber),
});
scheme.ElementHandleWaitForSelectorParams = tObject({
selector: tString,
strict: tOptional(tBoolean),
timeout: tOptional(tNumber),
state: tOptional(tEnum(['attached', 'detached', 'visible', 'hidden'])),
});
scheme.RequestResponseParams = tOptional(tObject({}));
scheme.RequestRawRequestHeadersParams = tOptional(tObject({}));
scheme.RouteAbortParams = tObject({
errorCode: tOptional(tString),
});
scheme.RouteContinueParams = tObject({
url: tOptional(tString),
method: tOptional(tString),
headers: tOptional(tArray(tType('NameValue'))),
postData: tOptional(tBinary),
});
scheme.RouteFulfillParams = tObject({
status: tOptional(tNumber),
headers: tOptional(tArray(tType('NameValue'))),
body: tOptional(tString),
isBase64: tOptional(tBoolean),
fetchResponseUid: tOptional(tString),
});
scheme.ResourceTiming = tObject({
startTime: tNumber,
domainLookupStart: tNumber,
domainLookupEnd: tNumber,
connectStart: tNumber,
secureConnectionStart: tNumber,
connectEnd: tNumber,
requestStart: tNumber,
responseStart: tNumber,
});
scheme.ResponseBodyParams = tOptional(tObject({}));
scheme.ResponseSecurityDetailsParams = tOptional(tObject({}));
scheme.ResponseServerAddrParams = tOptional(tObject({}));
scheme.ResponseRawResponseHeadersParams = tOptional(tObject({}));
scheme.ResponseSizesParams = tOptional(tObject({}));
scheme.SecurityDetails = tObject({
issuer: tOptional(tString),
protocol: tOptional(tString),
subjectName: tOptional(tString),
validFrom: tOptional(tNumber),
validTo: tOptional(tNumber),
});
scheme.RequestSizes = tObject({
requestBodySize: tNumber,
requestHeadersSize: tNumber,
responseBodySize: tNumber,
responseHeadersSize: tNumber,
});
scheme.RemoteAddr = tObject({
ipAddress: tString,
port: tNumber,
});
scheme.BindingCallRejectParams = tObject({
error: tType('SerializedError'),
});
scheme.BindingCallResolveParams = tObject({
result: tType('SerializedArgument'),
});
scheme.DialogAcceptParams = tObject({
promptText: tOptional(tString),
});
scheme.DialogDismissParams = tOptional(tObject({}));
scheme.ArtifactPathAfterFinishedParams = tOptional(tObject({}));
scheme.ArtifactSaveAsParams = tObject({
path: tString,
});
scheme.ArtifactSaveAsStreamParams = tOptional(tObject({}));
scheme.ArtifactFailureParams = tOptional(tObject({}));
scheme.ArtifactStreamParams = tOptional(tObject({}));
scheme.ArtifactCancelParams = tOptional(tObject({}));
scheme.ArtifactDeleteParams = tOptional(tObject({}));
scheme.StreamReadParams = tObject({
size: tOptional(tNumber),
});
scheme.StreamCloseParams = tOptional(tObject({}));
scheme.CDPSessionSendParams = tObject({
method: tString,
params: tOptional(tAny),
});
scheme.CDPSessionDetachParams = tOptional(tObject({}));
scheme.ElectronLaunchParams = tObject({
executablePath: tOptional(tString),
args: tOptional(tArray(tString)),
cwd: tOptional(tString),
env: tOptional(tArray(tType('NameValue'))),
timeout: tOptional(tNumber),
acceptDownloads: tOptional(tBoolean),
bypassCSP: tOptional(tBoolean),
colorScheme: tOptional(tEnum(['dark', 'light', 'no-preference'])),
extraHTTPHeaders: tOptional(tArray(tType('NameValue'))),
geolocation: tOptional(tObject({
longitude: tNumber,
latitude: tNumber,
accuracy: tOptional(tNumber),
})),
httpCredentials: tOptional(tObject({
username: tString,
password: tString,
})),
ignoreHTTPSErrors: tOptional(tBoolean),
locale: tOptional(tString),
offline: tOptional(tBoolean),
recordHar: tOptional(tObject({
omitContent: tOptional(tBoolean),
path: tString,
})),
recordVideo: tOptional(tObject({
dir: tString,
size: tOptional(tObject({
width: tNumber,
height: tNumber,
})),
})),
strictSelectors: tOptional(tBoolean),
timezoneId: tOptional(tString),
});
scheme.ElectronApplicationBrowserWindowParams = tObject({
page: tChannel('Page'),
});
scheme.ElectronApplicationEvaluateExpressionParams = tObject({
expression: tString,
isFunction: tOptional(tBoolean),
arg: tType('SerializedArgument'),
});
scheme.ElectronApplicationEvaluateExpressionHandleParams = tObject({
expression: tString,
isFunction: tOptional(tBoolean),
arg: tType('SerializedArgument'),
});
scheme.ElectronApplicationCloseParams = tOptional(tObject({}));
scheme.AndroidDevicesParams = tOptional(tObject({}));
scheme.AndroidSetDefaultTimeoutNoReplyParams = tObject({
timeout: tNumber,
});
scheme.AndroidSocketWriteParams = tObject({
data: tBinary,
});
scheme.AndroidSocketCloseParams = tOptional(tObject({}));
scheme.AndroidDeviceWaitParams = tObject({
selector: tType('AndroidSelector'),
state: tOptional(tEnum(['gone'])),
timeout: tOptional(tNumber),
});
scheme.AndroidDeviceFillParams = tObject({
selector: tType('AndroidSelector'),
text: tString,
timeout: tOptional(tNumber),
});
scheme.AndroidDeviceTapParams = tObject({
selector: tType('AndroidSelector'),
duration: tOptional(tNumber),
timeout: tOptional(tNumber),
});
scheme.AndroidDeviceDragParams = tObject({
selector: tType('AndroidSelector'),
dest: tType('Point'),
speed: tOptional(tNumber),
timeout: tOptional(tNumber),
});
scheme.AndroidDeviceFlingParams = tObject({
selector: tType('AndroidSelector'),
direction: tEnum(['up', 'down', 'left', 'right']),
speed: tOptional(tNumber),
timeout: tOptional(tNumber),
});
scheme.AndroidDeviceLongTapParams = tObject({
selector: tType('AndroidSelector'),
timeout: tOptional(tNumber),
});
scheme.AndroidDevicePinchCloseParams = tObject({
selector: tType('AndroidSelector'),
percent: tNumber,
speed: tOptional(tNumber),
timeout: tOptional(tNumber),
});
scheme.AndroidDevicePinchOpenParams = tObject({
selector: tType('AndroidSelector'),
percent: tNumber,
speed: tOptional(tNumber),
timeout: tOptional(tNumber),
});
scheme.AndroidDeviceScrollParams = tObject({
selector: tType('AndroidSelector'),
direction: tEnum(['up', 'down', 'left', 'right']),
percent: tNumber,
speed: tOptional(tNumber),
timeout: tOptional(tNumber),
});
scheme.AndroidDeviceSwipeParams = tObject({
selector: tType('AndroidSelector'),
direction: tEnum(['up', 'down', 'left', 'right']),
percent: tNumber,
speed: tOptional(tNumber),
timeout: tOptional(tNumber),
});
scheme.AndroidDeviceInfoParams = tObject({
selector: tType('AndroidSelector'),
});
scheme.AndroidDeviceScreenshotParams = tOptional(tObject({}));
scheme.AndroidDeviceInputTypeParams = tObject({
text: tString,
});
scheme.AndroidDeviceInputPressParams = tObject({
key: tString,
});
scheme.AndroidDeviceInputTapParams = tObject({
point: tType('Point'),
});
scheme.AndroidDeviceInputSwipeParams = tObject({
segments: tArray(tType('Point')),
steps: tNumber,
});
scheme.AndroidDeviceInputDragParams = tObject({
from: tType('Point'),
to: tType('Point'),
steps: tNumber,
});
scheme.AndroidDeviceLaunchBrowserParams = tObject({
pkg: tOptional(tString),
ignoreHTTPSErrors: tOptional(tBoolean),
javaScriptEnabled: tOptional(tBoolean),
bypassCSP: tOptional(tBoolean),
userAgent: tOptional(tString),
locale: tOptional(tString),
timezoneId: tOptional(tString),
geolocation: tOptional(tObject({
longitude: tNumber,
latitude: tNumber,
accuracy: tOptional(tNumber),
})),
permissions: tOptional(tArray(tString)),
extraHTTPHeaders: tOptional(tArray(tType('NameValue'))),
offline: tOptional(tBoolean),
httpCredentials: tOptional(tObject({
username: tString,
password: tString,
})),
deviceScaleFactor: tOptional(tNumber),
isMobile: tOptional(tBoolean),
hasTouch: tOptional(tBoolean),
colorScheme: tOptional(tEnum(['dark', 'light', 'no-preference'])),
reducedMotion: tOptional(tEnum(['reduce', 'no-preference'])),
forcedColors: tOptional(tEnum(['active', 'none'])),
acceptDownloads: tOptional(tBoolean),
recordVideo: tOptional(tObject({
dir: tString,
size: tOptional(tObject({
width: tNumber,
height: tNumber,
})),
})),
recordHar: tOptional(tObject({
omitContent: tOptional(tBoolean),
path: tString,
})),
strictSelectors: tOptional(tBoolean),
proxy: tOptional(tObject({
server: tString,
bypass: tOptional(tString),
username: tOptional(tString),
password: tOptional(tString),
})),
});
scheme.AndroidDeviceOpenParams = tObject({
command: tString,
});
scheme.AndroidDeviceShellParams = tObject({
command: tString,
});
scheme.AndroidDeviceInstallApkParams = tObject({
file: tBinary,
args: tOptional(tArray(tString)),
});
scheme.AndroidDevicePushParams = tObject({
file: tBinary,
path: tString,
mode: tOptional(tNumber),
});
scheme.AndroidDeviceSetDefaultTimeoutNoReplyParams = tObject({
timeout: tNumber,
});
scheme.AndroidDeviceConnectToWebViewParams = tObject({
pid: tNumber,
});
scheme.AndroidDeviceCloseParams = tOptional(tObject({}));
scheme.AndroidWebView = tObject({
pid: tNumber,
pkg: tString,
});
scheme.AndroidSelector = tObject({
checkable: tOptional(tBoolean),
checked: tOptional(tBoolean),
clazz: tOptional(tString),
clickable: tOptional(tBoolean),
depth: tOptional(tNumber),
desc: tOptional(tString),
enabled: tOptional(tBoolean),
focusable: tOptional(tBoolean),
focused: tOptional(tBoolean),
hasChild: tOptional(tObject({
selector: tType('AndroidSelector'),
})),
hasDescendant: tOptional(tObject({
selector: tType('AndroidSelector'),
maxDepth: tOptional(tNumber),
})),
longClickable: tOptional(tBoolean),
pkg: tOptional(tString),
res: tOptional(tString),
scrollable: tOptional(tBoolean),
selected: tOptional(tBoolean),
text: tOptional(tString),
});
scheme.AndroidElementInfo = tObject({
children: tOptional(tArray(tType('AndroidElementInfo'))),
clazz: tString,
desc: tString,
res: tString,
pkg: tString,
text: tString,
bounds: tType('Rect'),
checkable: tBoolean,
checked: tBoolean,
clickable: tBoolean,
enabled: tBoolean,
focusable: tBoolean,
focused: tBoolean,
longClickable: tBoolean,
scrollable: tBoolean,
selected: tBoolean,
});
scheme.JsonPipeSendParams = tObject({
message: tAny,
});
scheme.JsonPipeCloseParams = tOptional(tObject({}));
return scheme;
}
|
});
|
get_web_app_swift_virtual_network_connection.py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'GetWebAppSwiftVirtualNetworkConnectionResult',
'AwaitableGetWebAppSwiftVirtualNetworkConnectionResult',
'get_web_app_swift_virtual_network_connection',
]
@pulumi.output_type
class GetWebAppSwiftVirtualNetworkConnectionResult:
"""
Swift Virtual Network Contract. This is used to enable the new Swift way of doing virtual network integration.
"""
def __init__(__self__, id=None, kind=None, name=None, subnet_resource_id=None, swift_supported=None, type=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if kind and not isinstance(kind, str):
raise TypeError("Expected argument 'kind' to be a str")
pulumi.set(__self__, "kind", kind)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if subnet_resource_id and not isinstance(subnet_resource_id, str):
raise TypeError("Expected argument 'subnet_resource_id' to be a str")
pulumi.set(__self__, "subnet_resource_id", subnet_resource_id)
if swift_supported and not isinstance(swift_supported, bool):
raise TypeError("Expected argument 'swift_supported' to be a bool")
pulumi.set(__self__, "swift_supported", swift_supported)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def kind(self) -> Optional[str]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource Name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="subnetResourceId")
def subnet_resource_id(self) -> Optional[str]:
"""
The Virtual Network subnet's resource ID. This is the subnet that this Web App will join. This subnet must have a delegation to Microsoft.Web/serverFarms defined first.
"""
return pulumi.get(self, "subnet_resource_id")
@property
@pulumi.getter(name="swiftSupported")
def swift_supported(self) -> Optional[bool]:
"""
A flag that specifies if the scale unit this Web App is on supports Swift integration.
"""
return pulumi.get(self, "swift_supported")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type.
"""
return pulumi.get(self, "type")
class AwaitableGetWebAppSwiftVirtualNetworkConnectionResult(GetWebAppSwiftVirtualNetworkConnectionResult):
# pylint: disable=using-constant-test
|
def get_web_app_swift_virtual_network_connection(name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetWebAppSwiftVirtualNetworkConnectionResult:
"""
Swift Virtual Network Contract. This is used to enable the new Swift way of doing virtual network integration.
:param str name: Name of the app.
:param str resource_group_name: Name of the resource group to which the resource belongs.
"""
__args__ = dict()
__args__['name'] = name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:web/v20181101:getWebAppSwiftVirtualNetworkConnection', __args__, opts=opts, typ=GetWebAppSwiftVirtualNetworkConnectionResult).value
return AwaitableGetWebAppSwiftVirtualNetworkConnectionResult(
id=__ret__.id,
kind=__ret__.kind,
name=__ret__.name,
subnet_resource_id=__ret__.subnet_resource_id,
swift_supported=__ret__.swift_supported,
type=__ret__.type)
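# A minimal usage sketch (hypothetical resource names, assuming a configured
# Pulumi Azure NextGen project):
#
#   conn = get_web_app_swift_virtual_network_connection(
#       name="my-web-app",
#       resource_group_name="my-resource-group")
#   pulumi.export("subnetResourceId", conn.subnet_resource_id)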
|
def __await__(self):
if False:
yield self
return GetWebAppSwiftVirtualNetworkConnectionResult(
id=self.id,
kind=self.kind,
name=self.name,
subnet_resource_id=self.subnet_resource_id,
swift_supported=self.swift_supported,
type=self.type)
|
exceptions.py
|
# Copyright (C) 2018 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Model-related exceptions and related logic."""
import re
from logging import getLogger
from sqlalchemy.exc import IntegrityError
logger = getLogger(__name__)
def field_lookup(field_string):
"""Find relevant error field for UNIQUE violation in SQL error message."""
bad_field = 'code' # assumed to be the default field
if field_string.startswith('uq_t_'):
bad_field = 'title'
elif field_string.endswith('email'):
bad_field = 'email'
elif field_string.endswith('title'):
bad_field = 'title'
return bad_field
def translate_message(exception):
"""
Translates db exceptions to something a user can understand.
"""
message = exception.message
if isinstance(exception, IntegrityError):
# TODO: Handle not null, foreign key, uniqueness errors with compound keys
code, _ = exception.orig.args
if code == 1062: # duplicate entry ... for key ...
pattern = re.compile(r"Duplicate entry ('.*') for key '(.*)'")
matches = pattern.search(message)
if matches:
logger.exception(exception)
return (u"The value {value} is already used for another {key}. "
u"{key} values must be unique."
.format(value=matches.group(1),
key=field_lookup(matches.group(2))))
elif code == 1452: # cannot add or update a child row: a foreign key constraint fails
pattern = re.compile(
r"foreign key constraint fails \(`.+`.`(.+)`, CONSTRAINT `.+` "
r"FOREIGN KEY \(`.+`\) REFERENCES `(.+)` \(`.+`\)\)"
)
matches = pattern.search(message)
if matches:
from_, to_ = matches.groups()
return (u"This request will break a mandatory relationship "
u"from {from_} to {to_}."
.format(from_=from_, to_=to_))
return message
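# Worked example (hypothetical error text): an IntegrityError whose orig code is
# 1062 and whose message is "Duplicate entry 'ABC-1' for key 'uq_t_programs'"
# is translated by translate_message() into:
#   "The value 'ABC-1' is already used for another title. title values must be unique."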
class
|
(ValueError):
pass
class StatusValidationError(ValidationError):
pass
class ReservedNameError(ValueError):
pass
|
ValidationError
|
progress.rs
|
use std::cmp;
use std::env;
use std::time::{Duration, Instant};
use crate::core::shell::Verbosity;
use crate::util::config::ProgressWhen;
use crate::util::{CargoResult, Config};
use cargo_util::is_ci;
use unicode_width::UnicodeWidthChar;
pub struct Progress<'cfg> {
state: Option<State<'cfg>>,
}
pub enum ProgressStyle {
Percentage,
Ratio,
}
struct Throttle {
first: bool,
last_update: Instant,
}
struct State<'cfg> {
config: &'cfg Config,
format: Format,
name: String,
done: bool,
throttle: Throttle,
last_line: Option<String>,
fixed_width: Option<usize>,
}
struct Format {
style: ProgressStyle,
max_width: usize,
max_print: usize,
}
impl<'cfg> Progress<'cfg> {
pub fn with_style(name: &str, style: ProgressStyle, cfg: &'cfg Config) -> Progress<'cfg> {
// report no progress when -q (for quiet) or TERM=dumb are set
// or if running on a Continuous Integration service like Travis, where the
// output logs get mangled.
let dumb = match env::var("TERM") {
Ok(term) => term == "dumb",
Err(_) => false,
};
let progress_config = cfg.progress_config();
match progress_config.when {
ProgressWhen::Always => return Progress::new_priv(name, style, cfg),
ProgressWhen::Never => return Progress { state: None },
ProgressWhen::Auto => {}
}
if cfg.shell().verbosity() == Verbosity::Quiet || dumb || is_ci() {
return Progress { state: None };
}
Progress::new_priv(name, style, cfg)
}
fn new_priv(name: &str, style: ProgressStyle, cfg: &'cfg Config) -> Progress<'cfg> {
let progress_config = cfg.progress_config();
let width = progress_config
.width
.or_else(|| cfg.shell().err_width().progress_max_width());
Progress {
state: width.map(|n| State {
config: cfg,
format: Format {
style,
max_width: n,
// 50 gives some space for text after the progress bar,
// even on narrow (e.g. 80 char) terminals.
max_print: 50,
},
name: name.to_string(),
done: false,
throttle: Throttle::new(),
last_line: None,
fixed_width: progress_config.width,
}),
}
}
pub fn disable(&mut self) {
self.state = None;
}
pub fn is_enabled(&self) -> bool {
self.state.is_some()
}
pub fn new(name: &str, cfg: &'cfg Config) -> Progress<'cfg> {
Self::with_style(name, ProgressStyle::Percentage, cfg)
}
pub fn tick(&mut self, cur: usize, max: usize) -> CargoResult<()> {
let s = match &mut self.state {
Some(s) => s,
None => return Ok(()),
};
// Don't update too often as it can cause excessive performance loss
// just putting stuff onto the terminal. We also want to avoid
// flickering by not drawing anything that goes away too quickly. As a
// result we've got two branches here:
//
// 1. If we haven't drawn anything, we wait for a period of time to
// actually start drawing to the console. This ensures that
// short-lived operations don't flicker on the console. Currently
// there's a 500ms delay to when we first draw something.
// 2. If we've drawn something, then we rate limit ourselves to only
// draw to the console every so often. Currently there's a 100ms
// delay between updates.
if !s.throttle.allowed() {
return Ok(());
}
s.tick(cur, max, "")
}
pub fn tick_now(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> {
match self.state {
Some(ref mut s) => s.tick(cur, max, msg),
None => Ok(()),
}
}
pub fn update_allowed(&mut self) -> bool {
match &mut self.state {
Some(s) => s.throttle.allowed(),
None => false,
}
}
pub fn print_now(&mut self, msg: &str) -> CargoResult<()> {
match &mut self.state {
Some(s) => s.print("", msg),
None => Ok(()),
}
}
pub fn clear(&mut self) {
if let Some(ref mut s) = self.state {
s.clear();
}
}
}
impl Throttle {
fn new() -> Throttle {
Throttle {
first: true,
last_update: Instant::now(),
}
}
fn allowed(&mut self) -> bool {
if self.first {
let delay = Duration::from_millis(500);
if self.last_update.elapsed() < delay {
return false;
}
} else {
let interval = Duration::from_millis(100);
if self.last_update.elapsed() < interval {
return false;
}
}
self.update();
true
}
fn update(&mut self) {
self.first = false;
self.last_update = Instant::now();
}
}
impl<'cfg> State<'cfg> {
fn tick(&mut self, cur: usize, max: usize, msg: &str) -> CargoResult<()> {
if self.done {
return Ok(());
}
if max > 0 && cur == max {
self.done = true;
}
// Write out a pretty header, then the progress bar itself, and then
// return to the beginning of the line for the next print.
self.try_update_max_width();
if let Some(pbar) = self.format.progress(cur, max) {
self.print(&pbar, msg)?;
}
Ok(())
}
fn print(&mut self, prefix: &str, msg: &str) -> CargoResult<()> {
self.throttle.update();
self.try_update_max_width();
// make sure we have enough room for the header
if self.format.max_width < 15 {
return Ok(());
}
let mut line = prefix.to_string();
self.format.render(&mut line, msg);
while line.len() < self.format.max_width - 15 {
line.push(' ');
}
// Only update if the line has changed.
if self.config.shell().is_cleared() || self.last_line.as_ref() != Some(&line) {
let mut shell = self.config.shell();
shell.set_needs_clear(false);
shell.status_header(&self.name)?;
write!(shell.err(), "{}\r", line)?;
self.last_line = Some(line);
shell.set_needs_clear(true);
}
Ok(())
}
fn clear(&mut self) {
// No need to clear if the progress is not currently being displayed.
if self.last_line.is_some() && !self.config.shell().is_cleared() {
self.config.shell().err_erase_line();
self.last_line = None;
}
}
fn try_update_max_width(&mut self) {
if self.fixed_width.is_none() {
if let Some(n) = self.config.shell().err_width().progress_max_width() {
self.format.max_width = n;
}
}
}
}
impl Format {
fn progress(&self, cur: usize, max: usize) -> Option<String> {
assert!(cur <= max);
// Render the percentage at the far right and then figure out how long the
// progress bar is.
let pct = (cur as f64) / (max as f64);
let pct = if !pct.is_finite() { 0.0 } else { pct };
let stats = match self.style {
ProgressStyle::Percentage => format!(" {:6.02}%", pct * 100.0),
ProgressStyle::Ratio => format!(" {}/{}", cur, max),
};
let extra_len = stats.len() + 2 /* [ and ] */ + 15 /* status header */;
let display_width = match self.width().checked_sub(extra_len) {
Some(n) => n,
None => return None,
};
let mut string = String::with_capacity(self.max_width);
string.push('[');
let hashes = display_width as f64 * pct;
let hashes = hashes as usize;
// Draw the `===>`
if hashes > 0 {
for _ in 0..hashes - 1 {
string.push('=');
}
if cur == max {
string.push('=');
} else {
string.push('>');
}
}
// Draw the empty space we have left to do
for _ in 0..(display_width - hashes) {
string.push(' ');
}
string.push(']');
string.push_str(&stats);
Some(string)
}
fn render(&self, string: &mut String, msg: &str) {
let mut avail_msg_len = self.max_width - string.len() - 15;
let mut ellipsis_pos = 0;
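// `ellipsis_pos` tracks the last length of `string` at which a trailing "..."
// would still fit in the remaining width, so the message can be truncated
// there if it turns out to be too long.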
if avail_msg_len <= 3 {
return;
}
for c in msg.chars() {
let display_width = c.width().unwrap_or(0);
if avail_msg_len >= display_width {
avail_msg_len -= display_width;
string.push(c);
if avail_msg_len >= 3 {
ellipsis_pos = string.len();
}
} else {
string.truncate(ellipsis_pos);
string.push_str("...");
break;
}
}
}
#[cfg(test)]
fn progress_status(&self, cur: usize, max: usize, msg: &str) -> Option<String> {
let mut ret = self.progress(cur, max)?;
self.render(&mut ret, msg);
Some(ret)
}
fn width(&self) -> usize {
cmp::min(self.max_width, self.max_print)
}
}
impl<'cfg> Drop for State<'cfg> {
fn drop(&mut self) {
self.clear();
}
}
#[test]
fn test_progress_status()
|
(
format.progress_status(76, 77, ""),
Some("[=============> ] 98.70%".to_string())
);
assert_eq!(
format.progress_status(77, 77, ""),
Some("[===============] 100.00%".to_string())
);
}
#[test]
fn test_progress_status_too_short() {
let format = Format {
style: ProgressStyle::Percentage,
max_print: 25,
max_width: 25,
};
assert_eq!(
format.progress_status(1, 1, ""),
Some("[] 100.00%".to_string())
);
let format = Format {
style: ProgressStyle::Percentage,
max_print: 24,
max_width: 24,
};
assert_eq!(format.progress_status(1, 1, ""), None);
}
|
{
let format = Format {
style: ProgressStyle::Ratio,
max_print: 40,
max_width: 60,
};
assert_eq!(
format.progress_status(0, 4, ""),
Some("[ ] 0/4".to_string())
);
assert_eq!(
format.progress_status(1, 4, ""),
Some("[===> ] 1/4".to_string())
);
assert_eq!(
format.progress_status(2, 4, ""),
Some("[========> ] 2/4".to_string())
);
assert_eq!(
format.progress_status(3, 4, ""),
Some("[=============> ] 3/4".to_string())
);
assert_eq!(
format.progress_status(4, 4, ""),
Some("[===================] 4/4".to_string())
);
assert_eq!(
format.progress_status(3999, 4000, ""),
Some("[===========> ] 3999/4000".to_string())
);
assert_eq!(
format.progress_status(4000, 4000, ""),
Some("[=============] 4000/4000".to_string())
);
assert_eq!(
format.progress_status(3, 4, ": short message"),
Some("[=============> ] 3/4: short message".to_string())
);
assert_eq!(
format.progress_status(3, 4, ": msg thats just fit"),
Some("[=============> ] 3/4: msg thats just fit".to_string())
);
assert_eq!(
format.progress_status(3, 4, ": msg that's just fit"),
Some("[=============> ] 3/4: msg that's just...".to_string())
);
// combining diacritics have width zero and thus can fit max_width.
let zalgo_msg = "z̸̧̢̗͉̝̦͍̱ͧͦͨ̑̅̌ͥ́͢a̢ͬͨ̽ͯ̅̑ͥ͋̏̑ͫ̄͢͏̫̝̪̤͎̱̣͍̭̞̙̱͙͍̘̭͚l̶̡̛̥̝̰̭̹̯̯̞̪͇̱̦͙͔̘̼͇͓̈ͨ͗ͧ̓͒ͦ̀̇ͣ̈ͭ͊͛̃̑͒̿̕͜g̸̷̢̩̻̻͚̠͓̞̥͐ͩ͌̑ͥ̊̽͋͐̐͌͛̐̇̑ͨ́ͅo͙̳̣͔̰̠̜͕͕̞̦̙̭̜̯̹̬̻̓͑ͦ͋̈̉͌̃ͯ̀̂͠ͅ ̸̡͎̦̲̖̤̺̜̮̱̰̥͔̯̅̏ͬ̂ͨ̋̃̽̈́̾̔̇ͣ̚͜͜h̡ͫ̐̅̿̍̀͜҉̛͇̭̹̰̠͙̞ẽ̶̙̹̳̖͉͎̦͂̋̓ͮ̔ͬ̐̀͂̌͑̒͆̚͜͠ ͓͓̟͍̮̬̝̝̰͓͎̼̻ͦ͐̾̔͒̃̓͟͟c̮̦͍̺͈͚̯͕̄̒͐̂͊̊͗͊ͤͣ̀͘̕͝͞o̶͍͚͍̣̮͌ͦ̽̑ͩ̅ͮ̐̽̏͗́͂̅ͪ͠m̷̧͖̻͔̥̪̭͉͉̤̻͖̩̤͖̘ͦ̂͌̆̂ͦ̒͊ͯͬ͊̉̌ͬ͝͡e̵̹̣͍̜̺̤̤̯̫̹̠̮͎͙̯͚̰̼͗͐̀̒͂̉̀̚͝͞s̵̲͍͙͖̪͓͓̺̱̭̩̣͖̣ͤͤ͂̎̈͗͆ͨͪ̆̈͗͝͠";
assert_eq!(
format.progress_status(3, 4, zalgo_msg),
Some("[=============> ] 3/4".to_string() + zalgo_msg)
);
// some non-ASCII ellipsize test
assert_eq!(
format.progress_status(3, 4, "_123456789123456e\u{301}\u{301}8\u{301}90a"),
Some("[=============> ] 3/4_123456789123456e\u{301}\u{301}...".to_string())
);
assert_eq!(
format.progress_status(3, 4, ":每個漢字佔據了兩個字元"),
Some("[=============> ] 3/4:每個漢字佔據了...".to_string())
);
}
#[test]
fn test_progress_status_percentage() {
let format = Format {
style: ProgressStyle::Percentage,
max_print: 40,
max_width: 60,
};
assert_eq!(
format.progress_status(0, 77, ""),
Some("[ ] 0.00%".to_string())
);
assert_eq!(
format.progress_status(1, 77, ""),
Some("[ ] 1.30%".to_string())
);
assert_eq!
|
serializers.py
|
from django.shortcuts import get_object_or_404
from rest_framework import serializers
from .models import Category, Comment, Genre, Review, Title
class CategorySerializer(serializers.ModelSerializer):
class Meta:
model = Category
fields = (
"name",
"slug",
)
class GenreSerializer(serializers.ModelSerializer):
class Meta:
model = Genre
fields = (
"name",
"slug",
)
class TitleReadSerializer(serializers.ModelSerializer):
genre = GenreSerializer(many=True, read_only=True)
category = CategorySerializer(read_only=True)
class Meta:
model = Title
fields = "__all__"
class TitleCreateSerializer(serializers.ModelSerializer):
genre = serializers.SlugRelatedField(
slug_field="slug", many=True, queryset=Genre.objects.all()
)
category = serializers.SlugRelatedField(
slug_field="slug", queryset=Category.objects.all()
)
class Meta:
model = Title
fields = "__all__"
class ReviewSerializer(serializers.ModelSerializer):
|
rializer(serializers.ModelSerializer):
author = serializers.SlugRelatedField(
slug_field="username", read_only=True
)
class Meta:
model = Comment
exclude = ("review",)
|
author = serializers.SlugRelatedField(
slug_field="username", read_only=True
)
class Meta:
model = Review
exclude = ("title",)
def validate(self, attrs):
if (
Review.objects.filter(
author=self.context["request"].user, title=self.get_title()
).exists()
and self.context["request"].method != "PATCH"
):
raise serializers.ValidationError("You have already left a review")
return attrs
def get_title(self):
title = get_object_or_404(
Title, id=self.context.get("view").kwargs.get("title_id")
)
return title
class CommentSe
|
lib.rs
|
mod byte_hunk;
mod byte_patch;
mod byte_rewrite;
mod byte_region;
mod hunk_ref;
pub use crate::byte_hunk::ByteHunk;
pub use crate::byte_patch::Patch;
pub use crate::byte_rewrite::Rewrite;
pub use crate::byte_region::Region;
pub use crate::hunk_ref::Ref;
// Stuff goes here
// ===========================================================
// Hunk
// ===========================================================
pub trait Hunk {
/// Determine length of this hunk
fn len(&self) -> usize;
/// Read a byte from this hunk
fn read_u8(&self,offset: usize) -> u8;
}
pub fn from_bytes<'a>(bytes: &'a [u8]) -> ByteHunk<'a>
|
{
ByteHunk::new(bytes)
}
|
|
notifications.py
|
from django.core.mail import send_mail
from django.template.loader import render_to_string
from django.conf import settings
class BaseEmailSponsorshipNotification:
|
subject_template = None
message_template = None
email_context_keys = None
def get_subject(self, context):
return render_to_string(self.subject_template, context).strip()
def get_message(self, context):
return render_to_string(self.message_template, context).strip()
def get_recipient_list(self, context):
raise NotImplementedError
def notify(self, **kwargs):
context = {k: kwargs.get(k) for k in self.email_context_keys}
send_mail(
subject=self.get_subject(context),
message=self.get_message(context),
recipient_list=self.get_recipient_list(context),
from_email=settings.SPONSORSHIP_NOTIFICATION_FROM_EMAIL,
)
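# The subclasses below are thin notification types: each one only supplies the
# templates, the context keys and the recipient list, while the rendering and
# sending logic stays in the base class.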
class AppliedSponsorshipNotificationToPSF(BaseEmailSponsorshipNotification):
subject_template = "sponsors/email/psf_new_application_subject.txt"
message_template = "sponsors/email/psf_new_application.txt"
email_context_keys = ["request", "sponsorship"]
def get_recipient_list(self, context):
return [settings.SPONSORSHIP_NOTIFICATION_TO_EMAIL]
class AppliedSponsorshipNotificationToSponsors(BaseEmailSponsorshipNotification):
subject_template = "sponsors/email/sponsor_new_application_subject.txt"
message_template = "sponsors/email/sponsor_new_application.txt"
email_context_keys = ["sponsorship"]
def get_recipient_list(self, context):
return context["sponsorship"].verified_emails
class RejectedSponsorshipNotificationToPSF(BaseEmailSponsorshipNotification):
subject_template = "sponsors/email/psf_rejected_sponsorship_subject.txt"
message_template = "sponsors/email/psf_rejected_sponsorship.txt"
email_context_keys = ["sponsorship"]
def get_recipient_list(self, context):
return [settings.SPONSORSHIP_NOTIFICATION_TO_EMAIL]
class RejectedSponsorshipNotificationToSponsors(BaseEmailSponsorshipNotification):
subject_template = "sponsors/email/sponsor_rejected_sponsorship_subject.txt"
message_template = "sponsors/email/sponsor_rejected_sponsorship.txt"
email_context_keys = ["sponsorship"]
def get_recipient_list(self, context):
return context["sponsorship"].verified_emails
class StatementOfWorkNotificationToPSF(BaseEmailSponsorshipNotification):
subject_template = "sponsors/email/psf_statement_of_work_subject.txt"
message_template = "sponsors/email/psf_statement_of_work.txt"
email_context_keys = ["sponsorship"]
def get_recipient_list(self, context):
return [settings.SPONSORSHIP_NOTIFICATION_TO_EMAIL]
class StatementOfWorkNotificationToSponsors(BaseEmailSponsorshipNotification):
subject_template = "sponsors/email/sponsor_statement_of_work_subject.txt"
message_template = "sponsors/email/sponsor_statement_of_work.txt"
email_context_keys = ["sponsorship"]
def get_recipient_list(self, context):
return context["sponsorship"].verified_emails
| |
panel_group.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
File: panel_group.py
Description: Group Panel for Python SDK sample.
"""
import os
import wx
import wx.lib.scrolledpanel as scrolled
import util
import model
from view import base
class GroupPanel(base.MyPanel):
"""Group Panel."""
def __init__(self, parent):
super(GroupPanel, self).__init__(parent)
self.face_paths = []
self.faces = {}
self.vsizer = wx.BoxSizer(wx.VERTICAL)
self.panel = scrolled.ScrolledPanel(self)
self.hsizer = wx.BoxSizer()
self.hsizer.AddStretchSpacer()
self.hvsizer = wx.BoxSizer(wx.VERTICAL)
self.hvsizer.SetMinSize((util.INNER_PANEL_WIDTH, -1))
label = ('Click the button below to select a folder containing face '
'images.\nThe images will be grouped based on similarity.\n'
'You will see the different groups under the "Grouping '
'Results" label.')
self.static_text = wx.StaticText(self.panel, label=label)
self.static_text.Wrap(util.INNER_PANEL_WIDTH)
self.hvsizer.Add(self.static_text, 0, wx.ALL, 0)
self.vhsizer = wx.BoxSizer()
self.lsizer = wx.BoxSizer(wx.VERTICAL)
self.lsizer.SetMinSize((util.MAX_IMAGE_SIZE, -1))
flag = wx.EXPAND | wx.ALIGN_CENTER | wx.ALL
self.btn = wx.Button(self.panel, label='Group')
self.lsizer.Add(self.btn, 0, flag, 5)
self.Bind(wx.EVT_BUTTON, self.OnChooseFolder, self.btn)
self.grid = base.MyGridStaticBitmap(self.panel, 0, 4, 0, 0)
self.lsizer.Add(self.grid, 0, wx.ALL, 5)
self.vhsizer.Add(self.lsizer, 1, wx.EXPAND)
self.vhsizer.AddSpacer(90)
self.rsizer = wx.BoxSizer(wx.VERTICAL)
self.rsizer.SetMinSize((util.MAX_IMAGE_SIZE, -1))
flag = wx.ALIGN_CENTER | wx.EXPAND | wx.ALL
self.result_text = wx.StaticText(self.panel, label='Grouping Results:')
self.rsizer.Add(self.result_text, 0, flag, 5)
self.result = base.GroupResult(self.panel)
self.rsizer.Add(self.result, 0, wx.EXPAND)
self.vhsizer.Add(self.rsizer, 1, wx.EXPAND)
self.hvsizer.Add(self.vhsizer)
self.hsizer.Add(self.hvsizer)
self.hsizer.AddStretchSpacer()
self.hsizer.Layout()
self.panel.SetSizer(self.hsizer)
self.panel.Layout()
self.panel.SetupScrolling(scroll_x=False)
self.vsizer.Add(self.panel, 3, wx.EXPAND)
self.log = base.MyLog(self)
self.vsizer.Add(self.log, 1, wx.EXPAND)
self.SetSizerAndFit(self.vsizer)
def OnChooseFolder(self, evt):
|
"""Choose Folder."""
dlg = wx.DirDialog(self)
if dlg.ShowModal() == wx.ID_OK:
path = dlg.GetPath()
del self.face_paths[:]
self.faces.clear()
for root, dirs, files in os.walk(path):
if files:
self.face_paths.extend(
[os.path.join(root, filename) for filename in files])
self.btn.Disable()
self.log.log((
'Request: Preparing faces for grouping, detecting faces in '
'chosen folder.'))
self.grid.set_paths(self.face_paths)
for path in self.face_paths:
try:
res = util.CF.face.detect(path)
except util.CF.CognitiveFaceException:
continue
for entry in res:
face = model.Face(entry, path)
self.faces[face.id] = face
self.grid.set_faces(self.faces.values())
self.log.log('Response: Success. In total, {0} faces were detected.'.
format(len(self.faces)))
self.log.log(
'Request: Grouping {0} faces.'.format(len(self.faces)))
res = util.CF.face.group(self.faces.keys())
self.result.set_data(self.faces, res)
len_groups = len(res['groups'])
if res.get('messyGroup'):
len_groups += 1
self.log.log(
'Response: Success. {0} faces grouped into {1} groups'.format(
len(self.faces), len_groups))
self.btn.Enable()
|
|
NotFound.tsx
|
import { FC, ReactElement } from 'react'
export interface NotFoundProps {
children: ReactElement
}
|
const NotFound: NotFoundComponent = ({ children }) => {
return children
}
NotFound.isRoute = false
NotFound.isNotFound = true
export default NotFound
|
export interface NotFoundComponent extends FC<NotFoundProps> {
isRoute: boolean
isNotFound: boolean
}
|
scan.rs
|
use super::*;
use crate::logical_plan::CsvParserOptions;
#[cfg(feature = "ipc")]
use crate::logical_plan::IpcOptions;
use crate::utils::try_path_to_str;
use polars_io::prelude::*;
use polars_io::{csv::CsvEncoding, ScanAggregation};
#[cfg(any(feature = "ipc", feature = "parquet"))]
use std::fs::File;
use std::mem;
use std::path::Path;
fn cache_hit(
path: &Path,
predicate: &Option<Arc<dyn PhysicalExpr>>,
state: &ExecutionState,
) -> (String, Option<DataFrame>) {
let path_str = try_path_to_str(path).unwrap();
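// The cache key combines the file path with a debug rendering of the optional
// predicate, so scans of the same file under different filters do not collide
// in the cache.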
let cache_key = match predicate {
Some(predicate) => format!("{}{:?}", path_str, predicate.as_expression()),
None => path_str.to_string(),
};
let cached = state.cache_hit(&cache_key);
(cache_key, cached)
}
#[cfg(any(feature = "ipc", feature = "parquet"))]
type Projection = Option<Vec<usize>>;
#[cfg(any(feature = "ipc", feature = "parquet"))]
type StopNRows = Option<usize>;
#[cfg(any(feature = "ipc", feature = "parquet"))]
type Aggregation<'a> = Option<&'a [ScanAggregation]>;
#[cfg(any(feature = "ipc", feature = "parquet"))]
type Predicate = Option<Arc<dyn PhysicalIoExpr>>;
#[cfg(any(feature = "ipc", feature = "parquet"))]
fn prepare_scan_args<'a>(
path: &Path,
predicate: &Option<Arc<dyn PhysicalExpr>>,
with_columns: &mut Option<Vec<String>>,
schema: &mut SchemaRef,
stop_after_n_rows: Option<usize>,
aggregate: &'a [ScanAggregation],
) -> (File, Projection, StopNRows, Aggregation<'a>, Predicate) {
let file = std::fs::File::open(&path).unwrap();
let with_columns = mem::take(with_columns);
let schema = mem::take(schema);
let projection: Option<Vec<_>> = with_columns.map(|with_columns| {
with_columns
.iter()
.map(|name| schema.column_with_name(name).unwrap().0)
.collect()
});
let stop_after_n_rows = set_n_rows(stop_after_n_rows);
let aggregate = if aggregate.is_empty() {
None
} else {
Some(aggregate)
};
let predicate = predicate
.clone()
.map(|expr| Arc::new(PhysicalIoHelper { expr }) as Arc<dyn PhysicalIoExpr>);
(file, projection, stop_after_n_rows, aggregate, predicate)
}
#[cfg(feature = "ipc")]
pub struct IpcExec {
pub(crate) path: PathBuf,
pub(crate) schema: SchemaRef,
pub(crate) predicate: Option<Arc<dyn PhysicalExpr>>,
pub(crate) aggregate: Vec<ScanAggregation>,
pub(crate) options: IpcOptions,
}
#[cfg(feature = "ipc")]
impl Executor for IpcExec {
fn execute(&mut self, state: &ExecutionState) -> Result<DataFrame> {
let (cache_key, cached) = cache_hit(&self.path, &self.predicate, state);
if let Some(df) = cached {
return Ok(df);
}
let (file, projection, stop_after_n_rows, aggregate, predicate) = prepare_scan_args(
&self.path,
&self.predicate,
&mut self.options.with_columns,
&mut self.schema,
self.options.stop_after_n_rows,
&self.aggregate,
);
let df = IpcReader::new(file)
.with_stop_after_n_rows(stop_after_n_rows)
.finish_with_scan_ops(
predicate,
aggregate,
projection.as_ref().map(|v| v.as_ref()),
)?;
if self.options.cache {
state.store_cache(cache_key, df.clone())
}
if state.verbose {
println!("ipc {:?} read", self.path);
}
Ok(df)
}
}
#[cfg(feature = "parquet")]
pub struct ParquetExec {
path: PathBuf,
schema: SchemaRef,
with_columns: Option<Vec<String>>,
predicate: Option<Arc<dyn PhysicalExpr>>,
aggregate: Vec<ScanAggregation>,
stop_after_n_rows: Option<usize>,
cache: bool,
}
#[cfg(feature = "parquet")]
impl ParquetExec {
pub(crate) fn new(
path: PathBuf,
schema: SchemaRef,
with_columns: Option<Vec<String>>,
predicate: Option<Arc<dyn PhysicalExpr>>,
aggregate: Vec<ScanAggregation>,
stop_after_n_rows: Option<usize>,
cache: bool,
) -> Self {
ParquetExec {
path,
schema,
with_columns,
predicate,
aggregate,
stop_after_n_rows,
cache,
}
}
}
#[cfg(feature = "parquet")]
impl Executor for ParquetExec {
fn execute(&mut self, state: &ExecutionState) -> Result<DataFrame> {
let (cache_key, cached) = cache_hit(&self.path, &self.predicate, state);
if let Some(df) = cached {
return Ok(df);
}
let (file, projection, stop_after_n_rows, aggregate, predicate) = prepare_scan_args(
&self.path,
&self.predicate,
&mut self.with_columns,
&mut self.schema,
self.stop_after_n_rows,
&self.aggregate,
);
let df = ParquetReader::new(file)
.with_stop_after_n_rows(stop_after_n_rows)
.finish_with_scan_ops(
predicate,
aggregate,
projection.as_ref().map(|v| v.as_ref()),
)?;
if self.cache {
state.store_cache(cache_key, df.clone())
}
if state.verbose {
println!("parquet {:?} read", self.path);
}
Ok(df)
}
}
#[cfg(feature = "csv-file")]
pub struct CsvExec {
pub path: PathBuf,
pub schema: SchemaRef,
pub options: CsvParserOptions,
pub predicate: Option<Arc<dyn PhysicalExpr>>,
pub aggregate: Vec<ScanAggregation>,
}
#[cfg(feature = "csv-file")]
impl Executor for CsvExec {
fn execute(&mut self, state: &ExecutionState) -> Result<DataFrame> {
let (cache_key, cached) = cache_hit(&self.path, &self.predicate, state);
if let Some(df) = cached {
return Ok(df);
}
// cache miss
let mut with_columns = mem::take(&mut self.options.with_columns);
let mut projected_len = 0;
if let Some(columns) = &with_columns {
projected_len = columns.len();
}
if projected_len == 0 {
with_columns = None;
}
let stop_after_n_rows = set_n_rows(self.options.stop_after_n_rows);
let predicate = self
.predicate
.clone()
.map(|expr| Arc::new(PhysicalIoHelper { expr }) as Arc<dyn PhysicalIoExpr>);
let aggregate = if self.aggregate.is_empty() {
None
} else {
Some(self.aggregate.as_slice())
};
let df = CsvReader::from_path(&self.path)
.unwrap()
.has_header(self.options.has_header)
.with_schema(&self.schema)
.with_delimiter(self.options.delimiter)
.with_ignore_parser_errors(self.options.ignore_errors)
.with_skip_rows(self.options.skip_rows)
.with_stop_after_n_rows(stop_after_n_rows)
.with_columns(with_columns)
.low_memory(self.options.low_memory)
.with_null_values(self.options.null_values.clone())
.with_predicate(predicate)
.with_aggregate(aggregate)
.with_encoding(CsvEncoding::LossyUtf8)
.with_comment_char(self.options.comment_char)
.with_quote_char(self.options.quote_char)
.finish()?;
if self.options.cache {
state.store_cache(cache_key, df.clone());
}
if state.verbose {
println!("csv {:?} read", self.path);
}
Ok(df)
}
}
/// Producer of an in memory DataFrame
pub struct DataFrameExec {
df: Arc<DataFrame>,
projection: Option<Vec<Arc<dyn PhysicalExpr>>>,
selection: Option<Arc<dyn PhysicalExpr>>,
}
impl DataFrameExec {
pub(crate) fn new(
df: Arc<DataFrame>,
projection: Option<Vec<Arc<dyn PhysicalExpr>>>,
selection: Option<Arc<dyn PhysicalExpr>>,
) -> Self {
DataFrameExec {
df,
projection,
selection,
}
}
}
impl Executor for DataFrameExec {
fn execute(&mut self, state: &ExecutionState) -> Result<DataFrame> {
let df = mem::take(&mut self.df);
let mut df = Arc::try_unwrap(df).unwrap_or_else(|df| (*df).clone());
// projection should be before selection as those are free
// TODO: this is only the case if we don't create new columns
if let Some(projection) = &self.projection {
df = evaluate_physical_expressions(&df, projection, state)?;
}
if let Some(selection) = &self.selection {
let s = selection.evaluate(&df, state)?;
let mask = s.bool().map_err(|_| {
PolarsError::ComputeError("filter predicate was not of type boolean".into())
})?;
df = df.filter(mask)?;
}
if let Some(limit) = set_n_rows(None) {
Ok(df.head(Some(limit)))
} else {
Ok(df)
|
}
}
|
}
|
fetchers.py
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
High-level helper methods to load Argo data from any source.
The facade should be able to work with all available data access points.
"""
import warnings
from argopy.options import OPTIONS, _VALIDATORS
from .errors import InvalidFetcherAccessPoint, InvalidFetcher
from .utilities import list_available_data_src, list_available_index_src
from .plotters import plot_trajectory, plot_dac, plot_profilerType
AVAILABLE_DATA_SOURCES = list_available_data_src()
AVAILABLE_INDEX_SOURCES = list_available_index_src()
class ArgoDataFetcher(object):
""" Fetch and process Argo data.
Can return data selected from:
- one or more float(s), defined by WMOs
- one or more profile(s), defined for one WMO and one or more CYCLE NUMBER
- a space/time rectangular domain, defined by lat/lon/pres/time range
Can return data from the regular Argo dataset ('phy': temperature, salinity) and the Argo referenced
dataset used in DMQC ('ref': temperature, salinity).
This is the main API facade.
Specify here all options to data_fetchers.
Parameters
----------
mode : str
src : str
ds : str
"""
def __init__(self,
mode: str = "",
src: str = "",
ds: str = "",
**fetcher_kwargs):
"""
Parameters
----------
mode : str
User mode. Set to OPTIONS['mode'] by default.
ds : str
Name of the dataset to load. Use the global OPTIONS['dataset'] by default.
src : str
Source of the data to use. Use the global OPTIONS['src'] by default.
**fetcher_kwargs
Used to pass arguments specific to a data source.
"""
# Facade options:
self._mode = OPTIONS['mode'] if mode == '' else mode
self._dataset_id = OPTIONS['dataset'] if ds == '' else ds
self._src = OPTIONS['src'] if src == '' else src
_VALIDATORS['mode'](self._mode)
_VALIDATORS['src'](self._src)
_VALIDATORS['dataset'](self._dataset_id)
# Load data source access points:
if self._src not in AVAILABLE_DATA_SOURCES:
raise InvalidFetcher("Requested data fetcher '%s' is not available! Please try again with any of: %s"
% (self._src, "\n".join(AVAILABLE_DATA_SOURCES)))
else:
Fetchers = AVAILABLE_DATA_SOURCES[self._src]
# Auto-discovery of access points for this fetcher:
# Note: access point names for the facade are not the same as the access points of the fetchers
self.valid_access_points = ['profile', 'float', 'region']
self.Fetchers = {}
for p in Fetchers.access_points:
if p == 'wmo': # Required for 'profile' and 'float'
self.Fetchers['profile'] = Fetchers.Fetch_wmo
self.Fetchers['float'] = Fetchers.Fetch_wmo
if p == 'box': # Required for 'region'
self.Fetchers['region'] = Fetchers.Fetch_box
# Init sub-methods:
self.fetcher = None
if ds is None:
ds = Fetchers.dataset_ids[0]
self.fetcher_options = {**{'ds': ds}, **fetcher_kwargs}
self.postproccessor = self.__empty_processor
self._AccessPoint = None
# Dev warnings
# Todo Clean-up before each release
if self._dataset_id == 'bgc' and self._mode == 'standard':
warnings.warn(" 'BGC' dataset fetching in 'standard' user mode is not reliable. "
"Try to switch to 'expert' mode if you encounter errors.")
def __repr__(self):
if self.fetcher:
summary = [self.fetcher.__repr__()]
summary.append("Backend: %s" % self._src)
summary.append("User mode: %s" % self._mode)
else:
summary = ["<datafetcher 'Not initialised'>"]
summary.append("Backend: %s" % self._src)
summary.append("Fetchers: %s" % ", ".join(self.Fetchers.keys()))
summary.append("User mode: %s" % self._mode)
return "\n".join(summary)
def __empty_processor(self, xds):
""" Do nothing to a dataset """
return xds
def __getattr__(self, key):
""" Validate access points """
# print("key", key)
valid_attrs = ['Fetchers', 'fetcher', 'fetcher_options', 'postproccessor']
if key not in self.valid_access_points and key not in valid_attrs:
raise InvalidFetcherAccessPoint("'%s' is not a valid access point" % key)
pass
def dashboard(self, **kw):
try:
return self.fetcher.dashboard(**kw)
except Exception as e:
warnings.warn("dashboard not avaible for this fetcher access point (%s/%s)" % (self._src, self._AccessPoint))
def float(self, wmo, **kw):
""" Fetch data from a float """
if "CYC" in kw or "cyc" in kw:
raise TypeError("float() got an unexpected keyword argument 'cyc'. Use 'profile' access "
"point to fetch specific profile data.")
if 'float' in self.Fetchers:
self.fetcher = self.Fetchers['float'](WMO=wmo, **self.fetcher_options)
self._AccessPoint = 'float' # Register the requested access point
else:
raise InvalidFetcherAccessPoint("'float' not available with '%s' src" % self._src)
if self._mode == 'standard' and self._dataset_id != 'ref':
def postprocessing(xds):
xds = self.fetcher.filter_data_mode(xds)
xds = self.fetcher.filter_qc(xds)
xds = self.fetcher.filter_variables(xds, self._mode)
return xds
self.postproccessor = postprocessing
return self
def profile(self, wmo, cyc):
""" Fetch data from a profile
given one or more WMOs and CYCLE_NUMBER
"""
if 'profile' in self.Fetchers:
self.fetcher = self.Fetchers['profile'](WMO=wmo, CYC=cyc, **self.fetcher_options)
self._AccessPoint = 'profile' # Register the requested access point
else:
raise InvalidFetcherAccessPoint("'profile' not available with '%s' src" % self._src)
if self._mode == 'standard' and self._dataset_id != 'ref':
def postprocessing(xds):
xds = self.fetcher.filter_data_mode(xds)
xds = self.fetcher.filter_qc(xds)
xds = self.fetcher.filter_variables(xds, self._mode)
return xds
self.postproccessor = postprocessing
return self
def region(self, box: list):
""" Fetch data from a space/time domain
Parameters
----------
box: list(lon_min: float, lon_max: float, lat_min: float, lat_max: float, pres_min: float, pres_max: float,
date_min: str, date_max: str)
Define the domain to load all Argo data for. Longitude, latitude and pressure bounds are required, while
the two bounding dates [date_min and date_max] are optional. If not specified, the entire time series
is requested.
Returns
-------
:class:`argopy.DataFetcher` with an access point initialized.
"""
if 'region' in self.Fetchers:
self.fetcher = self.Fetchers['region'](box=box, **self.fetcher_options)
self._AccessPoint = 'region' # Register the requested access point
else:
raise InvalidFetcherAccessPoint("'region' not available with '%s' src" % self._src)
if self._mode == 'standard' and self._dataset_id != 'ref':
def postprocessing(xds):
xds = self.fetcher.filter_data_mode(xds)
xds = self.fetcher.filter_qc(xds)
xds = self.fetcher.filter_variables(xds, self._mode)
return xds
self.postproccessor = postprocessing
return self
def
|
(self, **kwargs):
""" Fetch and return data as xarray.DataSet
Returns
-------
:class:`xarray.DataArray`
"""
# if not self.fetcher:
# raise InvalidFetcher(" Initialize an access point (%s) first." %
# ",".join(self.Fetchers.keys()))
if self._AccessPoint not in self.valid_access_points:
raise InvalidFetcherAccessPoint(" Initialize an access point (%s) first." % ",".join(self.Fetchers.keys()))
xds = self.fetcher.to_xarray(**kwargs)
xds = self.postproccessor(xds)
return xds
def to_dataframe(self, **kwargs):
""" Fetch and return data as pandas.Dataframe """
if self._AccessPoint not in self.valid_access_points:
raise InvalidFetcherAccessPoint(" Initialize an access point (%s) first." % ",".join(self.Fetchers.keys()))
return self.to_xarray(**kwargs).to_dataframe()
def clear_cache(self):
""" Clear fetcher cached data """
return self.fetcher.clear_cache()
class ArgoIndexFetcher(object):
"""
Specs discussion:
https://github.com/euroargodev/argopy/issues/8
https://github.com/euroargodev/argopy/pull/6
Usage:
from argopy import ArgoIndexFetcher
idx = ArgoIndexFetcher.region([-75, -65, 10, 20])
idx.plot.trajectories()
idx.to_dataframe()
Fetch and process Argo index.
Can return metadata from index of :
- one or more float(s), defined by WMOs
- one or more profile(s), defined for one WMO and one or more CYCLE NUMBER
- a space/time rectangular domain, defined by lat/lon/pres/time range
idx object can also be used as an input :
argo_loader = ArgoDataFetcher(index=idx)
Specify here all options to data_fetchers
"""
def __init__(self,
mode: str = "",
src: str = "",
**fetcher_kwargs):
# Facade options:
self._mode = OPTIONS['mode'] if mode == '' else mode
self._src = OPTIONS['src'] if src == '' else src
_VALIDATORS['mode'](self._mode)
_VALIDATORS['src'](self._src)
# Load data source access points:
if self._src not in AVAILABLE_INDEX_SOURCES:
raise InvalidFetcher("Requested index fetcher '%s' not available ! "
"Please try again with any of: %s" % (self._src, "\n".join(AVAILABLE_INDEX_SOURCES)))
else:
Fetchers = AVAILABLE_INDEX_SOURCES[self._src]
# Auto-discovery of access points for this fetcher:
# note: Access point names for the facade are not the same as the access points of the fetchers
self.valid_access_points = ['profile', 'float', 'region']
self.Fetchers = {}
for p in Fetchers.access_points:
if p == 'wmo': # Required for 'profile' and 'float'
self.Fetchers['profile'] = Fetchers.Fetcher_wmo
self.Fetchers['float'] = Fetchers.Fetcher_wmo
if p == 'box': # Required for 'region'
self.Fetchers['region'] = Fetchers.Fetcher_box
# Init sub-methods:
self.fetcher = None
self.fetcher_options = {**fetcher_kwargs}
self.postproccessor = self.__empty_processor
self._AccessPoint = None
def __repr__(self):
if self.fetcher:
summary = [self.fetcher.__repr__()]
summary.append("User mode: %s" % self._mode)
else:
summary = ["<indexfetcher 'Not initialised'>"]
summary.append("Fetchers: %s" % ", ".join(self.Fetchers.keys()))
summary.append("User mode: %s" % self._mode)
return "\n".join(summary)
def __empty_processor(self, xds):
""" Do nothing to a dataset """
return xds
def __getattr__(self, key):
""" Validate access points """
valid_attrs = ['Fetchers', 'fetcher', 'fetcher_options', 'postproccessor']
if key not in self.valid_access_points and key not in valid_attrs:
raise InvalidFetcherAccessPoint("'%s' is not a valid access point" % key)
pass
def profile(self, wmo, cyc):
""" Fetch index for a profile
given one or more WMOs and CYCLE_NUMBER
"""
if 'profile' in self.Fetchers:
self.fetcher = self.Fetchers['profile'](WMO=wmo, CYC=cyc, **self.fetcher_options)
self._AccessPoint = 'profile' # Register the requested access point
else:
raise InvalidFetcherAccessPoint("'profile' not available with '%s' src" % self._src)
return self
def float(self, wmo):
""" Load index for one or more WMOs """
if 'float' in self.Fetchers:
self.fetcher = self.Fetchers['float'](WMO=wmo, **self.fetcher_options)
self._AccessPoint = 'float' # Register the requested access point
else:
raise InvalidFetcherAccessPoint("'float' not available with '%s' src" % self._src)
return self
def region(self, box):
""" Load index for a rectangular space/time domain region """
if 'region' in self.Fetchers:
self.fetcher = self.Fetchers['region'](box=box, **self.fetcher_options)
self._AccessPoint = 'region' # Register the requested access point
else:
raise InvalidFetcherAccessPoint("'region' not available with '%s' src" % self._src)
return self
def to_dataframe(self, **kwargs):
""" Fetch index and return pandas.Dataframe """
if not self.fetcher:
raise InvalidFetcher(" Initialize an access point (%s) first." %
",".join(self.Fetchers.keys()))
return self.fetcher.to_dataframe(**kwargs)
def to_xarray(self, **kwargs):
""" Fetch index and return xr.dataset """
if self._AccessPoint not in self.valid_access_points:
raise InvalidFetcherAccessPoint(" Initialize an access point (%s) first." % ",".join(self.Fetchers.keys()))
return self.fetcher.to_xarray(**kwargs)
def to_csv(self, file: str = 'output_file.csv'):
""" Fetch index and return csv """
if self._AccessPoint not in self.valid_access_points:
raise InvalidFetcherAccessPoint(" Initialize an access point (%s) first." % ",".join(self.Fetchers.keys()))
return self.to_dataframe().to_csv(file)
def plot(self, ptype='trajectory'):
""" Create custom plots from index
Parameters
----------
ptype: str
Type of plot to generate. This can be: 'trajectory', 'profiler', 'dac'.
Returns
-------
fig : :class:`matplotlib.figure.Figure`
Figure instance
"""
idx = self.to_dataframe()
if ptype == 'dac':
return plot_dac(idx)
elif ptype == 'profiler':
return plot_profilerType(idx)
elif ptype == 'trajectory':
return plot_trajectory(idx.sort_values(['file']))
else:
raise ValueError("Type of plot unavailable. Use: 'dac', 'profiler' or 'trajectory' (default)")
def clear_cache(self):
""" Clear fetcher cached data """
return self.fetcher.clear_cache()
|
to_xarray
|
mrep.py
|
# https://rosalind.info/problems/mrep/
def
|
(fasta: list):
prev = True
header = []
seq = []
for f in fasta:
if ">" in f:
header.append(f[1:])
prev = True
elif prev:
seq.append(f)
prev = False
else:
seq[-1] += f
return header, seq
# INPUT -------------------------------------------
file_in = "sample/dataset/mrep.txt"
file_out = "sample/output/mrep.txt"
# file_in = "case/dataset/mrep.txt"
with open(file_in) as f:
data = f.read().splitlines()
with open(file_out) as f:
outcome = f.read().splitlines()
# MAIN -------------------------------------------
# OUTPUT -------------------------------------------
with open("case/output/mrep.txt", "w") as f:
f.write("")  # placeholder: nothing is computed in the MAIN section above yet
# END
|
fmtfa
|
sentiment.py
|
'''
Conduct Sentiment Analysis
Chun Hu, Yimin Li, Tianyue Niu
'''
import os
import json
import re
import pandas as pd
import nltk
nltk.download('punkt')
nltk.download('wordnet')
nltk.download('stopwords')
from nltk import word_tokenize, sent_tokenize
from nltk.corpus import stopwords
from nltk.stem import WordNetLemmatizer
from textblob import TextBlob
# turn off warnings
pd.set_option('mode.chained_assignment', None)
cwd = os.path.dirname(__file__)
top_10s_path = os.path.join(cwd, 'top10s.csv')
def merge_two_df(top_songs, lyrics):
'''
Input:
top_songs (pandas data frame): kaggle data
lyrics (json file): lyrics scraped
Output:
a merged data containing lyrics (pandas data frame)
'''
# merge two df
top_songs['lyrics'] = ''
for index, row in top_songs.iterrows():
tit = top_songs.title[index]
if tit in lyrics:
top_songs['lyrics'][index] = lyrics[tit]
return top_songs
def process_words(words, stop):
'''
Input:
words (list): a list of words
stop (list): extra stop words we want to remove
Output:
new_words (list): a list of normalized words
'''
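# For example (editor's sketch; exact lemmatizer output may vary):
# process_words(['Running,', 'the', 'CHORUS'], ['chorus']) yields roughly
# ['run']: punctuation is stripped, NLTK stop words and the extra stop list
# are removed, and the remaining words are lemmatized as verbs.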
lemmatizer = WordNetLemmatizer()
new_words = []
for word in words:
new_word = re.sub(r'[^\w\s]', '', word)
if new_word != '':
new_word = new_word.lower()
if new_word not in stop and new_word not in stopwords.words('english'):
new_word = lemmatizer.lemmatize(new_word, pos='v')
new_words.append(new_word)
return new_words
def add_sentiment(top_songs):
'''
Input:
top_songs (pandas df): raw version
Output:
top_songs (pandas df): with sentiment analysis result
'''
# tokenize words
top_songs['tokenized'] = top_songs['lyrics'].apply(\
lambda x: [word_tokenize(s) for s in sent_tokenize(x)])
# normalize words
top_songs['normalized'] = top_songs['tokenized']
stop = ['chorus', 'verse', 'intro', 'pre', 'outro', 'interlude']
for index, row in top_songs['tokenized'].items():
new_sent = []
for sent in row:
new_sent += process_words(sent, stop)
top_songs['normalized'][index] = new_sent
# calculate sentiment
top_songs['sentiment'] = ''
for index, row in top_songs.iterrows():
obj = TextBlob(' '.join(top_songs['normalized'][index]))
sentiment = obj.sentiment.polarity
top_songs['sentiment'][index] = sentiment
return top_songs
def
|
():
'''
Input:
None
Output:
top_songs (pandas df): final cleaned & processed data frame
'''
top_songs = pd.read_csv(top_10s_path)
with open('lyrics_file.json') as f:
lyrics = json.load(f)
top_songs = merge_two_df(top_songs, lyrics)
df = add_sentiment(top_songs)
df.to_csv('top_songs.csv')
return
if __name__ == "__main__":
create_final_top_songs()
|
create_final_top_songs
|
spawn-receiver-sender.rs
|
//= {
//= "output": {
//= "2": [
//= "",
//= true
//= ],
//= "1": [
//= "",
//= true
//= ]
//= },
//= "children": [
//= {
//= "output": {
//= "2": [
//= "",
//= true
//= ],
//= "1": [
//= "",
//= true
//= ]
//= },
//= "children": [],
//= "exit": "Success"
//= },
//= {
//= "output": {
//= "2": [
//= "",
//= true
//= ],
//= "1": [
//= "",
//= true
//= ]
//= },
//= "children": [],
//= "exit": "Success"
//= }
//= ],
//= "exit": "Success"
//= }
use constellation::*;
fn
|
() {
init(Resources {
mem: 20 * Mem::MIB,
..Resources::default()
});
for _ in 0..2 {
let pid = spawn(
Resources {
mem: 20 * Mem::MIB,
..Resources::default()
},
FnOnce!(|parent| {
let _sender = Sender::<String>::new(parent);
}),
)
.block()
.expect("spawn() failed to allocate process");
let _receiver = Receiver::<String>::new(pid);
}
}
|
main
|
tth.py
|
from merkletree import MerkleTree
from .hashing import TigerHash
class
|
(MerkleTree):
segment = 1024
hashsize = TigerHash.size
@classmethod
def _hash(klass, *chunks):
return TigerHash.digest(*chunks)
|
TigerTree
|
get_accounts_account_id_statements_statement_id_transactions_responses.go
|
// Code generated by go-swagger; DO NOT EDIT.
package transactions
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"fmt"
"io"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
"github.com/cloudentity/acp/pkg/openbanking/models"
)
// GetAccountsAccountIDStatementsStatementIDTransactionsReader is a Reader for the GetAccountsAccountIDStatementsStatementIDTransactions structure.
type GetAccountsAccountIDStatementsStatementIDTransactionsReader struct {
formats strfmt.Registry
}
// ReadResponse reads a server response into the received o.
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
switch response.Code() {
case 200:
result := NewGetAccountsAccountIDStatementsStatementIDTransactionsOK()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return result, nil
case 400:
result := NewGetAccountsAccountIDStatementsStatementIDTransactionsBadRequest()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 401:
result := NewGetAccountsAccountIDStatementsStatementIDTransactionsUnauthorized()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 403:
result := NewGetAccountsAccountIDStatementsStatementIDTransactionsForbidden()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 404:
result := NewGetAccountsAccountIDStatementsStatementIDTransactionsNotFound()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 405:
result := NewGetAccountsAccountIDStatementsStatementIDTransactionsMethodNotAllowed()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 406:
result := NewGetAccountsAccountIDStatementsStatementIDTransactionsNotAcceptable()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 429:
result := NewGetAccountsAccountIDStatementsStatementIDTransactionsTooManyRequests()
if err := result.readResponse(response, consumer, o.formats); err != nil
|
return nil, result
case 500:
result := NewGetAccountsAccountIDStatementsStatementIDTransactionsInternalServerError()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
default:
return nil, runtime.NewAPIError("unknown error", response, response.Code())
}
}
// NewGetAccountsAccountIDStatementsStatementIDTransactionsOK creates a GetAccountsAccountIDStatementsStatementIDTransactionsOK with default headers values
func NewGetAccountsAccountIDStatementsStatementIDTransactionsOK() *GetAccountsAccountIDStatementsStatementIDTransactionsOK {
return &GetAccountsAccountIDStatementsStatementIDTransactionsOK{}
}
/*GetAccountsAccountIDStatementsStatementIDTransactionsOK handles this case with default header values.
Transactions Read
*/
type GetAccountsAccountIDStatementsStatementIDTransactionsOK struct {
/*An RFC4122 UID used as a correlation id.
*/
XFapiInteractionID string
Payload *models.OBReadTransaction6
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsOK) Error() string {
return fmt.Sprintf("[GET /accounts/{AccountId}/statements/{StatementId}/transactions][%d] getAccountsAccountIdStatementsStatementIdTransactionsOK %+v", 200, o.Payload)
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsOK) GetPayload() *models.OBReadTransaction6 {
return o.Payload
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
// response header x-fapi-interaction-id
o.XFapiInteractionID = response.GetHeader("x-fapi-interaction-id")
o.Payload = new(models.OBReadTransaction6)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewGetAccountsAccountIDStatementsStatementIDTransactionsBadRequest creates a GetAccountsAccountIDStatementsStatementIDTransactionsBadRequest with default headers values
func NewGetAccountsAccountIDStatementsStatementIDTransactionsBadRequest() *GetAccountsAccountIDStatementsStatementIDTransactionsBadRequest {
return &GetAccountsAccountIDStatementsStatementIDTransactionsBadRequest{}
}
/*GetAccountsAccountIDStatementsStatementIDTransactionsBadRequest handles this case with default header values.
Bad request
*/
type GetAccountsAccountIDStatementsStatementIDTransactionsBadRequest struct {
/*An RFC4122 UID used as a correlation id.
*/
XFapiInteractionID string
Payload *models.OBErrorResponse1
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsBadRequest) Error() string {
return fmt.Sprintf("[GET /accounts/{AccountId}/statements/{StatementId}/transactions][%d] getAccountsAccountIdStatementsStatementIdTransactionsBadRequest %+v", 400, o.Payload)
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsBadRequest) GetPayload() *models.OBErrorResponse1 {
return o.Payload
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
// response header x-fapi-interaction-id
o.XFapiInteractionID = response.GetHeader("x-fapi-interaction-id")
o.Payload = new(models.OBErrorResponse1)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewGetAccountsAccountIDStatementsStatementIDTransactionsUnauthorized creates a GetAccountsAccountIDStatementsStatementIDTransactionsUnauthorized with default headers values
func NewGetAccountsAccountIDStatementsStatementIDTransactionsUnauthorized() *GetAccountsAccountIDStatementsStatementIDTransactionsUnauthorized {
return &GetAccountsAccountIDStatementsStatementIDTransactionsUnauthorized{}
}
/*GetAccountsAccountIDStatementsStatementIDTransactionsUnauthorized handles this case with default header values.
Unauthorized
*/
type GetAccountsAccountIDStatementsStatementIDTransactionsUnauthorized struct {
/*An RFC4122 UID used as a correlation id.
*/
XFapiInteractionID string
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsUnauthorized) Error() string {
return fmt.Sprintf("[GET /accounts/{AccountId}/statements/{StatementId}/transactions][%d] getAccountsAccountIdStatementsStatementIdTransactionsUnauthorized ", 401)
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsUnauthorized) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
// response header x-fapi-interaction-id
o.XFapiInteractionID = response.GetHeader("x-fapi-interaction-id")
return nil
}
// NewGetAccountsAccountIDStatementsStatementIDTransactionsForbidden creates a GetAccountsAccountIDStatementsStatementIDTransactionsForbidden with default headers values
func NewGetAccountsAccountIDStatementsStatementIDTransactionsForbidden() *GetAccountsAccountIDStatementsStatementIDTransactionsForbidden {
return &GetAccountsAccountIDStatementsStatementIDTransactionsForbidden{}
}
/*GetAccountsAccountIDStatementsStatementIDTransactionsForbidden handles this case with default header values.
Forbidden
*/
type GetAccountsAccountIDStatementsStatementIDTransactionsForbidden struct {
/*An RFC4122 UID used as a correlation id.
*/
XFapiInteractionID string
Payload *models.OBErrorResponse1
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsForbidden) Error() string {
return fmt.Sprintf("[GET /accounts/{AccountId}/statements/{StatementId}/transactions][%d] getAccountsAccountIdStatementsStatementIdTransactionsForbidden %+v", 403, o.Payload)
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsForbidden) GetPayload() *models.OBErrorResponse1 {
return o.Payload
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsForbidden) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
// response header x-fapi-interaction-id
o.XFapiInteractionID = response.GetHeader("x-fapi-interaction-id")
o.Payload = new(models.OBErrorResponse1)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewGetAccountsAccountIDStatementsStatementIDTransactionsNotFound creates a GetAccountsAccountIDStatementsStatementIDTransactionsNotFound with default headers values
func NewGetAccountsAccountIDStatementsStatementIDTransactionsNotFound() *GetAccountsAccountIDStatementsStatementIDTransactionsNotFound {
return &GetAccountsAccountIDStatementsStatementIDTransactionsNotFound{}
}
/*GetAccountsAccountIDStatementsStatementIDTransactionsNotFound handles this case with default header values.
Not found
*/
type GetAccountsAccountIDStatementsStatementIDTransactionsNotFound struct {
/*An RFC4122 UID used as a correlation id.
*/
XFapiInteractionID string
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsNotFound) Error() string {
return fmt.Sprintf("[GET /accounts/{AccountId}/statements/{StatementId}/transactions][%d] getAccountsAccountIdStatementsStatementIdTransactionsNotFound ", 404)
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsNotFound) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
// response header x-fapi-interaction-id
o.XFapiInteractionID = response.GetHeader("x-fapi-interaction-id")
return nil
}
// NewGetAccountsAccountIDStatementsStatementIDTransactionsMethodNotAllowed creates a GetAccountsAccountIDStatementsStatementIDTransactionsMethodNotAllowed with default headers values
func NewGetAccountsAccountIDStatementsStatementIDTransactionsMethodNotAllowed() *GetAccountsAccountIDStatementsStatementIDTransactionsMethodNotAllowed {
return &GetAccountsAccountIDStatementsStatementIDTransactionsMethodNotAllowed{}
}
/*GetAccountsAccountIDStatementsStatementIDTransactionsMethodNotAllowed handles this case with default header values.
Method Not Allowed
*/
type GetAccountsAccountIDStatementsStatementIDTransactionsMethodNotAllowed struct {
/*An RFC4122 UID used as a correlation id.
*/
XFapiInteractionID string
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsMethodNotAllowed) Error() string {
return fmt.Sprintf("[GET /accounts/{AccountId}/statements/{StatementId}/transactions][%d] getAccountsAccountIdStatementsStatementIdTransactionsMethodNotAllowed ", 405)
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsMethodNotAllowed) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
// response header x-fapi-interaction-id
o.XFapiInteractionID = response.GetHeader("x-fapi-interaction-id")
return nil
}
// NewGetAccountsAccountIDStatementsStatementIDTransactionsNotAcceptable creates a GetAccountsAccountIDStatementsStatementIDTransactionsNotAcceptable with default headers values
func NewGetAccountsAccountIDStatementsStatementIDTransactionsNotAcceptable() *GetAccountsAccountIDStatementsStatementIDTransactionsNotAcceptable {
return &GetAccountsAccountIDStatementsStatementIDTransactionsNotAcceptable{}
}
/*GetAccountsAccountIDStatementsStatementIDTransactionsNotAcceptable handles this case with default header values.
Not Acceptable
*/
type GetAccountsAccountIDStatementsStatementIDTransactionsNotAcceptable struct {
/*An RFC4122 UID used as a correlation id.
*/
XFapiInteractionID string
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsNotAcceptable) Error() string {
return fmt.Sprintf("[GET /accounts/{AccountId}/statements/{StatementId}/transactions][%d] getAccountsAccountIdStatementsStatementIdTransactionsNotAcceptable ", 406)
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsNotAcceptable) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
// response header x-fapi-interaction-id
o.XFapiInteractionID = response.GetHeader("x-fapi-interaction-id")
return nil
}
// NewGetAccountsAccountIDStatementsStatementIDTransactionsTooManyRequests creates a GetAccountsAccountIDStatementsStatementIDTransactionsTooManyRequests with default headers values
func NewGetAccountsAccountIDStatementsStatementIDTransactionsTooManyRequests() *GetAccountsAccountIDStatementsStatementIDTransactionsTooManyRequests {
return &GetAccountsAccountIDStatementsStatementIDTransactionsTooManyRequests{}
}
/*GetAccountsAccountIDStatementsStatementIDTransactionsTooManyRequests handles this case with default header values.
Too Many Requests
*/
type GetAccountsAccountIDStatementsStatementIDTransactionsTooManyRequests struct {
/*Number in seconds to wait
*/
RetryAfter int64
/*An RFC4122 UID used as a correlation id.
*/
XFapiInteractionID string
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsTooManyRequests) Error() string {
return fmt.Sprintf("[GET /accounts/{AccountId}/statements/{StatementId}/transactions][%d] getAccountsAccountIdStatementsStatementIdTransactionsTooManyRequests ", 429)
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsTooManyRequests) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
// response header Retry-After
retryAfter, err := swag.ConvertInt64(response.GetHeader("Retry-After"))
if err != nil {
return errors.InvalidType("Retry-After", "header", "int64", response.GetHeader("Retry-After"))
}
o.RetryAfter = retryAfter
// response header x-fapi-interaction-id
o.XFapiInteractionID = response.GetHeader("x-fapi-interaction-id")
return nil
}
// NewGetAccountsAccountIDStatementsStatementIDTransactionsInternalServerError creates a GetAccountsAccountIDStatementsStatementIDTransactionsInternalServerError with default headers values
func NewGetAccountsAccountIDStatementsStatementIDTransactionsInternalServerError() *GetAccountsAccountIDStatementsStatementIDTransactionsInternalServerError {
return &GetAccountsAccountIDStatementsStatementIDTransactionsInternalServerError{}
}
/*GetAccountsAccountIDStatementsStatementIDTransactionsInternalServerError handles this case with default header values.
Internal Server Error
*/
type GetAccountsAccountIDStatementsStatementIDTransactionsInternalServerError struct {
/*An RFC4122 UID used as a correlation id.
*/
XFapiInteractionID string
Payload *models.OBErrorResponse1
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsInternalServerError) Error() string {
return fmt.Sprintf("[GET /accounts/{AccountId}/statements/{StatementId}/transactions][%d] getAccountsAccountIdStatementsStatementIdTransactionsInternalServerError %+v", 500, o.Payload)
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsInternalServerError) GetPayload() *models.OBErrorResponse1 {
return o.Payload
}
func (o *GetAccountsAccountIDStatementsStatementIDTransactionsInternalServerError) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
// response header x-fapi-interaction-id
o.XFapiInteractionID = response.GetHeader("x-fapi-interaction-id")
o.Payload = new(models.OBErrorResponse1)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
|
{
return nil, err
}
|
StyledButton.react.js
|
/**
* @flow
*/
import * as React from 'react';
|
<Button color={PlatformColor('SystemAccentColor')} {...props} />
);
|
import {Button, PlatformColor} from 'react-native';
export default (props: React.ElementProps<typeof Button>): React.Node => (
|
map.go
|
package immutable
import (
"fmt"
)
// MapLiteralType is the shorthand type to be used in MapLiteral.
type MapLiteralType map[Comparable]interface{}
// MapRangeFunc defines the iteration function for Map type.
//
// Whenever MapRangeFunc returns a non-nil error, the iteration will be
// stopped. The error will be returned by Range function.
type MapRangeFunc func(key Comparable, value interface{}) error
// Map defines the interface of an immutable map.
type Map interface {
// Len returns the size of the map.
Len() int
// Get returns the value to the key.
//
// If the key is not in the map, value will be nil and ok will be false.
Get(key Comparable) (value interface{}, ok bool)
// Range iterates through the map.
//
// It will return the error returned by f.
Range(f MapRangeFunc) error
}
// MapBuilder defines the interface of an immutable map builder.
//
// It's not guaranteed to be thread-safe and shouldn't be used concurrently.
type MapBuilder interface {
Map
// Set sets the key value pair to the map.
//
// It should return self for chaining.
Set(key Comparable, value interface{}) MapBuilder
// Update updates every key value pair from m to the map.
//
// It should return self for chaining.
Update(m MapLiteralType) MapBuilder
// Build builds the immutable map.
Build() Map
}
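// A minimal usage sketch (editor's addition). It assumes plain strings satisfy
// this package's Comparable type; the keys and values are arbitrary examples.
//
// m := NewMapBuilder().Set("a", 1).Update(MapLiteralType{"b": 2}).Build()
// v, ok := m.Get("a") // v == 1, ok == true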
// EmptyMap defines an immutable empty map.
var EmptyMap Map = (*immutableMap)(nil)
type immutableMap struct {
m MapLiteralType
}
func (m *immutableMap) Len() int {
if m == nil {
return 0
}
return len(m.m)
}
func (m *immutableMap) Get(key Comparable) (value interface{}, ok bool) {
if m == nil {
return
}
value, ok = m.m[key]
return
}
func (m *immutableMap) Range(f MapRangeFunc) (err error) {
if m == nil {
return
}
for k, v := range m.m {
err = f(k, v)
if err != nil
|
}
return
}
func (m *immutableMap) String() string {
return fmt.Sprintf("%v", m.m)
}
// Make sure *mapBuilder satisfies MapBuilder interface.
var _ MapBuilder = (*mapBuilder)(nil)
type mapBuilder struct {
immutableMap
}
func (mb *mapBuilder) Set(key Comparable, value interface{}) MapBuilder {
mb.immutableMap.m[key] = value
return mb
}
func (mb *mapBuilder) Update(incoming MapLiteralType) MapBuilder {
for k, v := range incoming {
mb.Set(k, v)
}
return mb
}
func (mb *mapBuilder) Build() Map {
m := make(MapLiteralType)
for k, v := range mb.immutableMap.m {
m[k] = v
}
return &immutableMap{
m: m,
}
}
// NewMapBuilder creates a new MapBuilder.
func NewMapBuilder() MapBuilder {
return &mapBuilder{
immutableMap: immutableMap{
m: make(MapLiteralType),
},
}
}
// MapLiteral creates an immutable map from an existing map.
//
// It's shorthand for immutable.NewMapBuilder().Update(m).Build().
func MapLiteral(m MapLiteralType) Map {
return NewMapBuilder().Update(m).Build()
}
|
{
return
}
|
share_normal_form.rs
|
//! Share normal form.
//!
//! Replace the subexpressions of WHNFs that are not functions by thunks, such that they can be
//! shared. It is similar to the behavior of other lazy languages with respect to data
//! constructors. To do so, subexpressions are replaced by fresh variables, introduced by new let
//! bindings put at the beginning of the WHNF.
//!
//! For example, take the expression:
//! ```text
//! let x = {a = 1 + 1} in x.a + x.a
//! ```
//!
//! The term `{a = 1 + 1}` is a record, and hence a WHNF. In consequence, the thunk allocated to x
//! is never updated. Without additional machinery, `a` will be recomputed each time it is used,
//! two times here.
//!
//! The transformation replaces such subexpressions, namely the content of the fields
//! of records and the elements of lists - `(1 + 1)` in our example -, with fresh variables
//! introduced by `let` added at the head of the term:
//!
//! ```text
//! let x = (let var = 1 + 1 in {a = var}) in x.a + x.a
//! ```
//!
//! Now, the field `a` points to the thunk introduced by `var`: at the evaluation of the first
//! occurrence of `x.a`, this thunk is updated with `2`, and is not recomputed the second time.
//!
//! Newly introduced variables begin with a special character to avoid clashing with user-defined
//! variables.
use super::fresh_var;
use crate::identifier::Ident;
use crate::match_sharedterm;
use crate::position::TermPos;
use crate::term::{BindingType, RichTerm, Term};
/// Transform the top-level term of an AST to a share normal form, if it can.
///
/// This function is not recursive: it just tries to apply one step of the transformation to
/// the top-level node of the AST. For example, it transforms `[1 + 1, [1 + 2]]` to `let %0 = 1
/// + 1 in [%0, [1 + 2]]`: the nested subterm `[1 + 2]` is left as it was. If the term is
/// neither a record, a list nor an enriched value, it is returned unchanged. In other words,
/// the transformation is implemented as rewrite rules, and must be used in conjunction with a
/// traversal to obtain a full transformation.
pub fn transform_one(rt: RichTerm) -> RichTerm {
let pos = rt.pos;
match_sharedterm! {rt.term,
with {
Term::Record(map, attrs) => {
let mut bindings = Vec::with_capacity(map.len());
let map = map
.into_iter()
.map(|(id, t)| {
if should_share(&t.term) {
let fresh_var = fresh_var();
let pos_t = t.pos;
bindings.push((fresh_var.clone(), t));
(id, RichTerm::new(Term::Var(fresh_var), pos_t))
} else {
(id, t)
}
})
.collect();
with_bindings(Term::Record(map, attrs), bindings, pos, BindingType::Normal)
},
Term::RecRecord(map, dyn_fields, attrs) => {
// When a recursive record is evaluated, all fields need to be turned to closures
// anyway (see the corresponding case in `eval::eval()`), which is what the share
// normal form transformation does. This is why the test is more lax here than for
// other constructors: it is not only about sharing, but also about the future
// evaluation of recursive records. Only constants are not required to be
// closurized.
//
// In theory, the variable case is one exception: if the field is already a bare
// variable, it seems useless to add one more indirection through a generated
// variable. However, it is currently fundamental for recursive record merging that
// the share normal form transformation ensures the following post-condition: the
// fields of recursive records contain either a constant or a *generated* variable,
// but never a user-supplied variable directly (the former starts with a special
// marker). See comments inside [`RichTerm::closurize`] for more details.
let mut bindings = Vec::with_capacity(map.len());
let map = map
.into_iter()
.map(|(id, t)| {
// CHANGE THIS CONDITION CAREFULLY. Doing so can break the post-condition
// explained above.
if !t.as_ref().is_constant() {
let fresh_var = fresh_var();
let pos_t = t.pos;
bindings.push((fresh_var.clone(), t));
(id, RichTerm::new(Term::Var(fresh_var), pos_t))
} else {
(id, t)
}
})
.collect();
let dyn_fields = dyn_fields
.into_iter()
.map(|(id_t, t)| {
if !t.as_ref().is_constant() {
let fresh_var = fresh_var();
let pos_t = t.pos;
bindings.push((fresh_var.clone(), t));
(id_t, RichTerm::new(Term::Var(fresh_var), pos_t))
} else {
(id_t, t)
}
})
.collect();
with_bindings(Term::RecRecord(map, dyn_fields, attrs), bindings, pos, BindingType::Revertible)
},
Term::List(ts) => {
let mut bindings = Vec::with_capacity(ts.len());
let ts = ts
.into_iter()
.map(|t| {
if should_share(&t.term) {
let fresh_var = fresh_var();
let pos_t = t.pos;
bindings.push((fresh_var.clone(), t));
RichTerm::new(Term::Var(fresh_var), pos_t)
} else {
t
}
})
.collect();
with_bindings(Term::List(ts), bindings, pos, BindingType::Normal)
},
Term::MetaValue(meta) if meta.value.as_ref().map(|t| should_share(&t.term)).unwrap_or(false) => {
let mut meta = meta;
let fresh_var = fresh_var();
let t = meta.value.take().unwrap();
meta.value
.replace(RichTerm::new(Term::Var(fresh_var.clone()), t.pos));
let inner = RichTerm::new(Term::MetaValue(meta), pos);
RichTerm::new(Term::Let(fresh_var, t, inner, BindingType::Normal), pos)
}
} else rt
}
}
/// Determine if a subterm of a WHNF should be wrapped in a thunk in order to be shared.
///
/// Sharing is typically useless if the subterm is already a WHNF which can be copied without
/// duplicating any work. On the other hand, a WHNF which can contain other shareable
/// subexpressions, such as a record, should be shared.
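///
/// For example (mirroring the match arms below): a bare constant such as
/// `Term::Num(1.0)`, a variable or a function is cheap to copy and is not
/// shared, while a record or a list may contain arbitrary unevaluated
/// subterms and therefore is shared.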
fn should_share(t: &Term) -> bool {
match t {
Term::Null
| Term::Bool(_)
| Term::Num(_)
| Term::Str(_)
| Term::Lbl(_)
| Term::Sym(_)
| Term::Var(_)
| Term::Enum(_)
| Term::Fun(_, _) => false,
_ => true,
}
}
/// Bind a list of pairs `(identifier, term)` in a term.
///
/// Given the term `body` and bindings of identifiers to terms represented as a list of pairs
/// `(id_1, term_1), .., (id_n, term_n)`, return the new term `let id_n = term_n in ... let
/// id_1 = term_1 in body`.
fn
|
(
body: Term,
bindings: Vec<(Ident, RichTerm)>,
pos: TermPos,
btype: BindingType,
) -> RichTerm {
bindings
.into_iter()
.fold(RichTerm::new(body, pos.into_inherited()), |acc, (id, t)| {
RichTerm::new(Term::Let(id, t, acc, btype), pos)
})
}
|
with_bindings
|
readSample.py
|
import json as json
import numpy as np
import networkx as nx
from networkx.readwrite import json_graph
# with open('./example_data/toy-ppi-feats.npy') as load_f:
def t1():
|
# print(data['links'][3])
# val_cnt = cnt
# train_cnt = len(data['nodes']) - cnt - testcnt
# print('the test cnt', testcnt)
# print('the val cnt', val_cnt)
# print('the total ', len(data['nodes']))
# print('the train ', train_cnt)
# print('the train/total', train_cnt/len(data['nodes']))
# print(cnt)
# print(len(data['nodes'])- cnt)
# res = np.load('./unsup_example_data/graphsage_mean_small_0.000010/val.npy')
# # print(res[0])
# print(len(res))
# feats = np.load('./example_data/toy-ppi-feats.npy')
# print(type(feats))
# print(type(feats[0]))
# print(feats[0])
def t2():
with open('./fljson/sto-G.json', 'r') as fp:
file = json.load(fp)
itr = 0
# for key, items in file.items():
# if itr == 0:
# itr +=1
# print(key)
G = json_graph.node_link_graph(file)
print(G.nodes[0]['val'])
for edge in G.edges():
print(edge)
if __name__ == "__main__":
t2()
|
with open('./example_data/toy-ppi-G.json') as f:
data = json.load(f)
for i in data:
print(i)
print(data['directed'])
print(data['graph'])
print(data['multigraph'])
# print(data['nodes'])
print(type(data['graph']))
print(type(data['links']))
print(type(data['nodes']))
cnt = 0
itr = 0
testcnt = 0
train_set = set()
val_set = set()
test_set = set()
for i in data['nodes']:
mid = i['id']
if itr == 0:
print(i)
itr +=1
if i['val'] == True:
cnt += 1
val_set.add(mid)
elif i['test'] == True:
testcnt += 1
test_set.add(mid)
else:
train_set.add(mid)
if i['val'] == True and i['test']== True:
print('wtf?')
itr = 0
for link in data['links']:
if itr == 0:
print(i)
itr +=1
print(link)
print(type(link))
if link['train_removed'] == True:
# print('wtf')
target = link['target']
source = link['source']
if (target not in val_set or source not in val_set) and link['test_removed'] == False:
print('damn!!')
pass
if link['test_removed'] == True:
target = link['target']
source = link['source']
assert( (target in test_set) and (source in test_set))
|
error.go
|
package qetcd
import "errors"
var (
ErrConf = errors.New("conf error")
ErrHasRegistered = errors.New("address has registed")
|
)
| |
test_core.py
|
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import numpy as np
import openvino.runtime.opset8 as ov
from openvino.runtime.impl import Dimension, Function, PartialShape, Shape
def test_dimension():
dim = Dimension()
assert dim.is_dynamic
assert not dim.is_static
assert repr(dim) == "<Dimension: ?>"
dim = Dimension.dynamic()
assert dim.is_dynamic
assert not dim.is_static
assert repr(dim) == "<Dimension: ?>"
dim = Dimension(10)
assert dim.is_static
assert len(dim) == 10
assert dim.get_length() == 10
assert dim.get_min_length() == 10
assert dim.get_max_length() == 10
assert repr(dim) == "<Dimension: 10>"
dim = Dimension(5, 15)
assert dim.is_dynamic
assert dim.get_min_length() == 5
assert dim.get_max_length() == 15
assert repr(dim) == "<Dimension: [5, 15]>"
def test_dimension_comparisons():
d1 = Dimension.dynamic()
d2 = Dimension.dynamic()
assert d1 == d2
assert d1 == -1
assert d1.refines(d2)
assert d1.relaxes(d2)
assert d2.refines(d1)
assert d2.relaxes(d1)
assert d2.compatible(d1)
assert d2.same_scheme(d1)
d1 = Dimension.dynamic()
d2 = Dimension(3)
assert d1 != d2
assert d2 == 3
assert not d1.refines(d2)
assert d1.relaxes(d2)
assert d2.refines(d1)
assert not d2.relaxes(d1)
assert d2.compatible(d1)
assert not d2.same_scheme(d1)
d1 = Dimension(3)
d2 = Dimension(3)
assert d1 == d2
assert d1.refines(d2)
assert d1.relaxes(d2)
assert d2.refines(d1)
assert d2.relaxes(d1)
assert d2.compatible(d1)
assert d2.same_scheme(d1)
d1 = Dimension(4)
d2 = Dimension(3)
assert d1 != d2
assert not d1.refines(d2)
assert not d1.relaxes(d2)
assert not d2.refines(d1)
assert not d2.relaxes(d1)
assert not d2.compatible(d1)
assert not d2.same_scheme(d1)
def test_partial_shape():
ps = PartialShape([1, 2, 3, 4])
assert ps.is_static
assert not ps.is_dynamic
assert ps.rank == 4
assert repr(ps) == "<PartialShape: {1,2,3,4}>"
assert ps.get_dimension(0) == Dimension(1)
assert ps.get_dimension(1) == Dimension(2)
assert ps.get_dimension(2) == Dimension(3)
assert ps.get_dimension(3) == Dimension(4)
shape = Shape([1, 2, 3])
ps = PartialShape(shape)
assert ps.is_static
assert not ps.is_dynamic
assert ps.all_non_negative
assert ps.rank == 3
assert list(ps.get_shape()) == [1, 2, 3]
assert list(ps.get_max_shape()) == [1, 2, 3]
assert list(ps.get_min_shape()) == [1, 2, 3]
assert list(ps.to_shape()) == [1, 2, 3]
assert repr(shape) == "<Shape: {1, 2, 3}>"
assert repr(ps) == "<PartialShape: {1,2,3}>"
ps = PartialShape([Dimension(1), Dimension(2), Dimension(3), Dimension.dynamic()])
assert not ps.is_static
assert ps.is_dynamic
assert ps.all_non_negative
assert ps.rank == 4
assert list(ps.get_min_shape()) == [1, 2, 3, 0]
assert list(ps.get_max_shape())[3] > 1000000000
assert repr(ps) == "<PartialShape: {1,2,3,?}>"
assert ps.get_dimension(0) == Dimension(1)
assert ps.get_dimension(1) == Dimension(2)
assert ps.get_dimension(2) == Dimension(3)
assert ps.get_dimension(3) == Dimension.dynamic()
ps = PartialShape([1, 2, 3, -1])
assert not ps.is_static
assert ps.is_dynamic
assert ps.all_non_negative
assert ps.rank == 4
assert list(ps.get_min_shape()) == [1, 2, 3, 0]
assert list(ps.get_max_shape())[3] > 1000000000
assert repr(ps) == "<PartialShape: {1,2,3,?}>"
ps = PartialShape.dynamic()
assert not ps.is_static
assert ps.is_dynamic
assert ps.rank == Dimension.dynamic()
assert list(ps.get_min_shape()) == []
assert list(ps.get_max_shape()) == []
assert repr(ps) == "<PartialShape: ?>"
ps = PartialShape.dynamic(r=Dimension(2))
assert not ps.is_static
assert ps.is_dynamic
assert ps.rank == 2
assert 2 == ps.rank
assert list(ps.get_min_shape()) == [0, 0]
assert list(ps.get_max_shape())[0] > 1000000000
assert repr(ps) == "<PartialShape: {?,?}>"
def test_partial_shape_compatible():
ps1 = PartialShape.dynamic()
ps2 = PartialShape.dynamic()
assert ps1.compatible(ps2)
ps1 = PartialShape([3])
ps2 = PartialShape.dynamic()
assert ps1.compatible(ps2)
ps1 = PartialShape.dynamic()
ps2 = PartialShape([4])
assert ps1.compatible(ps2)
ps1 = PartialShape([2, -1, 3, -1, 5])
ps2 = PartialShape([2, -1, -1, 4, 5])
assert ps1.compatible(ps2)
ps1 = PartialShape([2, -1, 3, -1, 5])
ps2 = PartialShape([1, -1, -1, 4, 5])
assert not ps1.compatible(ps2)
def test_partial_shape_same_scheme():
ps1 = PartialShape([1, 2, -1])
ps2 = PartialShape([1, 3, -1])
assert not ps1.same_scheme(ps2)
ps1 = PartialShape([1, 2, -1])
ps2 = PartialShape([1, 2, -1])
assert ps1.same_scheme(ps2)
ps1 = PartialShape([1, 2, 3])
ps2 = PartialShape([1, 2, 3])
assert ps1.same_scheme(ps2)
ps1 = PartialShape([-1, 2, 3])
ps2 = PartialShape([1, -1, 3])
assert not ps1.same_scheme(ps2)
ps1 = PartialShape.dynamic()
ps2 = PartialShape.dynamic()
assert ps1.same_scheme(ps2)
def test_partial_shape_refinement():
ps1 = PartialShape.dynamic()
ps2 = PartialShape.dynamic()
assert ps1.refines(ps2)
assert ps1.relaxes(ps2)
assert ps2.refines(ps1)
assert ps2.relaxes(ps1)
ps1 = PartialShape.dynamic()
ps2 = PartialShape([3, -1, 7, 9])
assert not ps1.refines(ps2)
assert ps1.relaxes(ps2)
assert ps2.refines(ps1)
assert not ps2.relaxes(ps1)
ps1 = PartialShape.dynamic()
ps2 = PartialShape([3, 5, 7, 9])
assert not ps1.refines(ps2)
assert ps1.relaxes(ps2)
assert ps2.refines(ps1)
assert not ps2.relaxes(ps1)
def test_partial_shape_equals():
ps1 = PartialShape.dynamic()
ps2 = PartialShape.dynamic()
assert ps1 == ps2
ps1 = PartialShape([1, 2, 3])
ps2 = PartialShape([1, 2, 3])
assert ps1 == ps2
shape = Shape([1, 2, 3])
ps = PartialShape([1, 2, 3])
assert shape == ps
def
|
():
shape = PartialShape([-1, 2])
parameter_a = ov.parameter(shape, dtype=np.float32, name="A")
parameter_b = ov.parameter(shape, dtype=np.float32, name="B")
model = parameter_a + parameter_b
function = Function(model, [parameter_a, parameter_b], "simple_dyn_shapes_graph")
assert repr(function) == "<Function: 'simple_dyn_shapes_graph' ({?,2})>"
ops = function.get_ordered_ops()
for op in ops:
assert "{?,2}" in repr(op)
def test_discrete_type_info():
data_shape = [6, 12, 10, 24]
data_parameter = ov.parameter(data_shape, name="Data", dtype=np.float32)
k = np.int32(3)
axis = np.int32(1)
n1 = ov.topk(data_parameter, k, axis, "max", "value")
n2 = ov.topk(data_parameter, k, axis, "max", "value")
n3 = ov.sin(0.2)
assert n1.type_info.name == "TopK"
assert n3.type_info.name == "Sin"
assert n1.get_type_info().name == "TopK"
assert n3.get_type_info().name == "Sin"
assert n1.type_info.name == n2.type_info.name
assert n1.type_info.version == n2.type_info.version
assert n1.type_info.parent == n2.type_info.parent
assert n1.get_type_info().name == n2.get_type_info().name
assert n1.get_type_info().version == n2.get_type_info().version
assert n1.get_type_info().parent == n2.get_type_info().parent
assert n1.get_type_info().name != n3.get_type_info().name
assert n1.get_type_info().name > n3.get_type_info().name
assert n1.get_type_info().name >= n3.get_type_info().name
assert n3.get_type_info().name < n1.get_type_info().name
assert n3.get_type_info().name <= n1.get_type_info().name
|
test_repr_dynamic_shape
|
plural_cases.go
|
package messageformat
import (
"libs.altipla.consulting/langs"
"libs.altipla.consulting/messageformat/parse"
)
// Extract language rules as needed from:
// http://www.unicode.org/cldr/charts/latest/supplemental/language_plural_rules.html
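// For example (editor's note, derived from the case functions below): for EN or
// ES only n == 1 selects the "one" category, while for FR both n == 0 and n == 1
// select "one"; every other n falls back to "other".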
func matchesPlural(lang string, c *parse.PluralCase, n int64) bool {
switch lang {
case langs.ES, langs.EN, langs.EU, langs.IT, langs.DE:
return matchesPluralCaseN1(c, n)
case langs.FR:
return matchesPluralCaseFrench(c, n)
}
panic("unsupported message format lang, please add it to plural_cases.go")
}
func matchesPluralCaseN1(c *parse.PluralCase, n int64) bool {
if n == 1 {
return c.Category == parse.PluralOne
}
return c.Category == parse.PluralOther
}
func matchesPluralCaseFrench(c *parse.PluralCase, n int64) bool {
if n == 1 || n == 0 {
return c.Category == parse.PluralOne
}
return c.Category == parse.PluralOther
|
}
|
|
main.rs
|
// Copyright (c) Astral Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Written by Tim Diekmann <[email protected]>, November 2018
// #![warn(
// bad_style,
// nonstandard_style,
// warnings,
// rust_2018_compatibility,
// rust_2018_idioms,
// single_use_lifetimes,
// trivial_casts,
// trivial_numeric_casts,
// variant_size_differences,
// absolute_paths_not_starting_with_crate,
// future_incompatible,
// unused,
// clippy::pedantic
// )]
// #![allow(clippy::cast_precision_loss)]
// use std::error::Error;
// use astral::{
// core::string::Name,
// resource::assets::{FileSystem, VirtualFileSystem},
// third_party::{
// rayon,
// slog::{error, info, o, trace, warn, Drain, Logger},
// },
// };
// fn app(engine: &astral::Engine) -> Result<(), Box<dyn Error>> {
// let core_system = astral::core::System::new(engine);
// let string_subsystem = astral::core::string::Subsystem::new(10 * 1024 * 1024, &core_system);
// let resource_system = astral::resource::System::new(&engine);
// let asset_subsystem = astral::resource::assets::Subsystem::new(&resource_system);
// let directory = Name::new("assets", &string_subsystem);
// let files = FileSystem::new(directory, &asset_subsystem, &string_subsystem)?
// .iter()?
// .collect::<Vec<_>>();
// let mut counter = 0_u32;
// for file in &files {
// counter += 1;
// info!(engine.logger(), "file"; "name" => ?file, "count" => counter);
// }
// let pool = rayon::ThreadPoolBuilder::new()
// .num_threads(2)
// .build()
// .unwrap();
// let ok: Vec<i32> = vec![1, 2, 3];
// pool.scope(|s| {
// let bad: Vec<i32> = vec![4, 5, 6];
// s.spawn(|_| {
// let bad = bad;
// info!(engine.logger(), "ok: {:?}", ok);
// info!(engine.logger(), "bad: {:?}", bad);
// });
// info!(engine.logger(), "borrowed {:?}", ok);
// });
// // catalog[core_namespace].add_virtual_file_system(filesystem)?;
// // let mut registry = Loader::<TextFile, Option<&str>>::new(
// // |string| Ok(TextFile::from(string.unwrap().to_string())),
// // |_, read| {
// // let mut string = String::new();
// // read.read_to_string(&mut string)?;
// // Ok(TextFile::from(string))
// // },
// // );
// // registry.set_catalog(catalog);
// // let cube_model = Location::from_string(core_namespace, "models/cube.obj");
// // let _cube_model_resource_id = registry.declare_asset(cube_model);
// // let _constant_resource_id = registry.declare_resource(Name::from("constant1"));
// // info!("Hello World");
// // dbg!(std::mem::size_of_val(&cube_model));
// // dbg!(std::mem::size_of_val(®istry));
// // dbg!(std::mem::size_of::<Location>());
// // dbg!(std::mem::size_of::<ResourceId>());
// // dbg!(std::mem::size_of::<astral::resource::assets::Error>());
// Ok(())
// }
// fn main() {
// let decorator = slog_term::TermDecorator::new().build();
// let drain = slog_term::CompactFormat::new(decorator).build().fuse();
// // let drain = slog_async::Async::new(drain)
// // .chan_size(64 * 1024)
// // .overflow_strategy(slog_async::OverflowStrategy::Block)
|
// // .fuse();
// let drain = std::sync::Mutex::new(drain).fuse();
// let log = Logger::root(drain, o!());
// trace!(log, "test");
// let engine = astral::Engine::new(&log);
// if let Err(err) = app(&engine) {
// let mut err: &dyn std::error::Error = err.as_ref();
// error!(log, "{}", err);
// while let Some(source) = err.source() {
// error!(log, " {}", source);
// err = source;
// }
// std::process::exit(1);
// }
// }
fn main() {}
|
// // .build()
|
road_storage.py
|
import json
import os
from typing import Tuple, List, Callable
from core.folders import folders
class RoadStorage:
def __init__(self, path: str = None):
if path is None:
path='test_driving'
self.folder = str(folders.member_seeds.joinpath(path))
os.makedirs(self.folder, exist_ok=True)
def all_files(self) -> List[str]:
|
def get_road_path_by_index(self, index) -> str:
assert index > 0
path = os.path.join(self.folder, 'road{:03}_nodes.json'.format(index))
return path
def get_road_nodes_by_index(self, index) -> List[Tuple[float, float, float, float]]:
path = self.get_road_path_by_index(index)
nodes = self.get_road_nodes(path)
return nodes
def get_road_nodes(self, path) -> List[Tuple[float, float, float, float]]:
assert os.path.exists(path), path
with open(path, 'r') as f:
nodes = json.loads(f.read())
return nodes
def cache(self, road_name: str, get_points: Callable) -> List[Tuple[float, float, float, float]]:
path = os.path.join(self.folder, road_name + '.json')
if os.path.exists(path):
with open(path, 'r') as f:
nodes = json.loads(f.read())
else:
nodes = get_points()
with open(path, 'w') as f:
f.write(json.dumps(nodes))
return nodes
def save(self, road_name: str, contents: str) -> List[Tuple[float, float, float, float]]:
path = os.path.join(self.folder, road_name + '.json')
with open(path, 'w') as f:
f.write(contents)
def read(self, path) -> List[Tuple[float, float, float, float]]:
assert os.path.exists(path), path
with open(path, 'r') as f:
beamng_member = json.loads(f.read())
return beamng_member
if __name__ == '__main__':
for i in range(1, 31):
nodes = RoadStorage().get_road_nodes_by_index(i)
print(i, len(nodes))
|
expanded = [os.path.join(self.folder, filename) for filename in os.listdir(self.folder)]
return [path for path in expanded if os.path.isfile(path)]
|
client_config.rs
|
// Copyright 2020-2022 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
use std::time::Duration;
use identity::iota::ClientBuilder;
use identity::iota::DIDMessageEncoding;
use identity::iota_core::Network;
use wasm_bindgen::prelude::*;
use crate::error::WasmResult;
use crate::tangle::WasmDIDMessageEncoding;
/// Try to construct a `ClientBuilder` directly from an `IClientConfig` interface.
impl TryFrom<IClientConfig> for ClientBuilder {
type Error = JsValue;
fn try_from(config: IClientConfig) -> std::result::Result<Self, Self::Error> {
let ConfigOptions {
network,
encoding,
nodes,
primary_node,
primary_pow_node,
permanodes,
node_auth,
node_sync_interval,
node_sync_disabled,
quorum,
quorum_size,
quorum_threshold,
local_pow,
fallback_to_local_pow,
tips_interval,
request_timeout,
} = config.into_serde::<ConfigOptions>().wasm_result()?;
let mut builder: ClientBuilder = ClientBuilder::new();
if let Some(network) = network {
builder = builder.network(network);
}
if let Some(encoding) = encoding {
builder = builder.encoding(DIDMessageEncoding::from(encoding));
}
if let Some(nodes) = nodes {
builder = builder
.nodes(&nodes.iter().map(AsRef::as_ref).collect::<Vec<_>>())
.wasm_result()?;
}
if let Some(NodeAuth {
url,
jwt,
username,
password,
}) = primary_node
{
builder = builder
.primary_node(&url, jwt, basic_auth(&username, &password))
.wasm_result()?;
}
if let Some(NodeAuth {
url,
jwt,
username,
password,
}) = primary_pow_node
{
builder = builder
.primary_pow_node(&url, jwt, basic_auth(&username, &password))
.wasm_result()?;
}
for NodeAuth {
url,
jwt,
username,
password,
} in permanodes.unwrap_or_default()
{
builder = builder
.permanode(&url, jwt, basic_auth(&username, &password))
.wasm_result()?;
}
for NodeAuth {
url,
jwt,
username,
password,
} in node_auth.unwrap_or_default()
{
builder = builder
.node_auth(&url, jwt, basic_auth(&username, &password))
.wasm_result()?;
}
if let Some(node_sync_interval) = node_sync_interval {
builder = builder.node_sync_interval(Duration::from_secs(u64::from(node_sync_interval)));
}
if let Some(node_sync_disabled) = node_sync_disabled {
if node_sync_disabled {
builder = builder.node_sync_disabled();
}
}
if let Some(quorum) = quorum {
builder = builder.quorum(quorum);
}
if let Some(quorum_size) = quorum_size {
builder = builder.quorum_size(quorum_size);
}
if let Some(quorum_threshold) = quorum_threshold {
builder = builder.quorum_threshold(quorum_threshold);
}
if let Some(local_pow) = local_pow {
builder = builder.local_pow(local_pow);
}
if let Some(fallback_to_local_pow) = fallback_to_local_pow {
builder = builder.fallback_to_local_pow(fallback_to_local_pow);
}
if let Some(tips_interval) = tips_interval {
builder = builder.tips_interval(u64::from(tips_interval));
}
if let Some(request_timeout) = request_timeout {
builder = builder.request_timeout(Duration::from_secs(u64::from(request_timeout)));
}
Ok(builder)
}
}
/// Helper function to combine a username and password into a basic authentication tuple.
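/// Returns `Some((username, password))` only when both values are present; otherwise `None`.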
fn
|
<'a>(username: &'a Option<String>, password: &'a Option<String>) -> Option<(&'a str, &'a str)> {
username.as_deref().zip(password.as_deref())
}
#[wasm_bindgen]
extern "C" {
#[wasm_bindgen(typescript_type = "IClientConfig")]
pub type IClientConfig;
}
/// Helper-struct for deserializing [`INodeAuth`].
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
struct NodeAuth {
url: String,
jwt: Option<String>,
username: Option<String>,
password: Option<String>,
}
#[wasm_bindgen(typescript_custom_section)]
const I_NODE_AUTH: &'static str = r#"
/** IOTA node details with optional authentication. */
interface INodeAuth {
readonly url: string;
readonly jwt?: string;
readonly username?: string;
readonly password?: string;
}"#;
/// Helper-struct for deserializing [`IClientConfig`].
#[derive(Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ConfigOptions {
network: Option<Network>,
encoding: Option<WasmDIDMessageEncoding>,
nodes: Option<Vec<String>>,
primary_node: Option<NodeAuth>,
primary_pow_node: Option<NodeAuth>,
permanodes: Option<Vec<NodeAuth>>,
node_auth: Option<Vec<NodeAuth>>,
node_sync_interval: Option<u32>,
node_sync_disabled: Option<bool>,
quorum: Option<bool>,
quorum_size: Option<usize>,
quorum_threshold: Option<usize>,
local_pow: Option<bool>,
fallback_to_local_pow: Option<bool>,
tips_interval: Option<u32>,
request_timeout: Option<u32>,
}
#[wasm_bindgen(typescript_custom_section)]
const I_CLIENT_CONFIG: &'static str = r#"
/** {@link Client} configuration options. */
interface IClientConfig {
/** Sets the IOTA Tangle network. */
readonly network?: Network;
/** Sets the DID message encoding used when publishing to the Tangle. */
readonly encoding?: DIDMessageEncoding;
/** Adds a list of IOTA nodes to use by their URLs. */
readonly nodes?: string[];
/** Sets an IOTA node by its URL to be used as primary node. */
readonly primaryNode?: INodeAuth;
/** Adds an IOTA node by its URL to be used as primary PoW node (for remote PoW). */
readonly primaryPowNode?: INodeAuth;
/** Adds a list of IOTA permanodes by their URLs. */
readonly permanodes?: INodeAuth[];
/** Adds a list of IOTA nodes to be used by their URLs. */
readonly nodeAuth?: INodeAuth[];
/** Sets the node sync interval in seconds. */
readonly nodeSyncInterval?: number;
/** Disables the node sync process. */
readonly nodeSyncDisabled?: boolean;
/** Enables/disables quorum. */
readonly quorum?: boolean;
/** Sets the number of nodes used for quorum. */
readonly quorumSize?: number;
/** Sets the quorum threshold. */
readonly quorumThreshold?: number;
/** Sets whether proof-of-work (PoW) is performed locally or remotely.
* Default: false.
*/
readonly localPow?: boolean;
/** Sets whether the PoW should be done locally in case a node doesn't support remote PoW.
* Default: true.
*/
readonly fallbackToLocalPow?: boolean;
/** Sets the number of seconds that new tips will be requested during PoW. */
readonly tipsInterval?: number;
/** Sets the default request timeout. */
readonly requestTimeout?: number;
}"#;
#[cfg(test)]
mod tests {
use identity::core::FromJson;
use identity::core::Object;
use identity::iota::ClientBuilder;
use identity::iota::DIDMessageEncoding;
use identity::iota_core::Network;
use wasm_bindgen::JsCast;
use wasm_bindgen::JsValue;
use wasm_bindgen_test::*;
use crate::tangle::client_config::ConfigOptions;
use crate::tangle::client_config::NodeAuth;
use crate::tangle::IClientConfig;
fn mock_client_config_json() -> JsValue {
JsValue::from_serde(
&Object::from_json(
r#"{
"network": "dev",
"encoding": 1,
"nodes": ["https://example.com:1", "https://example.com:2"],
"primaryNode": {
"url": "https://example.com:3",
"username": "user",
"password": "pass"
},
"primaryPowNode": {
"url": "https://example.com:4"
},
"permanodes": [{ "url": "https://example.com:5" }, { "url": "https://example.com:6" }],
"nodeAuth": [{ "url": "https://example.com:7" }, { "url": "https://example.com:8" }],
"nodeSyncInterval": 42,
"nodeSyncDisabled": true,
"quorum": true,
"quorumSize": 3,
"quorumThreshold": 2,
"localPow": false,
"fallbackToLocalPow": false,
"tipsInterval": 7,
"requestTimeout": 60
}"#,
)
.unwrap(),
)
.unwrap()
}
#[wasm_bindgen_test]
fn test_client_config_try_from() {
let json: JsValue = mock_client_config_json();
let _client_builder: ClientBuilder = ClientBuilder::try_from(json.unchecked_into::<IClientConfig>()).unwrap();
}
#[wasm_bindgen_test]
fn test_client_config_serde() {
let json: JsValue = mock_client_config_json();
let ConfigOptions {
network,
encoding,
nodes,
primary_node,
primary_pow_node,
permanodes,
node_auth,
node_sync_interval,
node_sync_disabled,
quorum,
quorum_size,
quorum_threshold,
local_pow,
fallback_to_local_pow,
tips_interval,
request_timeout,
} = json.into_serde::<ConfigOptions>().unwrap();
assert_eq!(network, Some(Network::Devnet));
assert_eq!(
encoding.map(DIDMessageEncoding::from),
Some(DIDMessageEncoding::JsonBrotli)
);
assert_eq!(
nodes,
Some(vec![
"https://example.com:1".to_owned(),
"https://example.com:2".to_owned(),
])
);
assert_eq!(
primary_node,
Some(NodeAuth {
url: "https://example.com:3".to_owned(),
jwt: None,
username: Some("user".to_owned()),
password: Some("pass".to_owned()),
})
);
assert_eq!(
primary_pow_node,
Some(NodeAuth {
url: "https://example.com:4".to_owned(),
jwt: None,
username: None,
password: None,
})
);
assert_eq!(
permanodes,
Some(vec![
NodeAuth {
url: "https://example.com:5".to_owned(),
jwt: None,
username: None,
password: None,
},
NodeAuth {
url: "https://example.com:6".to_owned(),
jwt: None,
username: None,
password: None,
},
])
);
assert_eq!(
node_auth,
Some(vec![
NodeAuth {
url: "https://example.com:7".to_owned(),
jwt: None,
username: None,
password: None,
},
NodeAuth {
url: "https://example.com:8".to_owned(),
jwt: None,
username: None,
password: None,
},
])
);
assert_eq!(node_sync_interval, Some(42));
assert_eq!(node_sync_disabled, Some(true));
assert_eq!(quorum, Some(true));
assert_eq!(quorum_size, Some(3));
assert_eq!(quorum_threshold, Some(2));
assert_eq!(local_pow, Some(false));
assert_eq!(fallback_to_local_pow, Some(false));
assert_eq!(tips_interval, Some(7));
assert_eq!(request_timeout, Some(60));
}
}
|
basic_auth
|
patient-banner.test.js
|
import React from 'react';
import { shallow } from 'enzyme';
import * as pb from '../../../src/components/PatientBanner/patient-banner';
describe('Patient Banner component', () => {
let props = {
patientName: 'Morris',
patientId: '123',
};
function expectContainsElement(renderedComponent, expected) {
expect(renderedComponent.containsMatchingElement(expected)).toBeTruthy();
}
it('has a functioning mapStateToProps', () => {
let state = {
patientState: {
currentPatient: {
name: props.patientName,
id: props.patientId,
}
}
};
expect(pb.testingMapStateToProps(state)).toEqual(props);
});
it('contains a default name and id when no patient is selected', () => {
let component = <pb.PatientBanner />;
let renderedComponent = shallow(component);
expectContainsElement(renderedComponent, pb.defaultName);
|
});
it('contains a name and id when a patient is selected', () => {
let component = <pb.PatientBanner {...props} />;
let renderedComponent = shallow(component);
expectContainsElement(renderedComponent, props.patientName);
expectContainsElement(renderedComponent, props.patientId);
});
});
|
expectContainsElement(renderedComponent, pb.defaultId);
|
external-link.directive.ts
|
import { Directive, ElementRef, Input, OnChanges, SimpleChanges } from "@angular/core";
@Directive({
|
@Input() link: string
constructor(private el: ElementRef) {
}
ngOnChanges(changes: SimpleChanges): void {
console.log(this.link, 'link');
if (this.link) {
if (this.link.startsWith('https')) {
this.el.nativeElement.href = this.link;
return;
}
else {
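// Links that do not start with 'https' get a '//' prefix so they are not treated as relative paths.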
this.link = '//' + this.link;
this.el.nativeElement.href = this.link;
return;
}
}
}
}
|
selector: '[externalLink]'
})
export class ExternalLinkDirective implements OnChanges {
|
abios_sdk.go
|
package abios
import (
"bytes"
"encoding/json"
"net/http"
"time"
. "github.com/PatronGG/abios-go-sdk/structs"
"github.com/gobuffalo/uuid"
"github.com/gorilla/websocket"
)
// Constant variables that represent endpoints.
const (
baseUrl = "https://api.abiosgaming.com/v2/"
errorEndpoint = baseUrl + "error"
access_token = baseUrl + "oauth/access_token"
games = baseUrl + "games"
series = baseUrl + "series"
seriesById = series + "/"
matches = baseUrl + "matches/"
tournaments = baseUrl + "tournaments"
tournamentsById = tournaments + "/"
substages = baseUrl + "substages/"
teams = baseUrl + "teams"
teamsById = teams + "/"
players = baseUrl + "players"
playersById = players + "/"
rosters = baseUrl + "rosters/"
search = baseUrl + "search"
incidents = baseUrl + "incidents"
incidentsBySeries = incidents + "/"
organisations = baseUrl + "organisations"
organisationsById = organisations + "/"
// PUSH API
wsBaseUrl = "wss://ws.abiosgaming.com/v0"
wsRestUrl = "https://ws.abiosgaming.com/v0/"
subscriptions = wsRestUrl + "subscription"
subscriptionsById = subscriptions + "/"
pushConfig = wsRestUrl + "config"
)
// AbiosSdk defines the interface of an SDK targeting the Abios endpoints.
type AbiosSdk interface {
SetRate(second, minute int)
Games(params Parameters) (GameStructPaginated, *ErrorStruct)
Series(params Parameters) (SeriesStructPaginated, *ErrorStruct)
SeriesById(id int, params Parameters) (SeriesStruct, *ErrorStruct)
MatchesById(id int, params Parameters) (MatchStruct, *ErrorStruct)
Tournaments(params Parameters) (TournamentStructPaginated, *ErrorStruct)
TournamentsById(id int, params Parameters) (TournamentStruct, *ErrorStruct)
SubstagesById(id int, params Parameters) (SubstageStruct, *ErrorStruct)
Teams(params Parameters) (TeamStructPaginated, *ErrorStruct)
TeamsById(id int, params Parameters) (TeamStruct, *ErrorStruct)
Players(params Parameters) (PlayerStructPaginated, *ErrorStruct)
PlayersById(id int, params Parameters) (PlayerStruct, *ErrorStruct)
RostersById(id int, params Parameters) (RosterStruct, *ErrorStruct)
Search(query string, params Parameters) ([]SearchResultStruct, *ErrorStruct)
Incidents(params Parameters) (IncidentStructPaginated, *ErrorStruct)
IncidentsBySeriesId(id int) (SeriesIncidentsStruct, *ErrorStruct)
Organisations(params Parameters) (OrganisationStructPaginated, *ErrorStruct)
OrganisationsById(id int, params Parameters) (OrganisationStructPaginated, *ErrorStruct)
// PUSH API
CreateSubscription(sub Subscription) (uuid.UUID, error)
ListSubscriptions() ([]Subscription, error)
// UpdateSubscription(id int, sub Subscription) (Subscription, error)
// DeleteSubscription(id int) error
// PushServiceConfig() ([]byte, error)
PushServiceConnect() error
}
// client holds the oauth token returned from authenticate as well as this session's
// requestHandler.
type client struct {
username string
password string
oauth AccessTokenStruct
handler *requestHandler
wsConn *websocket.Conn
reconnectToken uuid.UUID
}
// authenticator makes sure the oauth token doesn't expire.
func (a *client) authenticator() {
for {
// Wait until token is about to expire
expires := time.Duration(a.oauth.ExpiresIn) * time.Second
time.Sleep(expires - time.Minute*9) // Sleep until at most 9 minutes left.
err := a.authenticate() // try once
if err == nil {
continue // It succeeded.
}
// If we get an error we retry every 30 seconds for 5 minutes before we override
// the responses.
retry := time.NewTicker(30 * time.Second)
fail := time.NewTimer(5 * time.Minute)
select {
case <-retry.C:
err = a.authenticate()
if err == nil {
a.handler.override = responseOverride{override: false, data: result{}}
break
}
case <-fail.C:
a.handler.override = responseOverride{override: true, data: *err}
break
}
}
}
// NewAbios returns a new endpoint-wrapper for api version 2 with given credentials.
func
|
(username, password string) *client {
r := newRequestHandler()
c := &client{
username: username,
password: password,
oauth: AccessTokenStruct{},
handler: r,
wsConn: nil,
reconnectToken: uuid.Nil,
}
err := c.authenticate()
if err != nil {
c.handler.override = responseOverride{override: true, data: *err}
}
go c.authenticator() // Launch authenticator
return c
}
// SetRate sets the outgoing rate to "second" requests per second and "minute" requests
// per minute. A value less than or equal to 0 means the previous
// value is kept. Default values are (5, 300).
func (a *client) SetRate(second, minute int) {
a.handler.setRate(second, minute)
}
// authenticate queries the /oauth/access_token endpoint with the given credentials and
// stores the returned oauth token. Returns nil if the request was successful.
func (a *client) authenticate() *result {
var payload = []byte(`grant_type=client_credentials&client_id=` + a.username + `&client_secret=` + a.password)
req, _ := http.NewRequest("POST", access_token, bytes.NewBuffer(payload))
req.Header = http.Header{"Content-Type": {"application/x-www-form-urlencoded"}}
statusCode, b := apiCall(req)
dec := json.NewDecoder(bytes.NewBuffer(b))
if 200 <= statusCode && statusCode < 300 {
target := AccessTokenStruct{}
dec.Decode(&target)
a.oauth = target
return nil
} else {
return &result{statuscode: statusCode, body: b}
}
}
|
New
|
setup.py
|
import os
import json
import shutil
from lib.config import Config
from lib.variables import Variables, HACKERMODE_FOLDER_NAME
RED = '\033[1;31m'
GREEN = '\033[1;32m'
YELLOW = '\033[1;33m'
NORMAL = '\033[0m'
UNDERLINE = '\033[4m'
BOLD = '\033[1m'
with open(os.path.join(Variables.HACKERMODE_PATH, 'packages.json')) as fp:
INSTALL_DATA = json.load(fp)
class HackerModeInstaller:
def python_system_modules(self) -> list:
"""this
function return all modules that installed in system."""
return os.popen("pip3 freeze").read().split("\n")
def is_installed(self, module, python_modules):
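"""Return [module, matching pip entry] if the module appears in the pip3 freeze output, otherwise False."""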
for python_module in python_modules:
if module in python_module:
return [module, python_module]
return False
def installed_message(self, package, show=True):
if show:
default_message = f'{package.split("=")[0]} installed successfully.'
print(f'{NORMAL}[{GREEN}✔{NORMAL}] {GREEN}{default_message}{NORMAL}')
def failed_message(self, package, show=True, is_base=False):
if show:
default_message = f'not able to install "{package}".'
color = RED if is_base else YELLOW
print(f'{NORMAL}[{color}{"✗" if is_base else "!"}{NORMAL}] {color}{default_message}{NORMAL}')
def check(self, show_output=True) -> dict:
"""this
function check packages and modules
and return all packages that not installed.
"""
modules: list = []
packages: list = []
python_modules = self.python_system_modules()
if show_output:
print("\nCHECKING:")
print("python modules:")
for module in INSTALL_DATA["PYTHON3_MODULES"]:
if self.is_installed(module, python_modules) or os.path.exists(
os.popen(f"realpath $(command -v {module}) 2> /dev/null").read().strip()):
self.installed_message(module, show=show_output)
else:
modules.append(module)
self.failed_message(module, show=show_output)
if show_output:
print("packages:")
for package in INSTALL_DATA["PACKAGES"].keys():
if not INSTALL_DATA["PACKAGES"][package][Variables.PLATFORME]:
continue
if os.path.exists(os.popen(f"realpath $(command -v {package.strip()})").read().strip()):
self.installed_message(package, show=show_output)
else:
packages.append(package)
self.failed_message(package, show=show_output)
return {"packages": packages, "modules": modules}
def install(self):
# check platform
if Variables.PLATFORME not in ('termux', 'linux'):
if Variables.PLATFORME == 'unknown':
print("# The tool could not recognize the system!")
print("# Do You want to continue anyway?")
while True:
if input('# [Y/N]: ').lower() == 'y':
break
else:
print('# good bye :D')
return
else:
print(f"# The tool does not support {Variables.PLATFORME}")
print('# good bye :D')
return
# install packages
need_to_install = self.check(show_output=False)
for package in need_to_install["packages"]:
for command in INSTALL_DATA["PACKAGES"][package][Variables.PLATFORME]:
os.system(command)
# install modules
for module in need_to_install["modules"]:
os.system(f"pip3 install {module}")
# move HackerMode to install path
if Config.get('actions', 'DEBUG', False):
print("# can't move the HackerMode folder ")
print("# to install path in debug mode!")
return None
if os.path.isdir(HACKERMODE_FOLDER_NAME):
try:
shutil.move(HACKERMODE_FOLDER_NAME, Variables.HACKERMODE_INSTALL_PATH)
self.install_tools_packages()
Config.set('actions', 'IS_INSTALLED', True)
self.check()
print(f'# {GREEN}HackerMode installed successfully...{NORMAL}')
except shutil.Error as e:
self.delete(show_message=False)
|
print(f'{RED}# Error: the tool path not found!')
print(f'# try to run tool using\n# {GREEN}"python3 HackerMode install"{NORMAL}')
print('# installation failed!')
def update(self):
if not Config.get('actions', 'DEBUG', cast=bool, default=False):
hackermode_command_line_path = os.environ.get("_").split("bin/")[0] + "bin/HackerMode"
if os.path.exists(hackermode_command_line_path):
os.remove(hackermode_command_line_path)
os.system(
f'curl https://raw.githubusercontent.com/Arab-developers/HackerMode/future/install.sh > HackerModeInstall && bash HackerModeInstall')
print(f'# {GREEN}HackerMode updated successfully...{NORMAL}')
else:
print("# can't update in the DEUBG mode!")
def add_shortcut(self):
# add HackerMode shortcut...
try:
with open(Variables.BASHRIC_FILE_PATH, "r") as f:
data = f.read()
if data.find(Variables.HACKERMODE_SHORTCUT.strip()) == -1:
with open(Variables.BASHRIC_FILE_PATH, "w") as f:
f.write(data + Variables.HACKERMODE_SHORTCUT)
except PermissionError:
print(NORMAL + "# add HackerMode shortcut:")
print(f"# '{YELLOW}{Variables.HACKERMODE_SHORTCUT}{NORMAL}'")
print("# to this path:")
print("# " + Variables.HACKERMODE_BIN_PATH)
def delete(self, show_message=True):
if show_message:
status = input("# Do you really want to delete the tool?\n [n/y]: ").lower()
else:
status = "y"
if status in ("y", "yes", "ok", "yep"):
bin_path = os.path.join(os.environ["SHELL"].split("/bin/")[0], "/bin/HackerMode")
tool_path = os.path.join(os.environ["HOME"], ".HackerMode")
if os.path.exists(bin_path):
os.remove(bin_path)
if os.path.exists(tool_path):
shutil.rmtree(tool_path)
try:
with open(Variables.BASHRIC_FILE_PATH, "r") as f:
data = f.read()
if data.find(Variables.HACKERMODE_SHORTCUT.strip()) != -1:
with open(Variables.BASHRIC_FILE_PATH, "w") as f:
f.write(data.replace(Variables.HACKERMODE_SHORTCUT, ""))
except PermissionError:
if show_message:
print("# cannot remove HackerMode shortcut!")
if show_message:
print("# The deletion was successful...")
def install_tools_packages(self):
# compile shell file
old_path = os.getcwd()
os.chdir(os.path.join(os.environ.get("HOME"), ".HackerMode/HackerMode/lib"))
os.system("bash setup.sh")
os.chdir(old_path)
# install tools packages
tools_path = os.path.join(os.environ.get("HOME"), ".HackerMode/HackerMode/tools")
for root, dirs, files in os.walk(tools_path):
for dir in dirs:
if os.path.exists(os.path.join(root, dir, "setup.sh")):
print(f"installing {dir} packages:")
old_path = os.getcwd()
os.chdir(os.path.join(root, dir))
os.system("bash setup.sh")
os.chdir(old_path)
if __name__ == "__main__":
x = HackerModeInstaller()
x.check()
x.install()
|
print(e)
print('# installation failed!')
else:
self.delete(show_message=False)
|
utils.ts
|
export const uncapitalize = (value: string): string =>
|
value.replace(/\w\S*/g, w => w.replace(/^\w/, c => c.toLowerCase()))
|
|
issue-50577.rs
|
// revisions: stock if_match
#![cfg_attr(if_match, feature(const_if_match))]
fn
|
() {
enum Foo {
Drop = assert_eq!(1, 1)
//[stock,if_match]~^ ERROR if may be missing an else clause
//[stock]~^^ ERROR `match` is not allowed in a `const`
//[stock]~| ERROR `match` is not allowed in a `const`
//[stock]~| ERROR `if` is not allowed in a `const`
}
}
|
main
|
admin.py
|
from django.contrib import admin
from .models import UserProfile
# Register your models here.
class UserProfileAdmin(admin.ModelAdmin):
|
admin.site.register(UserProfile, UserProfileAdmin)
|
list_display = ('username', 'email', 'mobile', 'is_superuser', 'date_joined')
search_fields = ('username', 'email', 'mobile',)
list_filter = ('date_joined',)
|
configuration.go
|
/*
* Yahoo Finance
*
* Yahoo Finance API specification
*
* API version: 1.0.8
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package yq2
import (
"context"
"fmt"
"net/http"
"strings"
)
// contextKeys are used to identify the type of value in the context.
// Since these are strings, it is possible to get a short description of the
// context key for logging and debugging using key.String().
type contextKey string
func (c contextKey) String() string {
return "auth " + string(c)
}
var (
// ContextOAuth2 takes an oauth2.TokenSource as authentication for the request.
ContextOAuth2 = contextKey("token")
// ContextBasicAuth takes BasicAuth as authentication for the request.
ContextBasicAuth = contextKey("basic")
// ContextAccessToken takes a string oauth2 access token as authentication for the request.
ContextAccessToken = contextKey("accesstoken")
// ContextAPIKeys takes a string apikey as authentication for the request
ContextAPIKeys = contextKey("apiKeys")
// ContextHttpSignatureAuth takes HttpSignatureAuth as authentication for the request.
ContextHttpSignatureAuth = contextKey("httpsignature")
// ContextServerIndex uses a server configuration from the index.
ContextServerIndex = contextKey("serverIndex")
// ContextOperationServerIndices uses a server configuration from the index mapping.
ContextOperationServerIndices = contextKey("serverOperationIndices")
// ContextServerVariables overrides a server configuration variables.
ContextServerVariables = contextKey("serverVariables")
// ContextOperationServerVariables overrides a server configuration variables using operation specific values.
ContextOperationServerVariables = contextKey("serverOperationVariables")
)
// BasicAuth provides basic http authentication to a request passed via context using ContextBasicAuth
type BasicAuth struct {
UserName string `json:"userName,omitempty"`
Password string `json:"password,omitempty"`
}
// APIKey provides API key based authentication to a request passed via context using ContextAPIKey
type APIKey struct {
Key string
Prefix string
}
// ServerVariable stores the information about a server variable
type ServerVariable struct {
Description string
DefaultValue string
EnumValues []string
}
// ServerConfiguration stores the information about a server
type ServerConfiguration struct {
URL string
Description string
Variables map[string]ServerVariable
}
// ServerConfigurations stores multiple ServerConfiguration items
type ServerConfigurations []ServerConfiguration
// Configuration stores the configuration of the API client
type Configuration struct {
Host string `json:"host,omitempty"`
Scheme string `json:"scheme,omitempty"`
DefaultHeader map[string]string `json:"defaultHeader,omitempty"`
UserAgent string `json:"userAgent,omitempty"`
Debug bool `json:"debug,omitempty"`
Servers ServerConfigurations
OperationServers map[string]ServerConfigurations
HTTPClient *http.Client
}
// NewConfiguration returns a new Configuration object
func NewConfiguration() *Configuration {
cfg := &Configuration{
DefaultHeader: make(map[string]string),
UserAgent: "OpenAPI-Generator/1.0.0/go",
Debug: false,
Servers: ServerConfigurations{
{
URL: "https://query2.finance.yahoo.com",
Description: "No description provided",
},
},
OperationServers: map[string]ServerConfigurations{
},
}
return cfg
}
// AddDefaultHeader adds a new HTTP header to the default header in the request
func (c *Configuration) AddDefaultHeader(key string, value string) {
c.DefaultHeader[key] = value
}
// URL formats the server URL template at the given index using the given variables.
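// For example, a server URL of "https://{region}.example.com" with variables {"region": "eu"} resolves to "https://eu.example.com".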
func (sc ServerConfigurations) URL(index int, variables map[string]string) (string, error) {
if index < 0 || len(sc) <= index {
return "", fmt.Errorf("Index %v out of range %v", index, len(sc)-1)
}
server := sc[index]
url := server.URL
// go through variables and replace placeholders
for name, variable := range server.Variables {
if value, ok := variables[name]; ok {
found := bool(len(variable.EnumValues) == 0)
for _, enumValue := range variable.EnumValues {
if value == enumValue {
found = true
}
}
if !found {
return "", fmt.Errorf("The variable %s in the server URL has invalid value %v. Must be %v", name, value, variable.EnumValues)
}
url = strings.Replace(url, "{"+name+"}", value, -1)
} else {
url = strings.Replace(url, "{"+name+"}", variable.DefaultValue, -1)
}
}
return url, nil
}
// ServerURL returns URL based on server settings
func (c *Configuration) ServerURL(index int, variables map[string]string) (string, error) {
return c.Servers.URL(index, variables)
}
func getServerIndex(ctx context.Context) (int, error) {
si := ctx.Value(ContextServerIndex)
if si != nil {
if index, ok := si.(int); ok {
return index, nil
}
return 0, reportError("Invalid type %T should be int", si)
}
return 0, nil
}
func getServerOperationIndex(ctx context.Context, endpoint string) (int, error) {
osi := ctx.Value(ContextOperationServerIndices)
if osi != nil {
if operationIndices, ok := osi.(map[string]int); !ok {
return 0, reportError("Invalid type %T should be map[string]int", osi)
} else {
index, ok := operationIndices[endpoint]
if ok {
return index, nil
}
}
}
return getServerIndex(ctx)
}
func
|
(ctx context.Context) (map[string]string, error) {
sv := ctx.Value(ContextServerVariables)
if sv != nil {
if variables, ok := sv.(map[string]string); ok {
return variables, nil
}
return nil, reportError("ctx value of ContextServerVariables has invalid type %T should be map[string]string", sv)
}
return nil, nil
}
func getServerOperationVariables(ctx context.Context, endpoint string) (map[string]string, error) {
osv := ctx.Value(ContextOperationServerVariables)
if osv != nil {
if operationVariables, ok := osv.(map[string]map[string]string); !ok {
return nil, reportError("ctx value of ContextOperationServerVariables has invalid type %T should be map[string]map[string]string", osv)
} else {
variables, ok := operationVariables[endpoint]
if ok {
return variables, nil
}
}
}
return getServerVariables(ctx)
}
// ServerURLWithContext returns a new server URL given an endpoint
func (c *Configuration) ServerURLWithContext(ctx context.Context, endpoint string) (string, error) {
sc, ok := c.OperationServers[endpoint]
if !ok {
sc = c.Servers
}
if ctx == nil {
return sc.URL(0, nil)
}
index, err := getServerOperationIndex(ctx, endpoint)
if err != nil {
return "", err
}
variables, err := getServerOperationVariables(ctx, endpoint)
if err != nil {
return "", err
}
return sc.URL(index, variables)
}
|
getServerVariables
|
connector.go
|
// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package confluent
import (
"context"
"reflect"
"github.com/pkg/errors"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
type Connector struct {
pulumi.CustomResourceState
// ID of containing cluster, e.g. lkc-abc123
ClusterId pulumi.StringOutput `pulumi:"clusterId"`
// Type-specific Configuration of connector. String keys and values
Config pulumi.MapOutput `pulumi:"config"`
// Sensitive part of connector configuration. String keys and values
ConfigSensitive pulumi.MapOutput `pulumi:"configSensitive"`
// ID of containing environment, e.g. env-abc123
EnvironmentId pulumi.StringOutput `pulumi:"environmentId"`
// The name of the connector
Name pulumi.StringOutput `pulumi:"name"`
}
// NewConnector registers a new resource with the given unique name, arguments, and options.
func NewConnector(ctx *pulumi.Context,
name string, args *ConnectorArgs, opts ...pulumi.ResourceOption) (*Connector, error) {
if args == nil {
return nil, errors.New("missing one or more required arguments")
}
if args.ClusterId == nil {
return nil, errors.New("invalid value for required argument 'ClusterId'")
}
if args.Config == nil
|
if args.EnvironmentId == nil {
return nil, errors.New("invalid value for required argument 'EnvironmentId'")
}
var resource Connector
err := ctx.RegisterResource("confluent:index/connector:Connector", name, args, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// GetConnector gets an existing Connector resource's state with the given name, ID, and optional
// state properties that are used to uniquely qualify the lookup (nil if not required).
func GetConnector(ctx *pulumi.Context,
name string, id pulumi.IDInput, state *ConnectorState, opts ...pulumi.ResourceOption) (*Connector, error) {
var resource Connector
err := ctx.ReadResource("confluent:index/connector:Connector", name, id, state, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// Input properties used for looking up and filtering Connector resources.
type connectorState struct {
// ID of containing cluster, e.g. lkc-abc123
ClusterId *string `pulumi:"clusterId"`
// Type-specific Configuration of connector. String keys and values
Config map[string]interface{} `pulumi:"config"`
// Sensitive part of connector configuration. String keys and values
ConfigSensitive map[string]interface{} `pulumi:"configSensitive"`
// ID of containing environment, e.g. env-abc123
EnvironmentId *string `pulumi:"environmentId"`
// The name of the connector
Name *string `pulumi:"name"`
}
type ConnectorState struct {
// ID of containing cluster, e.g. lkc-abc123
ClusterId pulumi.StringPtrInput
// Type-specific Configuration of connector. String keys and values
Config pulumi.MapInput
// Sensitive part of connector configuration. String keys and values
ConfigSensitive pulumi.MapInput
// ID of containing environment, e.g. env-abc123
EnvironmentId pulumi.StringPtrInput
// The name of the connector
Name pulumi.StringPtrInput
}
func (ConnectorState) ElementType() reflect.Type {
return reflect.TypeOf((*connectorState)(nil)).Elem()
}
type connectorArgs struct {
// ID of containing cluster, e.g. lkc-abc123
ClusterId string `pulumi:"clusterId"`
// Type-specific Configuration of connector. String keys and values
Config map[string]interface{} `pulumi:"config"`
// Sensitive part of connector configuration. String keys and values
ConfigSensitive map[string]interface{} `pulumi:"configSensitive"`
// ID of containing environment, e.g. env-abc123
EnvironmentId string `pulumi:"environmentId"`
// The name of the connector
Name *string `pulumi:"name"`
}
// The set of arguments for constructing a Connector resource.
type ConnectorArgs struct {
// ID of containing cluster, e.g. lkc-abc123
ClusterId pulumi.StringInput
// Type-specific Configuration of connector. String keys and values
Config pulumi.MapInput
// Sensitive part of connector configuration. String keys and values
ConfigSensitive pulumi.MapInput
// ID of containing environment, e.g. env-abc123
EnvironmentId pulumi.StringInput
// The name of the connector
Name pulumi.StringPtrInput
}
func (ConnectorArgs) ElementType() reflect.Type {
return reflect.TypeOf((*connectorArgs)(nil)).Elem()
}
type ConnectorInput interface {
pulumi.Input
ToConnectorOutput() ConnectorOutput
ToConnectorOutputWithContext(ctx context.Context) ConnectorOutput
}
func (*Connector) ElementType() reflect.Type {
return reflect.TypeOf((*Connector)(nil))
}
func (i *Connector) ToConnectorOutput() ConnectorOutput {
return i.ToConnectorOutputWithContext(context.Background())
}
func (i *Connector) ToConnectorOutputWithContext(ctx context.Context) ConnectorOutput {
return pulumi.ToOutputWithContext(ctx, i).(ConnectorOutput)
}
func (i *Connector) ToConnectorPtrOutput() ConnectorPtrOutput {
return i.ToConnectorPtrOutputWithContext(context.Background())
}
func (i *Connector) ToConnectorPtrOutputWithContext(ctx context.Context) ConnectorPtrOutput {
return pulumi.ToOutputWithContext(ctx, i).(ConnectorPtrOutput)
}
type ConnectorPtrInput interface {
pulumi.Input
ToConnectorPtrOutput() ConnectorPtrOutput
ToConnectorPtrOutputWithContext(ctx context.Context) ConnectorPtrOutput
}
type connectorPtrType ConnectorArgs
func (*connectorPtrType) ElementType() reflect.Type {
return reflect.TypeOf((**Connector)(nil))
}
func (i *connectorPtrType) ToConnectorPtrOutput() ConnectorPtrOutput {
return i.ToConnectorPtrOutputWithContext(context.Background())
}
func (i *connectorPtrType) ToConnectorPtrOutputWithContext(ctx context.Context) ConnectorPtrOutput {
return pulumi.ToOutputWithContext(ctx, i).(ConnectorPtrOutput)
}
// ConnectorArrayInput is an input type that accepts ConnectorArray and ConnectorArrayOutput values.
// You can construct a concrete instance of `ConnectorArrayInput` via:
//
// ConnectorArray{ ConnectorArgs{...} }
type ConnectorArrayInput interface {
pulumi.Input
ToConnectorArrayOutput() ConnectorArrayOutput
ToConnectorArrayOutputWithContext(context.Context) ConnectorArrayOutput
}
type ConnectorArray []ConnectorInput
func (ConnectorArray) ElementType() reflect.Type {
return reflect.TypeOf((*[]*Connector)(nil)).Elem()
}
func (i ConnectorArray) ToConnectorArrayOutput() ConnectorArrayOutput {
return i.ToConnectorArrayOutputWithContext(context.Background())
}
func (i ConnectorArray) ToConnectorArrayOutputWithContext(ctx context.Context) ConnectorArrayOutput {
return pulumi.ToOutputWithContext(ctx, i).(ConnectorArrayOutput)
}
// ConnectorMapInput is an input type that accepts ConnectorMap and ConnectorMapOutput values.
// You can construct a concrete instance of `ConnectorMapInput` via:
//
// ConnectorMap{ "key": ConnectorArgs{...} }
type ConnectorMapInput interface {
pulumi.Input
ToConnectorMapOutput() ConnectorMapOutput
ToConnectorMapOutputWithContext(context.Context) ConnectorMapOutput
}
type ConnectorMap map[string]ConnectorInput
func (ConnectorMap) ElementType() reflect.Type {
return reflect.TypeOf((*map[string]*Connector)(nil)).Elem()
}
func (i ConnectorMap) ToConnectorMapOutput() ConnectorMapOutput {
return i.ToConnectorMapOutputWithContext(context.Background())
}
func (i ConnectorMap) ToConnectorMapOutputWithContext(ctx context.Context) ConnectorMapOutput {
return pulumi.ToOutputWithContext(ctx, i).(ConnectorMapOutput)
}
type ConnectorOutput struct{ *pulumi.OutputState }
func (ConnectorOutput) ElementType() reflect.Type {
return reflect.TypeOf((*Connector)(nil))
}
func (o ConnectorOutput) ToConnectorOutput() ConnectorOutput {
return o
}
func (o ConnectorOutput) ToConnectorOutputWithContext(ctx context.Context) ConnectorOutput {
return o
}
func (o ConnectorOutput) ToConnectorPtrOutput() ConnectorPtrOutput {
return o.ToConnectorPtrOutputWithContext(context.Background())
}
func (o ConnectorOutput) ToConnectorPtrOutputWithContext(ctx context.Context) ConnectorPtrOutput {
return o.ApplyTWithContext(ctx, func(_ context.Context, v Connector) *Connector {
return &v
}).(ConnectorPtrOutput)
}
type ConnectorPtrOutput struct{ *pulumi.OutputState }
func (ConnectorPtrOutput) ElementType() reflect.Type {
return reflect.TypeOf((**Connector)(nil))
}
func (o ConnectorPtrOutput) ToConnectorPtrOutput() ConnectorPtrOutput {
return o
}
func (o ConnectorPtrOutput) ToConnectorPtrOutputWithContext(ctx context.Context) ConnectorPtrOutput {
return o
}
func (o ConnectorPtrOutput) Elem() ConnectorOutput {
return o.ApplyT(func(v *Connector) Connector {
if v != nil {
return *v
}
var ret Connector
return ret
}).(ConnectorOutput)
}
type ConnectorArrayOutput struct{ *pulumi.OutputState }
func (ConnectorArrayOutput) ElementType() reflect.Type {
return reflect.TypeOf((*[]Connector)(nil))
}
func (o ConnectorArrayOutput) ToConnectorArrayOutput() ConnectorArrayOutput {
return o
}
func (o ConnectorArrayOutput) ToConnectorArrayOutputWithContext(ctx context.Context) ConnectorArrayOutput {
return o
}
func (o ConnectorArrayOutput) Index(i pulumi.IntInput) ConnectorOutput {
return pulumi.All(o, i).ApplyT(func(vs []interface{}) Connector {
return vs[0].([]Connector)[vs[1].(int)]
}).(ConnectorOutput)
}
type ConnectorMapOutput struct{ *pulumi.OutputState }
func (ConnectorMapOutput) ElementType() reflect.Type {
return reflect.TypeOf((*map[string]Connector)(nil))
}
func (o ConnectorMapOutput) ToConnectorMapOutput() ConnectorMapOutput {
return o
}
func (o ConnectorMapOutput) ToConnectorMapOutputWithContext(ctx context.Context) ConnectorMapOutput {
return o
}
func (o ConnectorMapOutput) MapIndex(k pulumi.StringInput) ConnectorOutput {
return pulumi.All(o, k).ApplyT(func(vs []interface{}) Connector {
return vs[0].(map[string]Connector)[vs[1].(string)]
}).(ConnectorOutput)
}
func init() {
pulumi.RegisterInputType(reflect.TypeOf((*ConnectorInput)(nil)).Elem(), &Connector{})
pulumi.RegisterInputType(reflect.TypeOf((*ConnectorPtrInput)(nil)).Elem(), &Connector{})
pulumi.RegisterInputType(reflect.TypeOf((*ConnectorArrayInput)(nil)).Elem(), ConnectorArray{})
pulumi.RegisterInputType(reflect.TypeOf((*ConnectorMapInput)(nil)).Elem(), ConnectorMap{})
pulumi.RegisterOutputType(ConnectorOutput{})
pulumi.RegisterOutputType(ConnectorPtrOutput{})
pulumi.RegisterOutputType(ConnectorArrayOutput{})
pulumi.RegisterOutputType(ConnectorMapOutput{})
}
|
{
return nil, errors.New("invalid value for required argument 'Config'")
}
|
label.go
|
// Package label is a priority label based selector.
package label
import (
"context"
"sync"
"github.com/jinbanglin/go-micro/cmd"
"github.com/jinbanglin/go-micro/registry"
"github.com/jinbanglin/go-micro/selector"
)
/*
A priority based label selector. Rather than just returning nodes with specific labels
this selector orders the nodes based on a list of labels. If no labels match, all the
nodes are still returned. The priority based label selector is useful for such things
as rudimentary AZ based routing where requests made to other services should remain
in the same AZ.
*/
type labelSelector struct {
so selector.Options
}
func init() {
cmd.DefaultSelectors["label"] = NewSelector
}
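// prioritise orders nodes so that those whose metadata matches the given labels
// (in label order) come first; non-matching nodes are appended afterwards, so no
// node is ever dropped. For example, with a single label {az: us-east-1}, nodes
// with Metadata["az"] == "us-east-1" are returned ahead of all other nodes.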
func prioritise(nodes []*registry.Node, labels []label) []*registry.Node {
var lnodes []*registry.Node
marked := make(map[string]bool)
for _, label := range labels {
for _, node := range nodes {
// already used
if _, ok := marked[node.Id]; ok {
continue
}
// nil metadata?
if node.Metadata == nil {
continue
}
// matching label?
if val, ok := node.Metadata[label.key]; !ok || label.val != val {
continue
}
// matched! mark it
marked[node.Id] = true
// append to nodes
lnodes = append(lnodes, node)
}
}
// grab the leftovers
for _, node := range nodes {
if _, ok := marked[node.Id]; ok {
continue
}
lnodes = append(lnodes, node)
}
return lnodes
}
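// next returns a thread-safe iterator over the prioritised node list that wraps
// around to the first node once the end is reached.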
func next(nodes []*registry.Node) func() (*registry.Node, error) {
var i int
var mtx sync.Mutex
return func() (*registry.Node, error) {
mtx.Lock()
if i >= len(nodes)
|
node := nodes[i]
i++
mtx.Unlock()
return node, nil
}
}
func (r *labelSelector) Init(opts ...selector.Option) error {
for _, o := range opts {
o(&r.so)
}
return nil
}
func (r *labelSelector) Options() selector.Options {
return r.so
}
func (r *labelSelector) Select(service string, opts ...selector.SelectOption) (selector.Next, error) {
var sopts selector.SelectOptions
for _, opt := range opts {
opt(&sopts)
}
// get the service
services, err := r.so.Registry.GetService(service)
if err != nil {
return nil, err
}
// apply the filters
for _, filter := range sopts.Filters {
services = filter(services)
}
// if there's nothing left, return
if len(services) == 0 {
return nil, selector.ErrNotFound
}
var nodes []*registry.Node
// flatten node list
for _, service := range services {
for _, node := range service.Nodes {
nodes = append(nodes, node)
}
}
// any nodes left?
if len(nodes) == 0 {
return nil, selector.ErrNotFound
}
// now prioritise the list based on labels;
// this is O(m*n) over m labels and n nodes
if labels, ok := r.so.Context.Value(labelKey{}).([]label); ok {
nodes = prioritise(nodes, labels)
}
return next(nodes), nil
}
func (r *labelSelector) Mark(service string, node *registry.Node, err error) {
return
}
func (r *labelSelector) Reset(service string) {
return
}
func (r *labelSelector) Close() error {
return nil
}
func (r *labelSelector) String() string {
return "label"
}
func NewSelector(opts ...selector.Option) selector.Selector {
sopts := selector.Options{
Context: context.TODO(),
Registry: registry.DefaultRegistry,
}
for _, opt := range opts {
opt(&sopts)
}
return &labelSelector{sopts}
}
|
{
i = 0
}
|
label.go
|
// Copyright 2021 Google LLC. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
|
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package label
import (
"context"
"fmt"
"strings"
"github.com/apigee/registry/cmd/registry/core"
"github.com/apigee/registry/connection"
"github.com/apigee/registry/gapic"
"github.com/apigee/registry/log"
"github.com/apigee/registry/rpc"
"github.com/apigee/registry/server/registry/names"
"github.com/spf13/cobra"
"google.golang.org/genproto/protobuf/field_mask"
)
func Command(ctx context.Context) *cobra.Command {
var (
filter string
overwrite bool
)
cmd := &cobra.Command{
Use: "label RESOURCE KEY_1=VAL_1 ... KEY_N=VAL_N",
Short: "Label resources in the API Registry",
Args: cobra.MinimumNArgs(2),
Run: func(cmd *cobra.Command, args []string) {
client, err := connection.NewClient(ctx)
if err != nil {
log.FromContext(ctx).WithError(err).Fatal("Failed to get client")
}
taskQueue, wait := core.WorkerPool(ctx, 64)
defer wait()
valuesToClear := make([]string, 0)
valuesToSet := make(map[string]string)
for _, operation := range args[1:] {
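// A trailing "-" (e.g. "owner-") marks the label for removal; otherwise the argument must be of the form "key=value".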
if len(operation) > 1 && strings.HasSuffix(operation, "-") {
valuesToClear = append(valuesToClear, strings.TrimSuffix(operation, "-"))
} else {
pair := strings.Split(operation, "=")
if len(pair) != 2 {
log.Fatalf(ctx, "%q must have the form \"key=value\" (value can be empty) or \"key-\" (to remove the key)", operation)
}
if pair[0] == "" {
log.Fatalf(ctx, "%q is invalid because it specifies an empty key", operation)
}
valuesToSet[pair[0]] = pair[1]
}
}
labeling := &core.Labeling{Overwrite: overwrite, Set: valuesToSet, Clear: valuesToClear}
err = matchAndHandleLabelCmd(ctx, client, taskQueue, args[0], filter, labeling)
if err != nil {
log.FromContext(ctx).WithError(err).Fatal("Failed to handle command")
}
},
}
cmd.Flags().StringVar(&filter, "filter", "", "Filter selected resources")
cmd.Flags().BoolVar(&overwrite, "overwrite", false, "Overwrite existing labels")
return cmd
}
func matchAndHandleLabelCmd(
ctx context.Context,
client connection.Client,
taskQueue chan<- core.Task,
name string,
filter string,
labeling *core.Labeling,
) error {
// First try to match collection names.
if api, err := names.ParseApiCollection(name); err == nil {
return labelAPIs(ctx, client, api, filter, labeling, taskQueue)
} else if version, err := names.ParseVersionCollection(name); err == nil {
return labelVersions(ctx, client, version, filter, labeling, taskQueue)
} else if spec, err := names.ParseSpecCollection(name); err == nil {
return labelSpecs(ctx, client, spec, filter, labeling, taskQueue)
} else if deployment, err := names.ParseDeploymentCollection(name); err == nil {
return labelDeployments(ctx, client, deployment, filter, labeling, taskQueue)
}
// Then try to match resource names.
if api, err := names.ParseApi(name); err == nil {
return labelAPIs(ctx, client, api, filter, labeling, taskQueue)
} else if version, err := names.ParseVersion(name); err == nil {
return labelVersions(ctx, client, version, filter, labeling, taskQueue)
} else if spec, err := names.ParseSpec(name); err == nil {
return labelSpecs(ctx, client, spec, filter, labeling, taskQueue)
} else if deployment, err := names.ParseDeployment(name); err == nil {
return labelDeployments(ctx, client, deployment, filter, labeling, taskQueue)
} else {
return fmt.Errorf("unsupported resource name %s", name)
}
}
func labelAPIs(ctx context.Context,
client *gapic.RegistryClient,
api names.Api,
filterFlag string,
labeling *core.Labeling,
taskQueue chan<- core.Task) error {
return core.ListAPIs(ctx, client, api, filterFlag, func(api *rpc.Api) {
taskQueue <- &labelApiTask{
client: client,
api: api,
labeling: labeling,
}
})
}
func labelVersions(
ctx context.Context,
client *gapic.RegistryClient,
version names.Version,
filterFlag string,
labeling *core.Labeling,
taskQueue chan<- core.Task) error {
return core.ListVersions(ctx, client, version, filterFlag, func(version *rpc.ApiVersion) {
taskQueue <- &labelVersionTask{
client: client,
version: version,
labeling: labeling,
}
})
}
func labelSpecs(
ctx context.Context,
client *gapic.RegistryClient,
spec names.Spec,
filterFlag string,
labeling *core.Labeling,
taskQueue chan<- core.Task) error {
return core.ListSpecs(ctx, client, spec, filterFlag, func(spec *rpc.ApiSpec) {
taskQueue <- &labelSpecTask{
client: client,
spec: spec,
labeling: labeling,
}
})
}
func labelDeployments(
ctx context.Context,
client *gapic.RegistryClient,
deployment names.Deployment,
filterFlag string,
labeling *core.Labeling,
taskQueue chan<- core.Task) error {
return core.ListDeployments(ctx, client, deployment, filterFlag, func(deployment *rpc.ApiDeployment) {
taskQueue <- &labelDeploymentTask{
client: client,
deployment: deployment,
labeling: labeling,
}
})
}
type labelApiTask struct {
client connection.Client
api *rpc.Api
labeling *core.Labeling
}
func (task *labelApiTask) String() string {
return "label " + task.api.Name
}
func (task *labelApiTask) Run(ctx context.Context) error {
var err error
task.api.Labels, err = task.labeling.Apply(task.api.Labels)
if err != nil {
log.FromContext(ctx).WithError(err).Errorf("Invalid labelling")
return nil
}
_, err = task.client.UpdateApi(ctx,
&rpc.UpdateApiRequest{
Api: task.api,
UpdateMask: &field_mask.FieldMask{
Paths: []string{"labels"},
},
})
return err
}
type labelVersionTask struct {
client connection.Client
version *rpc.ApiVersion
labeling *core.Labeling
}
func (task *labelVersionTask) String() string {
return "label " + task.version.Name
}
func (task *labelVersionTask) Run(ctx context.Context) error {
var err error
task.version.Labels, err = task.labeling.Apply(task.version.Labels)
if err != nil {
log.FromContext(ctx).WithError(err).Errorf("Invalid labelling")
return nil
}
_, err = task.client.UpdateApiVersion(ctx,
&rpc.UpdateApiVersionRequest{
ApiVersion: task.version,
UpdateMask: &field_mask.FieldMask{
Paths: []string{"labels"},
},
})
return err
}
type labelSpecTask struct {
client connection.Client
spec *rpc.ApiSpec
labeling *core.Labeling
}
func (task *labelSpecTask) String() string {
return "label " + task.spec.Name
}
func (task *labelSpecTask) Run(ctx context.Context) error {
var err error
task.spec.Labels, err = task.labeling.Apply(task.spec.Labels)
if err != nil {
log.FromContext(ctx).WithError(err).Errorf("Invalid labelling")
return nil
}
_, err = task.client.UpdateApiSpec(ctx,
&rpc.UpdateApiSpecRequest{
ApiSpec: task.spec,
UpdateMask: &field_mask.FieldMask{
Paths: []string{"labels"},
},
})
return err
}
type labelDeploymentTask struct {
client connection.Client
deployment *rpc.ApiDeployment
labeling *core.Labeling
}
func (task *labelDeploymentTask) String() string {
return "label " + task.deployment.Name
}
func (task *labelDeploymentTask) Run(ctx context.Context) error {
var err error
task.deployment.Labels, err = task.labeling.Apply(task.deployment.Labels)
if err != nil {
log.FromContext(ctx).WithError(err).Errorf("Invalid labelling")
return nil
}
_, err = task.client.UpdateApiDeployment(ctx,
&rpc.UpdateApiDeploymentRequest{
ApiDeployment: task.deployment,
UpdateMask: &field_mask.FieldMask{
Paths: []string{"labels"},
},
})
return err
}
|
//
|
message.pb.go
|
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: message.proto
package messageswap_pb
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import _ "github.com/gogo/protobuf/gogoproto"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
type Message_MessageType int32
const (
Message_PUT_VALUE Message_MessageType = 0
Message_GET_VALUE Message_MessageType = 1
Message_ADD_PROVIDER Message_MessageType = 2
Message_GET_PROVIDERS Message_MessageType = 3
Message_FIND_NODE Message_MessageType = 4
Message_PING Message_MessageType = 5
Message_GET_PREFIX Message_MessageType = 6
Message_APPEND_VALUE Message_MessageType = 7
Message_META_SYNC Message_MessageType = 8
Message_User_Init_Req Message_MessageType = 9
Message_User_Init_Res Message_MessageType = 10
Message_User_NewKP_Req Message_MessageType = 11
Message_New_User_Notif Message_MessageType = 12
Message_Block_Meta Message_MessageType = 13
Message_Delete_Block Message_MessageType = 14
Message_Challenge Message_MessageType = 15
Message_Proof Message_MessageType = 16
Message_Proof_Sync Message_MessageType = 17
Message_Repair Message_MessageType = 18
Message_Query_Info Message_MessageType = 19
Message_Repair_Res Message_MessageType = 20
Message_Storage_Sync Message_MessageType = 21
Message_MetaInfo Message_MessageType = 22
Message_MetaBroadcast Message_MessageType = 23
)
var Message_MessageType_name = map[int32]string{
0: "PUT_VALUE",
1: "GET_VALUE",
2: "ADD_PROVIDER",
3: "GET_PROVIDERS",
4: "FIND_NODE",
5: "PING",
6: "GET_PREFIX",
7: "APPEND_VALUE",
8: "META_SYNC",
9: "User_Init_Req",
10: "User_Init_Res",
11: "User_NewKP_Req",
12: "New_User_Notif",
13: "Block_Meta",
14: "Delete_Block",
15: "Challenge",
16: "Proof",
17: "Proof_Sync",
18: "Repair",
19: "Query_Info",
20: "Repair_Res",
21: "Storage_Sync",
22: "MetaInfo",
23: "MetaBroadcast",
}
var Message_MessageType_value = map[string]int32{
"PUT_VALUE": 0,
"GET_VALUE": 1,
"ADD_PROVIDER": 2,
"GET_PROVIDERS": 3,
"FIND_NODE": 4,
"PING": 5,
"GET_PREFIX": 6,
"APPEND_VALUE": 7,
"META_SYNC": 8,
"User_Init_Req": 9,
"User_Init_Res": 10,
"User_NewKP_Req": 11,
"New_User_Notif": 12,
"Block_Meta": 13,
"Delete_Block": 14,
"Challenge": 15,
"Proof": 16,
"Proof_Sync": 17,
"Repair": 18,
"Query_Info": 19,
"Repair_Res": 20,
"Storage_Sync": 21,
"MetaInfo": 22,
"MetaBroadcast": 23,
}
func (x Message_MessageType) String() string {
return proto.EnumName(Message_MessageType_name, int32(x))
}
func (Message_MessageType) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_message_59eb3dadee47de0f, []int{0, 0}
}
// `protoc -I=. -I=$GOPATH/src -I=$GOPATH/src/github.com/gogo/protobuf/protobuf
// --gogo_out=. *.proto`
type Message struct {
Type Message_MessageType `protobuf:"varint,1,opt,name=type,proto3,enum=messageswap.pb.Message_MessageType" json:"type,omitempty"`
Key Message_Key `protobuf:"bytes,2,opt,name=key" json:"key"`
Value [][]byte `protobuf:"bytes,3,rep,name=value" json:"value,omitempty"`
Timestamp string `protobuf:"bytes,4,opt,name=timestamp,proto3" json:"timestamp,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Message) Reset() { *m = Message{} }
func (m *Message) String() string { return proto.CompactTextString(m) }
func (*Message) ProtoMessage() {}
func (*Message) Descriptor() ([]byte, []int) {
return fileDescriptor_message_59eb3dadee47de0f, []int{0}
}
func (m *Message) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Message.Unmarshal(m, b)
}
func (m *Message) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Message.Marshal(b, m, deterministic)
}
func (dst *Message) XXX_Merge(src proto.Message) {
xxx_messageInfo_Message.Merge(dst, src)
}
func (m *Message) XXX_Size() int {
return xxx_messageInfo_Message.Size(m)
}
func (m *Message) XXX_DiscardUnknown() {
xxx_messageInfo_Message.DiscardUnknown(m)
}
var xxx_messageInfo_Message proto.InternalMessageInfo
func (m *Message) GetType() Message_MessageType {
if m != nil {
return m.Type
}
return Message_PUT_VALUE
}
func (m *Message) GetKey() Message_Key {
if m != nil {
return m.Key
}
return Message_Key{}
}
func (m *Message) GetValue() [][]byte {
if m != nil {
return m.Value
}
return nil
}
func (m *Message) GetTimestamp() string {
if m != nil {
return m.Timestamp
}
return ""
}
type Message_Key struct {
ID string `protobuf:"bytes,1,opt,name=ID,proto3" json:"ID,omitempty"`
Arguments [][]byte `protobuf:"bytes,2,rep,name=arguments" json:"arguments,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Message_Key) Reset() { *m = Message_Key{} }
func (m *Message_Key) String() string { return proto.CompactTextString(m) }
func (*Message_Key) ProtoMessage() {}
func (*Message_Key) Descriptor() ([]byte, []int) {
return fileDescriptor_message_59eb3dadee47de0f, []int{0, 0}
}
func (m *Message_Key) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Message_Key.Unmarshal(m, b)
}
func (m *Message_Key) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Message_Key.Marshal(b, m, deterministic)
}
func (dst *Message_Key) XXX_Merge(src proto.Message) {
xxx_messageInfo_Message_Key.Merge(dst, src)
}
func (m *Message_Key) XXX_Size() int {
return xxx_messageInfo_Message_Key.Size(m)
}
func (m *Message_Key) XXX_DiscardUnknown() {
xxx_messageInfo_Message_Key.DiscardUnknown(m)
}
var xxx_messageInfo_Message_Key proto.InternalMessageInfo
func (m *Message_Key) GetID() string {
if m != nil {
return m.ID
}
return ""
}
func (m *Message_Key) GetArguments() [][]byte {
if m != nil {
return m.Arguments
}
return nil
}
func init()
|
func init() { proto.RegisterFile("message.proto", fileDescriptor_message_59eb3dadee47de0f) }
var fileDescriptor_message_59eb3dadee47de0f = []byte{
// 485 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0xc1, 0x6e, 0xda, 0x40,
0x14, 0x45, 0x03, 0x36, 0x04, 0x3f, 0xc0, 0x9d, 0x4c, 0xd3, 0x16, 0xa5, 0x95, 0x8a, 0xd2, 0x0d,
0x9b, 0x12, 0x29, 0x2c, 0xba, 0x86, 0xd8, 0x89, 0x2c, 0x8a, 0xe3, 0x1a, 0x88, 0xda, 0xd5, 0x68,
0xa0, 0x0f, 0x07, 0x05, 0x3c, 0xae, 0x3d, 0x2e, 0xf2, 0x77, 0xf4, 0xa7, 0xfa, 0x15, 0xfd, 0x8c,
0xae, 0xab, 0x19, 0x83, 0xd2, 0x56, 0xca, 0xca, 0x73, 0xef, 0xdc, 0x73, 0xdf, 0xb3, 0x34, 0xd0,
0xde, 0x62, 0x96, 0xf1, 0x08, 0xfb, 0x49, 0x2a, 0xa4, 0xa0, 0xf6, 0x5e, 0x66, 0x3b, 0x9e, 0xf4,
0x93, 0xc5, 0xd9, 0xfb, 0x68, 0x2d, 0xef, 0xf3, 0x45, 0x7f, 0x29, 0xb6, 0x17, 0x91, 0x88, 0xc4,
0x85, 0x8e, 0x2d, 0xf2, 0x95, 0x56, 0x5a, 0xe8, 0x53, 0x89, 0x9f, 0xff, 0x36, 0xe1, 0x78, 0x52,
0x36, 0xd0, 0x0f, 0x60, 0xca, 0x22, 0xc1, 0x4e, 0xa5, 0x5b, 0xe9, 0xd9, 0x97, 0xef, 0xfa, 0xff,
0x36, 0xf7, 0xf7, 0xb1, 0xc3, 0x77, 0x56, 0x24, 0x18, 0x6a, 0x80, 0x0e, 0xc0, 0x78, 0xc0, 0xa2,
0x53, 0xed, 0x56, 0x7a, 0xcd, 0xcb, 0xd7, 0x4f, 0x71, 0x63, 0x2c, 0x46, 0xe6, 0xcf, 0x5f, 0x6f,
0x8f, 0x42, 0x95, 0xa6, 0xa7, 0x50, 0xfb, 0xce, 0x37, 0x39, 0x76, 0x8c, 0xae, 0xd1, 0x6b, 0x85,
0xa5, 0xa0, 0x6f, 0xc0, 0x92, 0xeb, 0x2d, 0x66, 0x92, 0x6f, 0x93, 0x8e, 0xd9, 0xad, 0xf4, 0xac,
0xf0, 0xd1, 0x38, 0x1b, 0x80, 0x31, 0xc6, 0x82, 0xda, 0x50, 0xf5, 0x1c, 0xbd, 0xa6, 0x15, 0x56,
0x3d, 0x47, 0x41, 0x3c, 0x8d, 0xf2, 0x2d, 0xc6, 0x32, 0xeb, 0x54, 0x75, 0xdd, 0xa3, 0x71, 0xfe,
0xc3, 0x80, 0xe6, 0x5f, 0x3b, 0xd3, 0x36, 0x58, 0xc1, 0x7c, 0xc6, 0xee, 0x86, 0x1f, 0xe7, 0x2e,
0x39, 0x52, 0xf2, 0xc6, 0x3d, 0xc8, 0x0a, 0x25, 0xd0, 0x1a, 0x3a, 0x0e, 0x0b, 0xc2, 0xdb, 0x3b,
0xcf, 0x71, 0x43, 0x52, 0xa5, 0x27, 0xd0, 0x56, 0x81, 0x83, 0x33, 0x25, 0x86, 0x62, 0xae, 0x3d,
0xdf, 0x61, 0xfe, 0xad, 0xe3, 0x12, 0x93, 0x36, 0xc0, 0x0c, 0x3c, 0xff, 0x86, 0xd4, 0xa8, 0x0d,
0x50, 0x66, 0xdd, 0x6b, 0xef, 0x33, 0xa9, 0xeb, 0xb6, 0x20, 0x70, 0x7d, 0x67, 0xdf, 0x7f, 0xac,
0xd0, 0x89, 0x3b, 0x1b, 0xb2, 0xe9, 0x17, 0xff, 0x8a, 0x34, 0x54, 0xf9, 0x3c, 0xc3, 0x94, 0x79,
0xf1, 0x5a, 0xb2, 0x10, 0xbf, 0x11, 0xeb, 0x7f, 0x2b, 0x23, 0x40, 0x29, 0xd8, 0xda, 0xf2, 0x71,
0x37, 0x0e, 0x74, 0xac, 0xa9, 0x3c, 0x1f, 0x77, 0xac, 0xf4, 0x85, 0x5c, 0xaf, 0x48, 0x4b, 0x8d,
0x1f, 0x6d, 0xc4, 0xf2, 0x81, 0x4d, 0x50, 0x72, 0xd2, 0x56, 0xe3, 0x1d, 0xdc, 0xa0, 0x44, 0xa6,
0x6d, 0x62, 0xab, 0xf1, 0x57, 0xf7, 0x7c, 0xb3, 0xc1, 0x38, 0x42, 0xf2, 0x8c, 0x5a, 0x50, 0x0b,
0x52, 0x21, 0x56, 0x84, 0x28, 0x56, 0x1f, 0xd9, 0xb4, 0x88, 0x97, 0xe4, 0x84, 0x02, 0xd4, 0x43,
0x4c, 0xf8, 0x3a, 0x25, 0x54, 0xdd, 0x7d, 0xca, 0x31, 0x2d, 0x98, 0x17, 0xaf, 0x04, 0x79, 0xae,
0x74, 0x79, 0xa7, 0xf7, 0x3b, 0x55, 0x73, 0xa6, 0x52, 0xa4, 0x3c, 0xc2, 0x92, 0x7e, 0x41, 0x5b,
0xd0, 0x50, 0x3b, 0xe8, 0xfc, 0x4b, 0xf5, 0x4b, 0x4a, 0x8d, 0x52, 0xc1, 0xbf, 0x2e, 0x79, 0x26,
0xc9, 0xab, 0x45, 0x5d, 0xbf, 0xbf, 0xc1, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xc8, 0x5e, 0xbf,
0x74, 0xcf, 0x02, 0x00, 0x00,
}
|
{
proto.RegisterType((*Message)(nil), "messageswap.pb.Message")
proto.RegisterType((*Message_Key)(nil), "messageswap.pb.Message.Key")
proto.RegisterEnum("messageswap.pb.Message_MessageType", Message_MessageType_name, Message_MessageType_value)
}
|
mime_type.pb.go
|
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.22.0
// protoc v3.12.3
// source: google/ads/googleads/v1/enums/mime_type.proto
package enums
import (
reflect "reflect"
sync "sync"
proto "github.com/golang/protobuf/proto"
_ "google.golang.org/genproto/googleapis/api/annotations"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// This is a compile-time assertion that a sufficiently up-to-date version
// of the legacy proto package is being used.
const _ = proto.ProtoPackageIsVersion4
// The mime type
type MimeTypeEnum_MimeType int32
const (
// The mime type has not been specified.
MimeTypeEnum_UNSPECIFIED MimeTypeEnum_MimeType = 0
// The received value is not known in this version.
//
// This is a response-only value.
MimeTypeEnum_UNKNOWN MimeTypeEnum_MimeType = 1
// MIME type of image/jpeg.
MimeTypeEnum_IMAGE_JPEG MimeTypeEnum_MimeType = 2
// MIME type of image/gif.
MimeTypeEnum_IMAGE_GIF MimeTypeEnum_MimeType = 3
// MIME type of image/png.
MimeTypeEnum_IMAGE_PNG MimeTypeEnum_MimeType = 4
// MIME type of application/x-shockwave-flash.
MimeTypeEnum_FLASH MimeTypeEnum_MimeType = 5
// MIME type of text/html.
MimeTypeEnum_TEXT_HTML MimeTypeEnum_MimeType = 6
// MIME type of application/pdf.
MimeTypeEnum_PDF MimeTypeEnum_MimeType = 7
// MIME type of application/msword.
MimeTypeEnum_MSWORD MimeTypeEnum_MimeType = 8
// MIME type of application/vnd.ms-excel.
MimeTypeEnum_MSEXCEL MimeTypeEnum_MimeType = 9
// MIME type of application/rtf.
MimeTypeEnum_RTF MimeTypeEnum_MimeType = 10
// MIME type of audio/wav.
MimeTypeEnum_AUDIO_WAV MimeTypeEnum_MimeType = 11
// MIME type of audio/mp3.
MimeTypeEnum_AUDIO_MP3 MimeTypeEnum_MimeType = 12
// MIME type of application/x-html5-ad-zip.
MimeTypeEnum_HTML5_AD_ZIP MimeTypeEnum_MimeType = 13
)
// Enum value maps for MimeTypeEnum_MimeType.
var (
MimeTypeEnum_MimeType_name = map[int32]string{
0: "UNSPECIFIED",
1: "UNKNOWN",
2: "IMAGE_JPEG",
3: "IMAGE_GIF",
4: "IMAGE_PNG",
5: "FLASH",
6: "TEXT_HTML",
7: "PDF",
8: "MSWORD",
9: "MSEXCEL",
10: "RTF",
11: "AUDIO_WAV",
12: "AUDIO_MP3",
13: "HTML5_AD_ZIP",
}
MimeTypeEnum_MimeType_value = map[string]int32{
"UNSPECIFIED": 0,
"UNKNOWN": 1,
"IMAGE_JPEG": 2,
"IMAGE_GIF": 3,
"IMAGE_PNG": 4,
"FLASH": 5,
"TEXT_HTML": 6,
"PDF": 7,
"MSWORD": 8,
"MSEXCEL": 9,
"RTF": 10,
"AUDIO_WAV": 11,
"AUDIO_MP3": 12,
"HTML5_AD_ZIP": 13,
}
)
func (x MimeTypeEnum_MimeType) Enum() *MimeTypeEnum_MimeType {
p := new(MimeTypeEnum_MimeType)
*p = x
return p
}
func (x MimeTypeEnum_MimeType) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (MimeTypeEnum_MimeType) Descriptor() protoreflect.EnumDescriptor {
return file_google_ads_googleads_v1_enums_mime_type_proto_enumTypes[0].Descriptor()
}
func (MimeTypeEnum_MimeType) Type() protoreflect.EnumType {
return &file_google_ads_googleads_v1_enums_mime_type_proto_enumTypes[0]
}
func (x MimeTypeEnum_MimeType) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use MimeTypeEnum_MimeType.Descriptor instead.
func (MimeTypeEnum_MimeType) EnumDescriptor() ([]byte, []int) {
return file_google_ads_googleads_v1_enums_mime_type_proto_rawDescGZIP(), []int{0, 0}
}
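// Editorial note (not part of the generated output): the helpers above convert
// between the Go constant, its proto name, and its wire number, e.g.
//
//	v := MimeTypeEnum_IMAGE_PNG
//	_ = v.String()                         // "IMAGE_PNG"
//	_ = int32(v.Number())                  // 4
//	_ = MimeTypeEnum_MimeType_value["PDF"] // 7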
// Container for enum describing the mime types.
type MimeTypeEnum struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
}
func (x *MimeTypeEnum) Reset() {
*x = MimeTypeEnum{}
if protoimpl.UnsafeEnabled {
mi := &file_google_ads_googleads_v1_enums_mime_type_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *MimeTypeEnum) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*MimeTypeEnum) ProtoMessage() {}
func (x *MimeTypeEnum) ProtoReflect() protoreflect.Message {
mi := &file_google_ads_googleads_v1_enums_mime_type_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use MimeTypeEnum.ProtoReflect.Descriptor instead.
func (*MimeTypeEnum) Descriptor() ([]byte, []int) {
return file_google_ads_googleads_v1_enums_mime_type_proto_rawDescGZIP(), []int{0}
}
var File_google_ads_googleads_v1_enums_mime_type_proto protoreflect.FileDescriptor
var file_google_ads_googleads_v1_enums_mime_type_proto_rawDesc = []byte{
0x0a, 0x2d, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x31, 0x2f, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0x2f,
0x6d, 0x69, 0x6d, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12,
0x1d, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0x1a, 0x1c,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74,
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xdc, 0x01, 0x0a,
0x0c, 0x4d, 0x69, 0x6d, 0x65, 0x54, 0x79, 0x70, 0x65, 0x45, 0x6e, 0x75, 0x6d, 0x22, 0xcb, 0x01,
0x0a, 0x08, 0x4d, 0x69, 0x6d, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e,
0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x55,
0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, 0x49, 0x4d, 0x41, 0x47,
0x45, 0x5f, 0x4a, 0x50, 0x45, 0x47, 0x10, 0x02, 0x12, 0x0d, 0x0a, 0x09, 0x49, 0x4d, 0x41, 0x47,
0x45, 0x5f, 0x47, 0x49, 0x46, 0x10, 0x03, 0x12, 0x0d, 0x0a, 0x09, 0x49, 0x4d, 0x41, 0x47, 0x45,
0x5f, 0x50, 0x4e, 0x47, 0x10, 0x04, 0x12, 0x09, 0x0a, 0x05, 0x46, 0x4c, 0x41, 0x53, 0x48, 0x10,
0x05, 0x12, 0x0d, 0x0a, 0x09, 0x54, 0x45, 0x58, 0x54, 0x5f, 0x48, 0x54, 0x4d, 0x4c, 0x10, 0x06,
0x12, 0x07, 0x0a, 0x03, 0x50, 0x44, 0x46, 0x10, 0x07, 0x12, 0x0a, 0x0a, 0x06, 0x4d, 0x53, 0x57,
0x4f, 0x52, 0x44, 0x10, 0x08, 0x12, 0x0b, 0x0a, 0x07, 0x4d, 0x53, 0x45, 0x58, 0x43, 0x45, 0x4c,
0x10, 0x09, 0x12, 0x07, 0x0a, 0x03, 0x52, 0x54, 0x46, 0x10, 0x0a, 0x12, 0x0d, 0x0a, 0x09, 0x41,
0x55, 0x44, 0x49, 0x4f, 0x5f, 0x57, 0x41, 0x56, 0x10, 0x0b, 0x12, 0x0d, 0x0a, 0x09, 0x41, 0x55,
0x44, 0x49, 0x4f, 0x5f, 0x4d, 0x50, 0x33, 0x10, 0x0c, 0x12, 0x10, 0x0a, 0x0c, 0x48, 0x54, 0x4d,
0x4c, 0x35, 0x5f, 0x41, 0x44, 0x5f, 0x5a, 0x49, 0x50, 0x10, 0x0d, 0x42, 0xe2, 0x01, 0x0a, 0x21,
0x63, 0x6f, 0x6d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x64, 0x73, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x65, 0x6e, 0x75, 0x6d,
0x73, 0x42, 0x0d, 0x4d, 0x69, 0x6d, 0x65, 0x54, 0x79, 0x70, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f,
0x50, 0x01, 0x5a, 0x42, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x67, 0x6f, 0x6c, 0x61, 0x6e,
0x67, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x67, 0x65, 0x6e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2f, 0x61, 0x64, 0x73, 0x2f, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x61, 0x64, 0x73, 0x2f, 0x76, 0x31, 0x2f, 0x65, 0x6e, 0x75, 0x6d, 0x73,
0x3b, 0x65, 0x6e, 0x75, 0x6d, 0x73, 0xa2, 0x02, 0x03, 0x47, 0x41, 0x41, 0xaa, 0x02, 0x1d, 0x47,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x41, 0x64, 0x73, 0x2e, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x41, 0x64, 0x73, 0x2e, 0x56, 0x31, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x73, 0xca, 0x02, 0x1d, 0x47,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x5c, 0x41, 0x64, 0x73, 0x5c, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x41, 0x64, 0x73, 0x5c, 0x56, 0x31, 0x5c, 0x45, 0x6e, 0x75, 0x6d, 0x73, 0xea, 0x02, 0x21, 0x47,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x3a, 0x3a, 0x41, 0x64, 0x73, 0x3a, 0x3a, 0x47, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x41, 0x64, 0x73, 0x3a, 0x3a, 0x56, 0x31, 0x3a, 0x3a, 0x45, 0x6e, 0x75, 0x6d, 0x73,
0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_google_ads_googleads_v1_enums_mime_type_proto_rawDescOnce sync.Once
file_google_ads_googleads_v1_enums_mime_type_proto_rawDescData = file_google_ads_googleads_v1_enums_mime_type_proto_rawDesc
)
func file_google_ads_googleads_v1_enums_mime_type_proto_rawDescGZIP() []byte {
file_google_ads_googleads_v1_enums_mime_type_proto_rawDescOnce.Do(func() {
file_google_ads_googleads_v1_enums_mime_type_proto_rawDescData = protoimpl.X.CompressGZIP(file_google_ads_googleads_v1_enums_mime_type_proto_rawDescData)
})
return file_google_ads_googleads_v1_enums_mime_type_proto_rawDescData
}
var file_google_ads_googleads_v1_enums_mime_type_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_google_ads_googleads_v1_enums_mime_type_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
var file_google_ads_googleads_v1_enums_mime_type_proto_goTypes = []interface{}{
(MimeTypeEnum_MimeType)(0), // 0: google.ads.googleads.v1.enums.MimeTypeEnum.MimeType
(*MimeTypeEnum)(nil), // 1: google.ads.googleads.v1.enums.MimeTypeEnum
}
var file_google_ads_googleads_v1_enums_mime_type_proto_depIdxs = []int32{
0, // [0:0] is the sub-list for method output_type
0, // [0:0] is the sub-list for method input_type
0, // [0:0] is the sub-list for extension type_name
0, // [0:0] is the sub-list for extension extendee
0, // [0:0] is the sub-list for field type_name
}
func init() { file_google_ads_googleads_v1_enums_mime_type_proto_init() }
func file_google_ads_googleads_v1_enums_mime_type_proto_init() {
if File_google_ads_googleads_v1_enums_mime_type_proto != nil {
return
}
if !protoimpl.UnsafeEnabled
|
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_google_ads_googleads_v1_enums_mime_type_proto_rawDesc,
NumEnums: 1,
NumMessages: 1,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_google_ads_googleads_v1_enums_mime_type_proto_goTypes,
DependencyIndexes: file_google_ads_googleads_v1_enums_mime_type_proto_depIdxs,
EnumInfos: file_google_ads_googleads_v1_enums_mime_type_proto_enumTypes,
MessageInfos: file_google_ads_googleads_v1_enums_mime_type_proto_msgTypes,
}.Build()
File_google_ads_googleads_v1_enums_mime_type_proto = out.File
file_google_ads_googleads_v1_enums_mime_type_proto_rawDesc = nil
file_google_ads_googleads_v1_enums_mime_type_proto_goTypes = nil
file_google_ads_googleads_v1_enums_mime_type_proto_depIdxs = nil
}
|
{
file_google_ads_googleads_v1_enums_mime_type_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*MimeTypeEnum); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
|
gen.go
|
/*
* Copyright 2021, Offchain Labs, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package arboscontracts
import (
"path/filepath"
"github.com/offchainlabs/arbitrum/packages/arb-util/binding"
)
//go:generate go run createBindings.go
func
|
() error {
base, err := binding.ArbOSArtifactsFolder()
if err != nil {
return err
}
periph, err := binding.PeripheralsArtifactsFolder()
if err != nil {
return err
}
contracts := binding.GenerateContractsList(
base,
[]string{
"ArbAddressTable",
"ArbAggregator",
"ArbBLS",
"ArbFunctionTable",
"ArbGasInfo",
"ArbInfo",
"ArbOwner",
"ArbosTest",
"ArbRetryableTx",
"ArbStatistics",
"ArbSys",
},
)
contracts = append(contracts, binding.GenerateContractsList(
filepath.Join(periph, "contracts", "rpc-utils"),
[]string{"NodeInterface", "RetryableTicketCreator"},
)...)
for _, con := range contracts {
err := binding.GenerateBinding(con.File, con.Contract, "arboscontracts")
if err != nil {
return err
}
}
return nil
}
|
RunBindingGen
|
is-in.js
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isIn = void 0;
const base_string_validation_rule_1 = require("./base-string-validation-rule");
const n_defensive_1 = require("@nivinjoseph/n-defensive");
function isIn(values, ignoreCase) {
return new StringIsIn(values, !!ignoreCase);
}
exports.isIn = isIn;
class
|
extends base_string_validation_rule_1.BaseStringValidationRule {
constructor(values, ignoreCase) {
n_defensive_1.given(values, "values").ensureHasValue();
super();
this.addValidationRule({
validate: t => t == null || (ignoreCase
? values.some(v => v.trim().toLowerCase() === t.trim().toLowerCase())
: values.some(v => v.trim() === t.trim())),
error: "Invalid value"
});
}
}
//# sourceMappingURL=is-in.js.map
|
StringIsIn
|
falconx_summary_report_v1_response.rs
|
/*
* CrowdStrike API Specification
*
* Use this API specification as a reference for the API endpoints you can use to interact with your Falcon environment. These endpoints support authentication via OAuth2 and interact with detections and network containment. For detailed usage guides and more information about API endpoints that don't yet support OAuth2, see our [documentation inside the Falcon console](https://falcon.crowdstrike.com/support/documentation). To use the APIs described below, combine the base URL with the path shown for each API endpoint. For commercial cloud customers, your base URL is `https://api.crowdstrike.com`. Each API endpoint requires authorization via an OAuth2 token. Your first API request should retrieve an OAuth2 token using the `oauth2/token` endpoint, such as `https://api.crowdstrike.com/oauth2/token`. For subsequent requests, include the OAuth2 token in an HTTP authorization header. Tokens expire after 30 minutes, after which you should make a new token request to continue making API requests.
*
* The version of the OpenAPI document: 2021-10-05T19:33:53Z
*
* Generated by: https://openapi-generator.tech
*/
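// Editorial sketch of the token flow described above (not part of the
// generated client). It assumes the `reqwest` crate with the `blocking` and
// `json` features plus `serde_json` as dependencies; the `client_id` and
// `client_secret` form fields and the `access_token` response field are
// assumptions based on a generic OAuth2 client-credentials exchange, and
// `endpoint_path` stands in for whichever resource path you call.
fn call_with_oauth2_sketch(
    client_id: &str,
    client_secret: &str,
    endpoint_path: &str,
) -> Result<serde_json::Value, Box<dyn std::error::Error>> {
    let http = reqwest::blocking::Client::new();
    // Step 1: exchange API credentials for a bearer token; per the note above,
    // tokens expire after 30 minutes and must then be re-requested.
    let token: serde_json::Value = http
        .post("https://api.crowdstrike.com/oauth2/token")
        .form(&[("client_id", client_id), ("client_secret", client_secret)])
        .send()?
        .json()?;
    let bearer = token["access_token"]
        .as_str()
        .ok_or("token response did not contain access_token")?;
    // Step 2: call the desired endpoint with the token in the Authorization header.
    let body: serde_json::Value = http
        .get(format!("https://api.crowdstrike.com{}", endpoint_path))
        .bearer_auth(bearer)
        .send()?
        .json()?;
    Ok(body)
}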
#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
pub struct FalconxSummaryReportV1Response {
#[serde(rename = "errors")]
pub errors: Vec<crate::models::MsaApiError>,
#[serde(rename = "meta")]
pub meta: Box<crate::models::FalconxMetaInfo>,
#[serde(rename = "resources")]
pub resources: Vec<crate::models::FalconxSummaryReportV1>,
}
impl FalconxSummaryReportV1Response {
pub fn new(errors: Vec<crate::models::MsaApiError>, meta: crate::models::FalconxMetaInfo, resources: Vec<crate::models::FalconxSummaryReportV1>) -> FalconxSummaryReportV1Response
|
}
|
{
FalconxSummaryReportV1Response {
errors,
meta: Box::new(meta),
resources,
}
}
|
authority.rs
|
use super::{
character_classes::{
REG_NAME_NOT_PCT_ENCODED,
USER_INFO_NOT_PCT_ENCODED,
},
codec::{
decode_element,
encode_element,
},
context::Context,
error::Error,
parse_host_port::parse_host_port,
validate_ipv6_address::validate_ipv6_address,
};
/// This is the optional part of a URI which governs the URI's namespace. It
/// typically contains a host name or IP address, and may also include a port
/// number and/or userinfo component.
///
/// # Examples
///
/// ## Parsing an Authority into its components
///
/// ```rust
/// # extern crate rhymuri;
/// use rhymuri::Authority;
///
/// # fn main() -> Result<(), rhymuri::Error> {
/// let authority = Authority::parse("nobody@www.example.com:8080")?;
/// assert_eq!(Some("nobody".as_bytes()), authority.userinfo());
/// assert_eq!("www.example.com".as_bytes(), authority.host());
/// assert_eq!(Some(8080), authority.port());
/// # Ok(())
/// # }
/// ```
///
/// ## Generating a URI from its components
///
/// ```rust
/// # extern crate rhymuri;
/// use rhymuri::Authority;
///
/// # fn main() -> Result<(), rhymuri::Error> {
/// let mut authority = Authority::default();
/// authority.set_userinfo(Some("nobody").map(Into::into));
/// authority.set_host("www.example.com");
/// authority.set_port(Some(8080));
/// assert_eq!("nobody@www.example.com:8080", authority.to_string());
/// # Ok(())
/// # }
/// ```
#[derive(Clone, Debug, Default, PartialEq)]
pub struct Authority {
userinfo: Option<Vec<u8>>,
host: Vec<u8>,
port: Option<u16>,
}
impl Authority {
/// Borrow the host name part of the Authority.
#[must_use = "why u no use host return value?"]
pub fn host(&self) -> &[u8]
|
/// Borrow the port number part of the Authority.
#[must_use = "why did you get the port number and then throw it away?"]
pub fn port(&self) -> Option<u16> {
self.port
}
/// Change the userinfo part of the Authority.
pub fn set_userinfo<T>(
&mut self,
userinfo: T,
) where
T: Into<Option<Vec<u8>>>,
{
self.userinfo = userinfo.into();
}
/// Change the host name part of the Authority.
pub fn set_host<T>(
&mut self,
host: T,
) where
T: Into<Vec<u8>>,
{
self.host = host.into();
}
/// Change the port number part of the Authority.
pub fn set_port(
&mut self,
port: Option<u16>,
) {
self.port = port;
}
/// Borrow the userinfo part of the Authority.
#[must_use = "security breach... security breach... userinfo not used"]
pub fn userinfo(&self) -> Option<&[u8]> {
self.userinfo.as_deref()
}
/// Interpret the given string as the Authority component of a URI,
/// separating its various subcomponents, returning an `Authority` value
/// containing them.
///
/// # Errors
///
/// There are many ways to screw up the Authority part of URI string, and
/// this function will let you know what's up by returning a variant of the
/// [`Error`](enum.Error.html) type.
#[must_use = "you parsed it; don't you want the results?"]
pub fn parse<T>(authority_string: T) -> Result<Self, Error>
where
T: AsRef<str>,
{
let (userinfo, host_port_string) =
Self::parse_userinfo(authority_string.as_ref())?;
let (host, port) = parse_host_port(host_port_string)?;
Ok(Self {
userinfo,
host,
port,
})
}
fn parse_userinfo(
authority: &str
) -> Result<(Option<Vec<u8>>, &str), Error> {
Ok(match authority.find('@') {
Some(delimiter) => (
Some(decode_element(
&authority[0..delimiter],
&USER_INFO_NOT_PCT_ENCODED,
Context::Userinfo,
)?),
&authority[delimiter + 1..],
),
None => (None, authority),
})
}
}
impl std::fmt::Display for Authority {
fn fmt(
&self,
f: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result {
if let Some(userinfo) = &self.userinfo {
write!(
f,
"{}@",
encode_element(&userinfo, &USER_INFO_NOT_PCT_ENCODED)
)?;
}
let host_to_string = String::from_utf8(self.host.clone());
match host_to_string {
Ok(host_to_string)
if validate_ipv6_address(&host_to_string).is_ok() =>
{
write!(f, "[{}]", host_to_string.to_ascii_lowercase())?;
}
_ => {
write!(
f,
"{}",
encode_element(&self.host, &REG_NAME_NOT_PCT_ENCODED)
)?;
},
}
if let Some(port) = self.port {
write!(f, ":{}", port)?;
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
// NOTE: These lints are disabled because they're triggered inside the
// `named_tuple!` macro expansion.
#[allow(clippy::ref_option_ref)]
#[allow(clippy::from_over_into)]
fn userinfo() {
named_tuple!(
struct TestVector {
authority_string: &'static str,
userinfo: Option<&'static str>,
}
);
let test_vectors: &[TestVector] = &[
("www.example.com", None).into(),
("[email protected]", Some("joe")).into(),
("pepe:[email protected]", Some("pepe:feelsbadman"))
.into(),
];
for test_vector in test_vectors {
let authority = Authority::parse(test_vector.authority_string());
assert!(authority.is_ok());
let authority = authority.unwrap();
assert_eq!(
test_vector.userinfo().map(str::as_bytes),
authority.userinfo.as_ref().map(|v| &v[..])
);
}
}
#[test]
fn userinfo_illegal_characters() {
let test_vectors = ["%[email protected]", "{@www.example.com"];
for test_vector in &test_vectors {
let authority = Authority::parse(test_vector);
assert!(authority.is_err());
}
}
#[test]
// NOTE: This lint is disabled because it's triggered inside the
// `named_tuple!` macro expansion.
#[allow(clippy::from_over_into)]
fn userinfo_barely_legal() {
named_tuple!(
struct TestVector {
uri_string: &'static str,
userinfo: &'static str,
}
);
let test_vectors: &[TestVector] = &[
("%[email protected]", "A").into(),
("@www.example.com", "").into(),
("[email protected]", "!").into(),
("'@www.example.com", "'").into(),
("(@www.example.com", "(").into(),
(";@www.example.com", ";").into(),
(":@www.example.com", ":").into(),
];
for test_vector in test_vectors {
let authority = Authority::parse(test_vector.uri_string());
assert!(authority.is_ok());
let authority = authority.unwrap();
assert_eq!(
Some(test_vector.userinfo().as_bytes()),
authority.userinfo.as_ref().map(|v| &v[..])
);
}
}
#[test]
fn host_illegal_characters() {
let test_vectors = ["%[email protected]", "@www:example.com", "[vX.:]"];
for test_vector in &test_vectors {
let authority = Authority::parse(test_vector);
assert!(authority.is_err());
}
}
#[test]
// NOTE: This lint is disabled because it's triggered inside the
// `named_tuple!` macro expansion.
#[allow(clippy::from_over_into)]
fn host_barely_legal() {
named_tuple!(
struct TestVector {
authority_string: &'static str,
host: &'static str,
}
);
let test_vectors: &[TestVector] = &[
("%41", "a").into(),
("", "").into(),
("!", "!").into(),
("'", "'").into(),
("(", "(").into(),
(";", ";").into(),
("1.2.3.4", "1.2.3.4").into(),
("[v7.:]", "v7.:").into(),
("[v7.aB]", "v7.aB").into(),
];
for test_vector in test_vectors {
let authority = Authority::parse(test_vector.authority_string());
assert!(authority.is_ok());
let authority = authority.unwrap();
assert_eq!(test_vector.host().as_bytes(), authority.host());
}
}
#[test]
fn host_ends_in_dot() {
let authority = Authority::parse("example.com.");
assert!(authority.is_ok());
let authority = authority.unwrap();
assert_eq!(b"example.com.", authority.host());
}
#[test]
fn host_mixed_case() {
let test_vectors = [
"www.example.com",
"www.EXAMPLE.com",
"www.exAMple.com",
"www.example.cOM",
"wWw.exampLe.Com",
];
let normalized_host = "www.example.com";
for test_vector in &test_vectors {
let authority = Authority::parse(*test_vector);
assert!(authority.is_ok());
let authority = authority.unwrap();
assert_eq!(normalized_host.as_bytes(), authority.host());
}
}
}
|
{
&self.host
}
|
Command.go
|
// Copyright (C) 2018, Michael P. Gerlek (Flaxen Consulting)
//
// Portions of this code were derived from the PROJ.4 software
// In keeping with the terms of the PROJ.4 project, this software
// is provided under the MIT-style license in `LICENSE.md` and may
// additionally be subject to the copyrights of the PROJ.4 authors.
package gie
import (
"fmt"
"math"
"strconv"
"strings"
"github.com/go-spatial/proj/core"
"github.com/go-spatial/proj/mlog"
"github.com/go-spatial/proj/support"
)
// the 4 input (or output) values, although
// we only support 2D (a,b) right now
type coord struct {
a, b, c, d float64 // lam,phi or x,y
}
// represents a single invocation of the operation
type testcase struct {
inv bool
accept coord
expect coord
}
// Command holds a set of testcases
//
// As the gie file is read, the current Command object
// gets modified, testcases get added, etc.
type Command struct {
ProjString string
tolerance float64
testcases []testcase
invFlag bool
completeFailure bool
File string
Line int
roundtripCount int
roundtripDelta float64
}
// NewCommand returns a new Command
func NewCommand(file string, line int, ps string) *Command {
c := &Command{
ProjString: ps,
testcases: []testcase{},
File: file,
Line: line,
tolerance: 0.5 * unitsValue("mm"),
}
//mlog.Printf("OPERATION: %s", ps)
return c
}
// ProjectionName returns the name of the projection used in this test
func (c *Command) ProjectionName() string {
s := c.ProjString
for {
t := strings.Replace(s, "\t", " ", -1)
t = strings.Replace(t, " ", " ", -1)
t = strings.Replace(t, " =", "=", -1)
t = strings.Replace(t, "= ", "=", -1)
if s == t {
break
}
s = t
}
toks := strings.Fields(s)
for _, tok := range toks {
if tok[0:1] == "+" {
tok = tok[1:]
}
if strings.HasPrefix(tok, "proj=") {
return tok[5:]
}
}
return "UNKNOWN"
}
func (c *Command) setDirection(s1 string) {
if s1 == "inverse" {
c.invFlag = true
} else if s1 == "forward" {
c.invFlag = false
} else {
panic(s1)
}
}
func (c *Command) setAccept(s1, s2, s3, s4 string) {
s1 = strings.Replace(s1, "_", "", -1)
s2 = strings.Replace(s2, "_", "", -1)
s3 = strings.Replace(s3, "_", "", -1)
s4 = strings.Replace(s4, "_", "", -1)
v1, err := strconv.ParseFloat(s1, 64)
if err != nil {
panic(err)
}
v2, err := strconv.ParseFloat(s2, 64)
if err != nil {
panic(err)
}
v3, err := strconv.ParseFloat(s3, 64)
if err != nil {
panic(err)
}
v4, err := strconv.ParseFloat(s4, 64)
if err != nil {
panic(err)
}
tc := testcase{
accept: coord{v1, v2, v3, v4},
inv: c.invFlag,
}
c.testcases = append(c.testcases, tc)
}
func (c *Command) setExpectFailure() {
n := len(c.testcases)
if n == 0 {
c.completeFailure = true
} else {
tc := &c.testcases[n-1]
tc.expect = coord{math.MaxFloat64, math.MaxFloat64, math.MaxFloat64, math.MaxFloat64}
}
}
func (c *Command) setExpect(s1, s2, s3, s4 string) {
s1 = strings.Replace(s1, "_", "", -1)
s2 = strings.Replace(s2, "_", "", -1)
s3 = strings.Replace(s3, "_", "", -1)
s4 = strings.Replace(s4, "_", "", -1)
v1, err := strconv.ParseFloat(s1, 64)
if err != nil {
panic(err)
}
v2, err := strconv.ParseFloat(s2, 64)
if err != nil {
panic(err)
}
v3, err := strconv.ParseFloat(s3, 64)
if err != nil {
panic(err)
}
v4, err := strconv.ParseFloat(s4, 64)
if err != nil {
panic(err)
}
tc := &c.testcases[len(c.testcases)-1]
tc.expect = coord{v1, v2, v3, v4}
}
func (c *Command) setRoundtrip(s1, s2, s3 string) {
count, err := strconv.Atoi(s1)
if err != nil {
panic(err)
}
v, err := strconv.ParseFloat(s2, 64)
if err != nil {
panic(err)
}
delta := v / unitsValue(s3)
c.roundtripCount = count
c.roundtripDelta = delta
}
func (c *Command) setTolerance(s1, s2 string) {
//mlog.Printf("TOLERANCE: %s %s", s1, s2)
v, err := strconv.ParseFloat(s1, 64)
if err != nil {
panic(err)
}
c.tolerance = v / unitsValue(s2)
}
func
|
(s string) float64 {
switch s {
case "*":
return 1.0
case "cm":
return 100.0
case "nm":
return 1.0e9
case "um":
return 1.0e6
case "mm":
return 1000.0
case "m":
return 1.0
}
panic(s)
}
// Execute runs the testcases
//
// First it parses the proj string, then it creates the coordinate system,
// then it executes the operation for each of the inputs.
func (c *Command) Execute() error {
ps, err := support.NewProjString(c.ProjString)
if err != nil {
if c.completeFailure {
return nil
}
return err
}
_, opx, err := core.NewSystem(ps)
if err != nil {
if c.completeFailure {
return nil
}
return err
}
op := opx.(core.IConvertLPToXY)
for _, tc := range c.testcases {
if !tc.inv {
if c.roundtripCount == 0 {
_, _, err = c.executeForwardOnce(
tc.accept.a, tc.accept.b,
tc.expect.a, tc.expect.b,
op, c.tolerance)
} else {
err = c.executeRoundtrip(
tc.accept.a, tc.accept.b,
tc.expect.a, tc.expect.b,
op, c.roundtripDelta, c.roundtripCount)
}
} else {
if c.roundtripCount == 0 {
_, _, err = c.executeInverseOnce(
tc.accept.a, tc.accept.b,
tc.expect.a, tc.expect.b,
op, c.tolerance)
} else {
// roundtrips are always done from the Forward funcs
panic(9)
}
}
if err != nil {
return err
}
}
return nil
}
func (c *Command) executeForwardOnce(
in1, in2, out1, out2 float64,
op core.IConvertLPToXY,
tolerance float64) (float64, float64, error) {
input := &core.CoordLP{Lam: support.DDToR(in1), Phi: support.DDToR(in2)}
output, err := op.Forward(input)
if err != nil {
return 0, 0, err
}
x, y := output.X, output.Y
ok1 := check(out1, x, c.tolerance)
ok2 := check(out2, y, c.tolerance)
if !ok1 || !ok2 {
return 0, 0, fmt.Errorf("delta failed")
}
return x, y, nil
}
func (c *Command) executeInverseOnce(
in1, in2, out1, out2 float64,
op core.IConvertLPToXY,
tolerance float64) (float64, float64, error) {
input := &core.CoordXY{X: in1, Y: in2}
output, err := op.Inverse(input)
if err != nil {
return 0, 0, err
}
lam, phi := support.RToDD(output.Lam), support.RToDD(output.Phi)
ok1 := check(out1, lam, c.tolerance)
ok2 := check(out2, phi, c.tolerance)
if !ok1 || !ok2 {
return 0, 0, fmt.Errorf("delta failed")
}
return lam, phi, nil
}
func (c *Command) executeRoundtrip(
in1, in2, out1, out2 float64,
op core.IConvertLPToXY,
tolerance float64,
count int) error {
for i := 0; i < count; i++ {
x, y, err := c.executeForwardOnce(in1, in2, out1, out2, op, tolerance)
if err != nil {
return err
}
lam, phi, err := c.executeInverseOnce(x, y, in1, in2, op, tolerance)
if err != nil {
return err
}
in1, in2 = lam, phi
}
return nil
}
func check(expect, actual, tolerance float64) bool {
diff := math.Abs(expect - actual)
if diff > tolerance {
mlog.Printf("TEST FAILED")
mlog.Printf("expected: %f", expect)
mlog.Printf("actual: %f", actual)
mlog.Printf("tolerance: %f", tolerance)
mlog.Printf("diff: %f", diff)
return false
}
return true
}
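// Editorial sketch (not part of the gie corpus or the original file): it wires
// up a Command by hand the way the gie parser would, using the canonical
// "+proj=utm +zone=32 +ellps=GRS80" example from the PROJ gie documentation;
// the tolerance and coordinate strings are illustrative, and whether Execute
// succeeds depends on the projections this library supports.
func exampleCommandUsage() error {
	cmd := NewCommand("example.gie", 1, "+proj=utm +zone=32 +ellps=GRS80")
	cmd.setTolerance("0.1", "mm")                        // 0.1 mm tolerance
	cmd.setAccept("12", "55", "0", "0")                  // input lam/phi in degrees
	cmd.setExpect("691875.632", "6098907.825", "0", "0") // expected x/y in meters
	return cmd.Execute()                                 // forward conversion plus delta checks
}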
|
unitsValue
|
generic.py
|
import collections
from datetime import timedelta
import functools
import gc
import json
import operator
import pickle
import re
from textwrap import dedent
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
FrozenSet,
Hashable,
List,
Mapping,
Optional,
Sequence,
Set,
Tuple,
Type,
Union,
)
import warnings
import weakref
import numpy as np
from pandas._config import config
from pandas._libs import Timestamp, iNaT, lib
from pandas._typing import (
Axis,
FilePathOrBuffer,
FrameOrSeries,
JSONSerializable,
Label,
Level,
Renamer,
)
from pandas.compat import set_function_name
from pandas.compat._optional import import_optional_dependency
from pandas.compat.numpy import function as nv
from pandas.errors import AbstractMethodError
from pandas.util._decorators import (
Appender,
Substitution,
doc,
rewrite_axis_style_signature,
)
from pandas.util._validators import (
validate_bool_kwarg,
validate_fillna_kwargs,
validate_percentile,
)
from pandas.core.dtypes.common import (
ensure_int64,
ensure_object,
ensure_str,
is_bool,
is_bool_dtype,
is_datetime64_any_dtype,
is_datetime64tz_dtype,
is_dict_like,
is_extension_array_dtype,
is_float,
is_integer,
is_list_like,
is_number,
is_numeric_dtype,
is_object_dtype,
is_re_compilable,
is_scalar,
is_timedelta64_dtype,
pandas_dtype,
)
from pandas.core.dtypes.generic import ABCDataFrame, ABCSeries
from pandas.core.dtypes.inference import is_hashable
from pandas.core.dtypes.missing import isna, notna
import pandas as pd
from pandas.core import missing, nanops
import pandas.core.algorithms as algos
from pandas.core.base import PandasObject, SelectionMixin
import pandas.core.common as com
from pandas.core.construction import create_series_with_explicit_dtype
from pandas.core.indexes.api import (
Index,
InvalidIndexError,
MultiIndex,
RangeIndex,
ensure_index,
)
from pandas.core.indexes.datetimes import DatetimeIndex
from pandas.core.indexes.period import Period, PeriodIndex
import pandas.core.indexing as indexing
from pandas.core.internals import BlockManager
from pandas.core.missing import find_valid_index
from pandas.core.ops import _align_method_FRAME
from pandas.io.formats import format as fmt
from pandas.io.formats.format import DataFrameFormatter, format_percentiles
from pandas.io.formats.printing import pprint_thing
from pandas.tseries.frequencies import to_offset
if TYPE_CHECKING:
from pandas.core.resample import Resampler
# goal is to be able to define the docs close to function, while still being
# able to share
_shared_docs: Dict[str, str] = dict()
_shared_doc_kwargs = dict(
axes="keywords for axes",
klass="Series/DataFrame",
axes_single_arg="int or labels for object",
args_transpose="axes to permute (int or label for object)",
optional_by="""
by : str or list of str
Name or list of names to sort by""",
)
def _single_replace(self, to_replace, method, inplace, limit):
"""
Replaces values in a Series using the fill method specified when no
replacement value is given in the replace method
"""
if self.ndim != 1:
raise TypeError(
f"cannot replace {to_replace} with method {method} on a "
f"{type(self).__name__}"
)
orig_dtype = self.dtype
result = self if inplace else self.copy()
fill_f = missing.get_fill_func(method)
mask = missing.mask_missing(result.values, to_replace)
values = fill_f(result.values, limit=limit, mask=mask)
if values.dtype == orig_dtype and inplace:
return
result = pd.Series(values, index=self.index, dtype=self.dtype).__finalize__(self)
if inplace:
self._update_inplace(result._data)
return
return result
bool_t = bool # Need alias because NDFrame has def bool:
class NDFrame(PandasObject, SelectionMixin, indexing.IndexingMixin):
"""
N-dimensional analogue of DataFrame. Store multi-dimensional data in a
size-mutable, labeled data structure.
Parameters
----------
data : BlockManager
axes : list
copy : bool, default False
"""
_internal_names: List[str] = [
"_data",
"_cacher",
"_item_cache",
"_cache",
"_is_copy",
"_subtyp",
"_name",
"_index",
"_default_kind",
"_default_fill_value",
"_metadata",
"__array_struct__",
"__array_interface__",
]
_internal_names_set: Set[str] = set(_internal_names)
_accessors: Set[str] = set()
_deprecations: FrozenSet[str] = frozenset(["get_values"])
_metadata: List[str] = []
_is_copy = None
_data: BlockManager
_attrs: Dict[Optional[Hashable], Any]
_typ: str
# ----------------------------------------------------------------------
# Constructors
def __init__(
self,
data: BlockManager,
copy: bool = False,
attrs: Optional[Mapping[Optional[Hashable], Any]] = None,
):
# copy kwarg is retained for mypy compat, is not used
object.__setattr__(self, "_is_copy", None)
object.__setattr__(self, "_data", data)
object.__setattr__(self, "_item_cache", {})
if attrs is None:
attrs = {}
else:
attrs = dict(attrs)
object.__setattr__(self, "_attrs", attrs)
@classmethod
def _init_mgr(cls, mgr, axes=None, dtype=None, copy=False):
""" passed a manager and a axes dict """
for a, axe in axes.items():
if axe is not None:
mgr = mgr.reindex_axis(
axe, axis=cls._get_block_manager_axis(a), copy=False
)
# make a copy if explicitly requested
if copy:
mgr = mgr.copy()
if dtype is not None:
# avoid further copies if we can
if len(mgr.blocks) > 1 or mgr.blocks[0].values.dtype != dtype:
mgr = mgr.astype(dtype=dtype)
return mgr
# ----------------------------------------------------------------------
@property
def attrs(self) -> Dict[Optional[Hashable], Any]:
"""
Dictionary of global attributes on this object.
.. warning::
attrs is experimental and may change without warning.
"""
if self._attrs is None:
self._attrs = {}
return self._attrs
@attrs.setter
def attrs(self, value: Mapping[Optional[Hashable], Any]) -> None:
self._attrs = dict(value)
@classmethod
def _validate_dtype(cls, dtype):
""" validate the passed dtype """
if dtype is not None:
dtype = pandas_dtype(dtype)
# a compound dtype
if dtype.kind == "V":
raise NotImplementedError(
"compound dtypes are not implemented "
f"in the {cls.__name__} constructor"
)
return dtype
# ----------------------------------------------------------------------
# Construction
@property
def _constructor(self: FrameOrSeries) -> Type[FrameOrSeries]:
"""
Used when a manipulation result has the same dimensions as the
original.
"""
raise AbstractMethodError(self)
@property
def _constructor_sliced(self):
"""
Used when a manipulation result has one dimension fewer than the
original, such as slicing a single column of a DataFrame.
"""
raise AbstractMethodError(self)
@property
def _constructor_expanddim(self):
"""
Used when a manipulation result has one dimension more than the
original, such as Series.to_frame().
"""
raise NotImplementedError
# ----------------------------------------------------------------------
# Axis
_AXIS_ALIASES = {"rows": 0}
_AXIS_IALIASES = {0: "rows"}
_stat_axis_number = 0
_stat_axis_name = "index"
_ix = None
_AXIS_ORDERS: List[str]
_AXIS_NUMBERS: Dict[str, int]
_AXIS_NAMES: Dict[int, str]
_AXIS_REVERSED: bool
_info_axis_number: int
_info_axis_name: str
_AXIS_LEN: int
def _construct_axes_dict(self, axes=None, **kwargs):
"""Return an axes dictionary for myself."""
d = {a: self._get_axis(a) for a in (axes or self._AXIS_ORDERS)}
d.update(kwargs)
return d
@classmethod
def _construct_axes_from_arguments(
cls, args, kwargs, require_all: bool = False, sentinel=None
):
"""
Construct and return axes if supplied in args/kwargs.
If require_all, raise if not all axis arguments are supplied;
otherwise return a tuple of (axes, kwargs).
sentinel specifies the default parameter when an axis is not
supplied; useful to distinguish when a user explicitly passes None
in scenarios where None has special meaning.
"""
# construct the args
args = list(args)
for a in cls._AXIS_ORDERS:
# look for an argument by position
if a not in kwargs:
try:
kwargs[a] = args.pop(0)
except IndexError as err:
if require_all:
raise TypeError(
"not enough/duplicate arguments specified!"
) from err
axes = {a: kwargs.pop(a, sentinel) for a in cls._AXIS_ORDERS}
return axes, kwargs
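# Editorial sketch (not in the original pandas source): for DataFrame, whose
# _AXIS_ORDERS is ["index", "columns"], a positional axis argument is folded
# into the returned axes dict while unrelated keywords pass through, e.g.
#
#   axes, kwargs = pd.DataFrame._construct_axes_from_arguments(
#       (["a", "b"],), {"copy": True}
#   )
#   # axes   -> {"index": ["a", "b"], "columns": None}
#   # kwargs -> {"copy": True}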
@classmethod
def _get_axis_number(cls, axis):
axis = cls._AXIS_ALIASES.get(axis, axis)
if is_integer(axis):
if axis in cls._AXIS_NAMES:
return axis
else:
try:
return cls._AXIS_NUMBERS[axis]
except KeyError:
pass
raise ValueError(f"No axis named {axis} for object type {cls}")
@classmethod
def _get_axis_name(cls, axis):
axis = cls._AXIS_ALIASES.get(axis, axis)
if isinstance(axis, str):
if axis in cls._AXIS_NUMBERS:
return axis
else:
try:
return cls._AXIS_NAMES[axis]
except KeyError:
pass
raise ValueError(f"No axis named {axis} for object type {cls}")
def _get_axis(self, axis):
name = self._get_axis_name(axis)
return getattr(self, name)
@classmethod
def _get_block_manager_axis(cls, axis):
"""Map the axis to the block_manager axis."""
axis = cls._get_axis_number(axis)
if cls._AXIS_REVERSED:
m = cls._AXIS_LEN - 1
return m - axis
return axis
def _get_axis_resolvers(self, axis: str) -> Dict[str, ABCSeries]:
# index or columns
axis_index = getattr(self, axis)
d = dict()
prefix = axis[0]
for i, name in enumerate(axis_index.names):
if name is not None:
key = level = name
else:
# prefix with 'i' or 'c' depending on the input axis
# e.g., you must do ilevel_0 for the 0th level of an unnamed
# multiindex
key = f"{prefix}level_{i}"
level = i
level_values = axis_index.get_level_values(level)
s = level_values.to_series()
s.index = axis_index
d[key] = s
# put the index/columns itself in the dict
if isinstance(axis_index, MultiIndex):
dindex = axis_index
else:
dindex = axis_index.to_series()
d[axis] = dindex
return d
def _get_index_resolvers(self) -> Dict[str, ABCSeries]:
from pandas.core.computation.parsing import clean_column_name
d: Dict[str, ABCSeries] = {}
for axis_name in self._AXIS_ORDERS:
d.update(self._get_axis_resolvers(axis_name))
return {clean_column_name(k): v for k, v in d.items() if not isinstance(k, int)}
def _get_cleaned_column_resolvers(self) -> Dict[str, ABCSeries]:
"""
Return the special character free column resolvers of a dataframe.
Column names with special characters are 'cleaned up' so that they can
be referred to by backtick quoting.
Used in :meth:`DataFrame.eval`.
"""
from pandas.core.computation.parsing import clean_column_name
if isinstance(self, ABCSeries):
return {clean_column_name(self.name): self}
return {
clean_column_name(k): v for k, v in self.items() if not isinstance(k, int)
}
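# Editorial sketch (not in the original pandas source): backtick quoting in
# DataFrame.eval/DataFrame.query relies on these cleaned resolvers, e.g.
#
#   df = pd.DataFrame({"a column": [1, 2], "b": [10, 20]})
#   df.eval("`a column` + b")   # -> Series([11, 22])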
@property
def _info_axis(self):
return getattr(self, self._info_axis_name)
@property
def _stat_axis(self):
return getattr(self, self._stat_axis_name)
@property
def shape(self) -> Tuple[int, ...]:
"""
Return a tuple of axis dimensions
"""
return tuple(len(self._get_axis(a)) for a in self._AXIS_ORDERS)
@property
def axes(self) -> List[Index]:
"""
Return index label(s) of the internal NDFrame
"""
# we do it this way because if we have reversed axes, then
# the block manager shows them reversed
return [self._get_axis(a) for a in self._AXIS_ORDERS]
@property
def ndim(self) -> int:
"""
Return an int representing the number of axes / array dimensions.
Return 1 if Series. Otherwise return 2 if DataFrame.
See Also
--------
ndarray.ndim : Number of array dimensions.
Examples
--------
>>> s = pd.Series({'a': 1, 'b': 2, 'c': 3})
>>> s.ndim
1
>>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
>>> df.ndim
2
"""
return self._data.ndim
@property
def size(self) -> int:
"""
Return an int representing the number of elements in this object.
Return the number of rows if Series. Otherwise return the number of
rows times number of columns if DataFrame.
See Also
--------
ndarray.size : Number of elements in the array.
Examples
--------
>>> s = pd.Series({'a': 1, 'b': 2, 'c': 3})
>>> s.size
3
>>> df = pd.DataFrame({'col1': [1, 2], 'col2': [3, 4]})
>>> df.size
4
"""
return np.prod(self.shape)
@property
def _selected_obj(self: FrameOrSeries) -> FrameOrSeries:
""" internal compat with SelectionMixin """
return self
@property
def _obj_with_exclusions(self: FrameOrSeries) -> FrameOrSeries:
""" internal compat with SelectionMixin """
return self
def set_axis(self, labels, axis: Axis = 0, inplace: bool = False):
"""
Assign desired index to given axis.
Indexes for%(extended_summary_sub)s row labels can be changed by assigning
a list-like or Index.
.. versionchanged:: 0.21.0
The signature is now `labels` and `axis`, consistent with
the rest of pandas API. Previously, the `axis` and `labels`
arguments were respectively the first and second positional
arguments.
Parameters
----------
labels : list-like, Index
The values for the new index.
axis : %(axes_single_arg)s, default 0
The axis to update. The value 0 identifies the rows%(axis_description_sub)s.
inplace : bool, default False
Whether to return a new %(klass)s instance.
Returns
-------
renamed : %(klass)s or None
An object of type %(klass)s if inplace=False, None otherwise.
See Also
--------
%(klass)s.rename_axis : Alter the name of the index%(see_also_sub)s.
"""
if inplace:
setattr(self, self._get_axis_name(axis), labels)
else:
obj = self.copy()
obj.set_axis(labels, axis=axis, inplace=True)
return obj
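# Editorial sketch (not in the original pandas source): relabelling the row
# axis with set_axis, per the docstring above.
#
#   df = pd.DataFrame({"A": [1, 2, 3]})
#   df.set_axis(["a", "b", "c"], axis=0)                 # returns a relabelled copy
#   df.set_axis(["a", "b", "c"], axis=0, inplace=True)   # modifies df in place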
def _set_axis(self, axis: int, labels: Index) -> None:
labels = ensure_index(labels)
self._data.set_axis(axis, labels)
self._clear_item_cache()
def swapaxes(self: FrameOrSeries, axis1, axis2, copy=True) -> FrameOrSeries:
"""
Interchange axes and swap values axes appropriately.
Returns
-------
y : same as input
"""
i = self._get_axis_number(axis1)
j = self._get_axis_number(axis2)
if i == j:
if copy:
return self.copy()
return self
mapping = {i: j, j: i}
new_axes = (self._get_axis(mapping.get(k, k)) for k in range(self._AXIS_LEN))
new_values = self.values.swapaxes(i, j)
if copy:
new_values = new_values.copy()
return self._constructor(new_values, *new_axes).__finalize__(self)
def droplevel(self: FrameOrSeries, level, axis=0) -> FrameOrSeries:
"""
Return DataFrame with requested index / column level(s) removed.
.. versionadded:: 0.24.0
Parameters
----------
level : int, str, or list-like
If a string is given, must be the name of a level
If list-like, elements must be names or positional indexes
of levels.
axis : {0 or 'index', 1 or 'columns'}, default 0
Axis along which the level(s) is removed:
* 0 or 'index': remove level(s) from the row index.
* 1 or 'columns': remove level(s) from the columns.
Returns
-------
DataFrame
DataFrame with requested index / column level(s) removed.
Examples
--------
>>> df = pd.DataFrame([
... [1, 2, 3, 4],
... [5, 6, 7, 8],
... [9, 10, 11, 12]
... ]).set_index([0, 1]).rename_axis(['a', 'b'])
>>> df.columns = pd.MultiIndex.from_tuples([
... ('c', 'e'), ('d', 'f')
... ], names=['level_1', 'level_2'])
>>> df
level_1 c d
level_2 e f
a b
1 2 3 4
5 6 7 8
9 10 11 12
>>> df.droplevel('a')
level_1 c d
level_2 e f
b
2 3 4
6 7 8
10 11 12
>>> df.droplevel('level_2', axis=1)
level_1 c d
a b
1 2 3 4
5 6 7 8
9 10 11 12
"""
labels = self._get_axis(axis)
new_labels = labels.droplevel(level)
result = self.set_axis(new_labels, axis=axis, inplace=False)
return result
def pop(self: FrameOrSeries, item) -> FrameOrSeries:
"""
Return item and drop from frame. Raise KeyError if not found.
Parameters
----------
item : str
Label of column to be popped.
Returns
-------
Series
Examples
--------
>>> df = pd.DataFrame([('falcon', 'bird', 389.0),
... ('parrot', 'bird', 24.0),
... ('lion', 'mammal', 80.5),
... ('monkey', 'mammal', np.nan)],
... columns=('name', 'class', 'max_speed'))
>>> df
name class max_speed
0 falcon bird 389.0
1 parrot bird 24.0
2 lion mammal 80.5
3 monkey mammal NaN
>>> df.pop('class')
0 bird
1 bird
2 mammal
3 mammal
Name: class, dtype: object
>>> df
name max_speed
0 falcon 389.0
1 parrot 24.0
2 lion 80.5
3 monkey NaN
"""
result = self[item]
del self[item]
try:
result._reset_cacher()
except AttributeError:
pass
return result
def squeeze(self, axis=None):
"""
Squeeze 1 dimensional axis objects into scalars.
Series or DataFrames with a single element are squeezed to a scalar.
DataFrames with a single column or a single row are squeezed to a
Series. Otherwise the object is unchanged.
This method is most useful when you don't know if your
object is a Series or DataFrame, but you do know it has just a single
column. In that case you can safely call `squeeze` to ensure you have a
Series.
Parameters
----------
axis : {0 or 'index', 1 or 'columns', None}, default None
A specific axis to squeeze. By default, all length-1 axes are
squeezed.
Returns
-------
DataFrame, Series, or scalar
The projection after squeezing `axis` or all the axes.
See Also
--------
Series.iloc : Integer-location based indexing for selecting scalars.
DataFrame.iloc : Integer-location based indexing for selecting Series.
Series.to_frame : Inverse of DataFrame.squeeze for a
single-column DataFrame.
Examples
--------
>>> primes = pd.Series([2, 3, 5, 7])
Slicing might produce a Series with a single value:
>>> even_primes = primes[primes % 2 == 0]
>>> even_primes
0 2
dtype: int64
>>> even_primes.squeeze()
2
Squeezing objects with more than one value in every axis does nothing:
>>> odd_primes = primes[primes % 2 == 1]
>>> odd_primes
1 3
2 5
3 7
dtype: int64
>>> odd_primes.squeeze()
1 3
2 5
3 7
dtype: int64
Squeezing is even more effective when used with DataFrames.
>>> df = pd.DataFrame([[1, 2], [3, 4]], columns=['a', 'b'])
>>> df
a b
0 1 2
1 3 4
Slicing a single column will produce a DataFrame with the columns
having only one value:
>>> df_a = df[['a']]
>>> df_a
a
0 1
1 3
So the columns can be squeezed down, resulting in a Series:
>>> df_a.squeeze('columns')
0 1
1 3
Name: a, dtype: int64
Slicing a single row from a single column will produce a single
scalar DataFrame:
>>> df_0a = df.loc[df.index < 1, ['a']]
>>> df_0a
a
0 1
Squeezing the rows produces a single scalar Series:
>>> df_0a.squeeze('rows')
a 1
Name: 0, dtype: int64
Squeezing all axes will project directly into a scalar:
>>> df_0a.squeeze()
1
"""
axis = self._AXIS_NAMES if axis is None else (self._get_axis_number(axis),)
return self.iloc[
tuple(
0 if i in axis and len(a) == 1 else slice(None)
for i, a in enumerate(self.axes)
)
]
# ----------------------------------------------------------------------
# Rename
def rename(
self: FrameOrSeries,
mapper: Optional[Renamer] = None,
*,
index: Optional[Renamer] = None,
columns: Optional[Renamer] = None,
axis: Optional[Axis] = None,
copy: bool = True,
inplace: bool = False,
level: Optional[Level] = None,
errors: str = "ignore",
) -> Optional[FrameOrSeries]:
"""
Alter axis labels using input functions or mappings. Function / dict values must be
unique (1-to-1). Labels not contained in a dict / Series will be left
as-is. Extra labels listed don't throw an error. Alternatively, change
``Series.name`` with a scalar value (Series only).
Parameters
----------
%(axes)s : scalar, list-like, dict-like or function, optional
Scalar or list-like will alter the ``Series.name`` attribute,
and raise on DataFrame.
dict-like or functions are transformations to apply to
that axis' values
copy : bool, default True
Also copy underlying data.
inplace : bool, default False
Whether to return a new %(klass)s. If True then value of copy is
ignored.
level : int or level name, default None
In case of a MultiIndex, only rename labels in the specified
level.
errors : {'ignore', 'raise'}, default 'ignore'
If 'raise', raise a `KeyError` when a dict-like `mapper`, `index`,
or `columns` contains labels that are not present in the Index
being transformed.
If 'ignore', existing keys will be renamed and extra keys will be
ignored.
Returns
-------
renamed : %(klass)s (new object)
Raises
------
KeyError
If any of the labels is not found in the selected axis and
"errors='raise'".
See Also
--------
NDFrame.rename_axis
Examples
--------
>>> s = pd.Series([1, 2, 3])
>>> s
0 1
1 2
2 3
dtype: int64
>>> s.rename("my_name") # scalar, changes Series.name
0 1
1 2
2 3
Name: my_name, dtype: int64
>>> s.rename(lambda x: x ** 2) # function, changes labels
0 1
1 2
4 3
dtype: int64
>>> s.rename({1: 3, 2: 5}) # mapping, changes labels
0 1
3 2
5 3
dtype: int64
Since ``DataFrame`` doesn't have a ``.name`` attribute,
only mapping-type arguments are allowed.
>>> df = pd.DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
>>> df.rename(2)
Traceback (most recent call last):
...
TypeError: 'int' object is not callable
``DataFrame.rename`` supports two calling conventions
* ``(index=index_mapper, columns=columns_mapper, ...)``
* ``(mapper, axis={'index', 'columns'}, ...)``
We *highly* recommend using keyword arguments to clarify your
intent.
>>> df.rename(index=str, columns={"A": "a", "B": "c"})
a c
0 1 4
1 2 5
2 3 6
>>> df.rename(index=str, columns={"A": "a", "C": "c"})
a B
0 1 4
1 2 5
2 3 6
Using axis-style parameters
>>> df.rename(str.lower, axis='columns')
a b
0 1 4
1 2 5
2 3 6
>>> df.rename({1: 2, 2: 4}, axis='index')
A B
0 1 4
2 2 5
4 3 6
See the :ref:`user guide <basics.rename>` for more.
"""
if mapper is None and index is None and columns is None:
raise TypeError("must pass an index to rename")
if index is not None or columns is not None:
if axis is not None:
raise TypeError(
"Cannot specify both 'axis' and any of 'index' or 'columns'"
)
elif mapper is not None:
raise TypeError(
"Cannot specify both 'mapper' and any of 'index' or 'columns'"
)
else:
# use the mapper argument
if axis and self._get_axis_number(axis) == 1:
columns = mapper
else:
index = mapper
result = self if inplace else self.copy(deep=copy)
for axis_no, replacements in enumerate((index, columns)):
if replacements is None:
continue
ax = self._get_axis(axis_no)
baxis = self._get_block_manager_axis(axis_no)
f = com.get_rename_function(replacements)
if level is not None:
level = ax._get_level_number(level)
# GH 13473
if not callable(replacements):
indexer = ax.get_indexer_for(replacements)
if errors == "raise" and len(indexer[indexer == -1]):
missing_labels = [
label
for index, label in enumerate(replacements)
if indexer[index] == -1
]
raise KeyError(f"{missing_labels} not found in axis")
result._data = result._data.rename_axis(
f, axis=baxis, copy=copy, level=level
)
result._clear_item_cache()
if inplace:
self._update_inplace(result._data)
return None
else:
return result.__finalize__(self)
@rewrite_axis_style_signature("mapper", [("copy", True), ("inplace", False)])
def rename_axis(self, mapper=lib.no_default, **kwargs):
"""
Set the name of the axis for the index or columns.
Parameters
----------
mapper : scalar, list-like, optional
Value to set the axis name attribute.
index, columns : scalar, list-like, dict-like or function, optional
A scalar, list-like, dict-like or functions transformations to
apply to that axis' values.
Use either ``mapper`` and ``axis`` to
specify the axis to target with ``mapper``, or ``index``
and/or ``columns``.
.. versionchanged:: 0.24.0
axis : {0 or 'index', 1 or 'columns'}, default 0
The axis to rename.
copy : bool, default True
Also copy underlying data.
inplace : bool, default False
Modifies the object directly, instead of creating a new Series
or DataFrame.
Returns
-------
Series, DataFrame, or None
The same type as the caller or None if `inplace` is True.
See Also
--------
Series.rename : Alter Series index labels or name.
DataFrame.rename : Alter DataFrame index labels or name.
Index.rename : Set new names on index.
Notes
-----
``DataFrame.rename_axis`` supports two calling conventions
* ``(index=index_mapper, columns=columns_mapper, ...)``
* ``(mapper, axis={'index', 'columns'}, ...)``
The first calling convention will only modify the names of
the index and/or the names of the Index object that is the columns.
In this case, the parameter ``copy`` is ignored.
The second calling convention will modify the names of the
corresponding index if mapper is a list or a scalar.
However, if mapper is dict-like or a function, it will use the
deprecated behavior of modifying the axis *labels*.
We *highly* recommend using keyword arguments to clarify your
intent.
Examples
--------
**Series**
>>> s = pd.Series(["dog", "cat", "monkey"])
>>> s
0 dog
1 cat
2 monkey
dtype: object
>>> s.rename_axis("animal")
animal
0 dog
1 cat
2 monkey
dtype: object
**DataFrame**
>>> df = pd.DataFrame({"num_legs": [4, 4, 2],
... "num_arms": [0, 0, 2]},
... ["dog", "cat", "monkey"])
>>> df
num_legs num_arms
dog 4 0
cat 4 0
monkey 2 2
>>> df = df.rename_axis("animal")
>>> df
num_legs num_arms
animal
dog 4 0
cat 4 0
monkey 2 2
>>> df = df.rename_axis("limbs", axis="columns")
>>> df
limbs num_legs num_arms
animal
dog 4 0
cat 4 0
monkey 2 2
**MultiIndex**
>>> df.index = pd.MultiIndex.from_product([['mammal'],
... ['dog', 'cat', 'monkey']],
... names=['type', 'name'])
>>> df
limbs num_legs num_arms
type name
mammal dog 4 0
cat 4 0
monkey 2 2
>>> df.rename_axis(index={'type': 'class'})
limbs num_legs num_arms
class name
mammal dog 4 0
cat 4 0
monkey 2 2
>>> df.rename_axis(columns=str.upper)
LIMBS num_legs num_arms
type name
mammal dog 4 0
cat 4 0
monkey 2 2
"""
axes, kwargs = self._construct_axes_from_arguments(
(), kwargs, sentinel=lib.no_default
)
copy = kwargs.pop("copy", True)
inplace = kwargs.pop("inplace", False)
axis = kwargs.pop("axis", 0)
if axis is not None:
axis = self._get_axis_number(axis)
if kwargs:
raise TypeError(
"rename_axis() got an unexpected keyword "
f'argument "{list(kwargs.keys())[0]}"'
)
inplace = validate_bool_kwarg(inplace, "inplace")
if mapper is not lib.no_default:
# Use v0.23 behavior if a scalar or list
non_mapper = is_scalar(mapper) or (
is_list_like(mapper) and not is_dict_like(mapper)
)
if non_mapper:
return self._set_axis_name(mapper, axis=axis, inplace=inplace)
else:
raise ValueError("Use `.rename` to alter labels with a mapper.")
else:
# Use new behavior. Means that index and/or columns
# is specified
result = self if inplace else self.copy(deep=copy)
for axis in range(self._AXIS_LEN):
v = axes.get(self._AXIS_NAMES[axis])
if v is lib.no_default:
continue
non_mapper = is_scalar(v) or (is_list_like(v) and not is_dict_like(v))
if non_mapper:
newnames = v
else:
f = com.get_rename_function(v)
curnames = self._get_axis(axis).names
newnames = [f(name) for name in curnames]
result._set_axis_name(newnames, axis=axis, inplace=True)
if not inplace:
return result
def _set_axis_name(self, name, axis=0, inplace=False):
"""
Set the name(s) of the axis.
Parameters
----------
name : str or list of str
Name(s) to set.
axis : {0 or 'index', 1 or 'columns'}, default 0
The axis to set the label. The value 0 or 'index' specifies index,
and the value 1 or 'columns' specifies columns.
inplace : bool, default False
If `True`, do operation inplace and return None.
.. versionadded:: 0.21.0
Returns
-------
Series, DataFrame, or None
The same type as the caller or `None` if `inplace` is `True`.
See Also
--------
DataFrame.rename : Alter the axis labels of :class:`DataFrame`.
Series.rename : Alter the index labels or set the index name
of :class:`Series`.
Index.rename : Set the name of :class:`Index` or :class:`MultiIndex`.
Examples
--------
>>> df = pd.DataFrame({"num_legs": [4, 4, 2]},
... ["dog", "cat", "monkey"])
>>> df
num_legs
dog 4
cat 4
monkey 2
>>> df._set_axis_name("animal")
num_legs
animal
dog 4
cat 4
monkey 2
>>> df.index = pd.MultiIndex.from_product(
... [["mammal"], ['dog', 'cat', 'monkey']])
>>> df._set_axis_name(["type", "name"])
num_legs
type name
mammal dog 4
cat 4
monkey 2
"""
axis = self._get_axis_number(axis)
idx = self._get_axis(axis).set_names(name)
inplace = validate_bool_kwarg(inplace, "inplace")
renamed = self if inplace else self.copy()
renamed.set_axis(idx, axis=axis, inplace=True)
if not inplace:
return renamed
# ----------------------------------------------------------------------
# Comparison Methods
def _indexed_same(self, other) -> bool:
return all(
self._get_axis(a).equals(other._get_axis(a)) for a in self._AXIS_ORDERS
)
def equals(self, other):
"""
Test whether two objects contain the same elements.
This function allows two Series or DataFrames to be compared against
each other to see if they have the same shape and elements. NaNs in
the same location are considered equal. The column headers do not
need to have the same type, but the elements within the columns must
be the same dtype.
Parameters
----------
other : Series or DataFrame
The other Series or DataFrame to be compared with the first.
Returns
-------
bool
True if all elements are the same in both objects, False
otherwise.
See Also
--------
Series.eq : Compare two Series objects of the same length
and return a Series where each element is True if the element
in each Series is equal, False otherwise.
DataFrame.eq : Compare two DataFrame objects of the same shape and
return a DataFrame where each element is True if the respective
element in each DataFrame is equal, False otherwise.
testing.assert_series_equal : Raises an AssertionError if left and
right are not equal. Provides an easy interface to ignore
inequality in dtypes, indexes and precision among others.
testing.assert_frame_equal : Like assert_series_equal, but targets
DataFrames.
numpy.array_equal : Return True if two arrays have the same shape
and elements, False otherwise.
Notes
-----
This function requires that the elements have the same dtype as their
respective elements in the other Series or DataFrame. However, the
column labels do not need to have the same type, as long as they are
still considered equal.
Examples
--------
>>> df = pd.DataFrame({1: [10], 2: [20]})
>>> df
1 2
0 10 20
DataFrames df and exactly_equal have the same types and values for
their elements and column labels, which will return True.
>>> exactly_equal = pd.DataFrame({1: [10], 2: [20]})
>>> exactly_equal
1 2
0 10 20
>>> df.equals(exactly_equal)
True
DataFrames df and different_column_type have the same element
types and values, but have different types for the column labels,
which will still return True.
>>> different_column_type = pd.DataFrame({1.0: [10], 2.0: [20]})
>>> different_column_type
1.0 2.0
0 10 20
>>> df.equals(different_column_type)
True
DataFrames df and different_data_type have different types for the
same values for their elements, and will return False even though
their column labels are the same values and types.
>>> different_data_type = pd.DataFrame({1: [10.0], 2: [20.0]})
>>> different_data_type
1 2
0 10.0 20.0
>>> df.equals(different_data_type)
False
"""
if not isinstance(other, self._constructor):
return False
return self._data.equals(other._data)
# -------------------------------------------------------------------------
# Unary Methods
def __neg__(self):
values = self._values
if is_bool_dtype(values):
arr = operator.inv(values)
elif (
is_numeric_dtype(values)
or is_timedelta64_dtype(values)
or is_object_dtype(values)
):
arr = operator.neg(values)
else:
raise TypeError(f"Unary negative expects numeric dtype, not {values.dtype}")
return self.__array_wrap__(arr)
def __pos__(self):
values = self._values
if is_bool_dtype(values):
arr = values
elif (
is_numeric_dtype(values)
or is_timedelta64_dtype(values)
or is_object_dtype(values)
):
arr = operator.pos(values)
else:
raise TypeError(f"Unary plus expects numeric dtype, not {values.dtype}")
return self.__array_wrap__(arr)
def __invert__(self):
if not self.size:
# inv fails with 0 len
return self
new_data = self._data.apply(operator.invert)
result = self._constructor(new_data).__finalize__(self)
return result
def __nonzero__(self):
raise ValueError(
f"The truth value of a {type(self).__name__} is ambiguous. "
"Use a.empty, a.bool(), a.item(), a.any() or a.all()."
)
__bool__ = __nonzero__
def bool(self):
"""
Return the bool of a single element PandasObject.
This must be a boolean scalar value, either True or False. Raise a
ValueError if the PandasObject does not have exactly 1 element, or that
element is not boolean.
Returns
-------
bool
Same single boolean value converted to bool type.
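Examples
--------
A minimal illustration with single-element boolean objects:
>>> pd.Series([True]).bool()
True
>>> pd.Series([False]).bool()
False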
"""
v = self.squeeze()
if isinstance(v, (bool, np.bool_)):
return bool(v)
elif is_scalar(v):
raise ValueError(
"bool cannot act on a non-boolean single element "
f"{type(self).__name__}"
)
self.__nonzero__()
def __abs__(self: FrameOrSeries) -> FrameOrSeries:
return self.abs()
def __round__(self: FrameOrSeries, decimals: int = 0) -> FrameOrSeries:
return self.round(decimals)
# -------------------------------------------------------------------------
# Label or Level Combination Helpers
#
# A collection of helper methods for DataFrame/Series operations that
# accept a combination of column/index labels and levels. All such
# operations should utilize/extend these methods when possible so that we
# have consistent precedence and validation logic throughout the library.
def _is_level_reference(self, key, axis=0):
"""
Test whether a key is a level reference for a given axis.
To be considered a level reference, `key` must be a string that:
- (axis=0): Matches the name of an index level and does NOT match
a column label.
- (axis=1): Matches the name of a column level and does NOT match
an index label.
Parameters
----------
key : str
Potential level name for the given axis
axis : int, default 0
Axis that levels are associated with (0 for index, 1 for columns)
Returns
-------
is_level : bool
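Examples
--------
A small, constructed frame with one index level and one column:
>>> df = pd.DataFrame({"a": [1, 2]}).rename_axis("lvl")
>>> df._is_level_reference("lvl")
True
>>> df._is_level_reference("a")
False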
"""
axis = self._get_axis_number(axis)
return (
key is not None
and is_hashable(key)
and key in self.axes[axis].names
and not self._is_label_reference(key, axis=axis)
)
def _is_label_reference(self, key, axis=0) -> bool_t:
"""
Test whether a key is a label reference for a given axis.
To be considered a label reference, `key` must be a string that:
- (axis=0): Matches a column label
- (axis=1): Matches an index label
Parameters
----------
key: str
Potential label name
axis: int, default 0
Axis perpendicular to the axis that labels are associated with
(0 means search for column labels, 1 means search for index labels)
Returns
-------
is_label: bool
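Examples
--------
Using the same kind of constructed frame as above:
>>> df = pd.DataFrame({"a": [1, 2]}).rename_axis("lvl")
>>> df._is_label_reference("a")
True
>>> df._is_label_reference("lvl")
False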
"""
axis = self._get_axis_number(axis)
other_axes = (ax for ax in range(self._AXIS_LEN) if ax != axis)
return (
key is not None
and is_hashable(key)
and any(key in self.axes[ax] for ax in other_axes)
)
def _is_label_or_level_reference(self, key: str, axis: int = 0) -> bool_t:
"""
Test whether a key is a label or level reference for a given axis.
To be considered either a label or a level reference, `key` must be a
string that:
- (axis=0): Matches a column label or an index level
- (axis=1): Matches an index label or a column level
Parameters
----------
key: str
Potential label or level name
axis: int, default 0
Axis that levels are associated with (0 for index, 1 for columns)
Returns
-------
is_label_or_level: bool
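Examples
--------
>>> df = pd.DataFrame({"a": [1, 2]}).rename_axis("lvl")
>>> df._is_label_or_level_reference("a")
True
>>> df._is_label_or_level_reference("lvl")
True
>>> df._is_label_or_level_reference("missing")
False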
"""
return self._is_level_reference(key, axis=axis) or self._is_label_reference(
key, axis=axis
)
def _check_label_or_level_ambiguity(self, key, axis: int = 0) -> None:
"""
Check whether `key` is ambiguous.
By ambiguous, we mean that it matches both a level of the input
`axis` and a label of the other axis.
Parameters
----------
key: str or object
Label or level name.
axis: int, default 0
Axis that levels are associated with (0 for index, 1 for columns).
Raises
------
ValueError: `key` is ambiguous
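Examples
--------
A name that is both an index level and a column label raises:
>>> df = pd.DataFrame({"a": [1, 2]}).rename_axis("a")
>>> df._check_label_or_level_ambiguity("a")
Traceback (most recent call last):
    ...
ValueError: 'a' is both an index level and a column label, which is ambiguous.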
"""
axis = self._get_axis_number(axis)
other_axes = (ax for ax in range(self._AXIS_LEN) if ax != axis)
if (
key is not None
and is_hashable(key)
and key in self.axes[axis].names
and any(key in self.axes[ax] for ax in other_axes)
):
# Build an informative and grammatical warning
level_article, level_type = (
("an", "index") if axis == 0 else ("a", "column")
)
label_article, label_type = (
("a", "column") if axis == 0 else ("an", "index")
)
msg = (
f"'{key}' is both {level_article} {level_type} level and "
f"{label_article} {label_type} label, which is ambiguous."
)
raise ValueError(msg)
def _get_label_or_level_values(self, key: str, axis: int = 0) -> np.ndarray:
"""
Return a 1-D array of values associated with `key`, a label or level
from the given `axis`.
Retrieval logic:
- (axis=0): Return column values if `key` matches a column label.
Otherwise return index level values if `key` matches an index
level.
- (axis=1): Return row values if `key` matches an index label.
Otherwise return column level values if 'key' matches a column
level
Parameters
----------
key: str
Label or level name.
axis: int, default 0
Axis that levels are associated with (0 for index, 1 for columns)
Returns
-------
values: np.ndarray
Raises
------
KeyError
if `key` matches neither a label nor a level
ValueError
if `key` matches multiple labels
FutureWarning
if `key` is ambiguous. This will become an ambiguity error in a
future version
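Examples
--------
A constructed frame where "a" is an index level and "b" a column label
(wrapped in ``list`` to keep the doctest platform-independent):
>>> df = pd.DataFrame({"a": [1, 2], "b": [3, 4]}).set_index("a")
>>> list(df._get_label_or_level_values("b"))
[3, 4]
>>> list(df._get_label_or_level_values("a"))
[1, 2]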
"""
axis = self._get_axis_number(axis)
other_axes = [ax for ax in range(self._AXIS_LEN) if ax != axis]
if self._is_label_reference(key, axis=axis):
self._check_label_or_level_ambiguity(key, axis=axis)
values = self.xs(key, axis=other_axes[0])._values
elif self._is_level_reference(key, axis=axis):
values = self.axes[axis].get_level_values(key)._values
else:
raise KeyError(key)
# Check for duplicates
if values.ndim > 1:
if other_axes and isinstance(self._get_axis(other_axes[0]), MultiIndex):
multi_message = (
"\n"
"For a multi-index, the label must be a "
"tuple with elements corresponding to each level."
)
else:
multi_message = ""
label_axis_name = "column" if axis == 0 else "index"
raise ValueError(
(
f"The {label_axis_name} label '{key}' "
f"is not unique.{multi_message}"
)
)
return values
def _drop_labels_or_levels(self, keys, axis: int = 0):
"""
Drop labels and/or levels for the given `axis`.
For each key in `keys`:
- (axis=0): If key matches a column label then drop the column.
Otherwise if key matches an index level then drop the level.
- (axis=1): If key matches an index label then drop the row.
Otherwise if key matches a column level then drop the level.
Parameters
----------
keys: str or list of str
labels or levels to drop
axis: int, default 0
Axis that levels are associated with (0 for index, 1 for columns)
Returns
-------
dropped: DataFrame
Raises
------
ValueError
if any `keys` match neither a label nor a level
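Examples
--------
Dropping a column label versus an index level on a constructed frame:
>>> df = pd.DataFrame({"a": [1, 2], "b": [3, 4]}).set_index("a")
>>> df._drop_labels_or_levels("b").columns.tolist()
[]
>>> df._drop_labels_or_levels("a").index.names
FrozenList([None])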
"""
axis = self._get_axis_number(axis)
# Validate keys
keys = com.maybe_make_list(keys)
invalid_keys = [
k for k in keys if not self._is_label_or_level_reference(k, axis=axis)
]
if invalid_keys:
raise ValueError(
(
"The following keys are not valid labels or "
f"levels for axis {axis}: {invalid_keys}"
)
)
# Compute levels and labels to drop
levels_to_drop = [k for k in keys if self._is_level_reference(k, axis=axis)]
labels_to_drop = [k for k in keys if not self._is_level_reference(k, axis=axis)]
# Perform copy upfront and then use inplace operations below.
# This ensures that we always perform exactly one copy.
# ``copy`` and/or ``inplace`` options could be added in the future.
dropped = self.copy()
if axis == 0:
# Handle dropping index levels
if levels_to_drop:
dropped.reset_index(levels_to_drop, drop=True, inplace=True)
# Handle dropping columns labels
if labels_to_drop:
dropped.drop(labels_to_drop, axis=1, inplace=True)
else:
# Handle dropping column levels
if levels_to_drop:
if isinstance(dropped.columns, MultiIndex):
# Drop the specified levels from the MultiIndex
dropped.columns = dropped.columns.droplevel(levels_to_drop)
else:
# Drop the last level of Index by replacing with
# a RangeIndex
dropped.columns = RangeIndex(dropped.columns.size)
# Handle dropping index labels
if labels_to_drop:
dropped.drop(labels_to_drop, axis=0, inplace=True)
return dropped
# ----------------------------------------------------------------------
# Iteration
def __hash__(self):
raise TypeError(
f"{repr(type(self).__name__)} objects are mutable, "
f"thus they cannot be hashed"
)
def __iter__(self):
"""
Iterate over info axis.
Returns
-------
iterator
Info axis as iterator.
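Examples
--------
Iterating a DataFrame yields its column labels:
>>> df = pd.DataFrame({'A': [1, 2], 'B': [3, 4]})
>>> for label in df:
...     print(label)
A
B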
"""
return iter(self._info_axis)
# can we get a better explanation of this?
def keys(self):
"""
Get the 'info axis' (see Indexing for more).
This is index for Series, columns for DataFrame.
Returns
-------
Index
Info axis.
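Examples
--------
>>> d = pd.DataFrame({'a': [1], 'b': [2]})
>>> d.keys()
Index(['a', 'b'], dtype='object')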
"""
return self._info_axis
def items(self):
"""
Iterate over (label, values) on the info axis.
This is index for Series and columns for DataFrame.
Returns
-------
Generator
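Examples
--------
>>> df = pd.DataFrame({'species': ['bear'], 'population': [1864]})
>>> for label, content in df.items():
...     print(label)
species
population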
"""
for h in self._info_axis:
yield h, self[h]
@Appender(items.__doc__)
def iteritems(self):
return self.items()
def __len__(self) -> int:
"""Returns length of info axis"""
return len(self._info_axis)
def __contains__(self, key) -> bool_t:
"""True if the key is in the info axis"""
return key in self._info_axis
@property
def empty(self) -> bool_t:
"""
Indicator whether DataFrame is empty.
True if DataFrame is entirely empty (no items), meaning any of the
axes are of length 0.
Returns
-------
bool
If DataFrame is empty, return True, if not return False.
See Also
--------
Series.dropna : Return series without null values.
DataFrame.dropna : Return DataFrame with labels on given axis omitted
where (all or any) data are missing.
Notes
-----
If DataFrame contains only NaNs, it is still not considered empty. See
the example below.
Examples
--------
An example of an actual empty DataFrame. Notice the index is empty:
>>> df_empty = pd.DataFrame({'A' : []})
>>> df_empty
Empty DataFrame
Columns: [A]
Index: []
>>> df_empty.empty
True
If we only have NaNs in our DataFrame, it is not considered empty! We
will need to drop the NaNs to make the DataFrame empty:
>>> df = pd.DataFrame({'A' : [np.nan]})
>>> df
A
0 NaN
>>> df.empty
False
>>> df.dropna().empty
True
"""
return any(len(self._get_axis(a)) == 0 for a in self._AXIS_ORDERS)
# ----------------------------------------------------------------------
# Array Interface
# This is also set in IndexOpsMixin
# GH#23114 Ensure ndarray.__op__(DataFrame) returns NotImplemented
__array_priority__ = 1000
def __array__(self, dtype=None) -> np.ndarray:
return np.asarray(self._values, dtype=dtype)
def __array_wrap__(self, result, context=None):
result = lib.item_from_zerodim(result)
if is_scalar(result):
# e.g. we get here with np.ptp(series)
# ptp also requires the item_from_zerodim
return result
d = self._construct_axes_dict(self._AXIS_ORDERS, copy=False)
return self._constructor(result, **d).__finalize__(self)
# ideally we would define this to avoid the getattr checks, but
# it is slower
# @property
# def __array_interface__(self):
# """ provide numpy array interface method """
# values = self.values
# return dict(typestr=values.dtype.str,shape=values.shape,data=values)
# ----------------------------------------------------------------------
# Picklability
def __getstate__(self) -> Dict[str, Any]:
meta = {k: getattr(self, k, None) for k in self._metadata}
return dict(
_data=self._data,
_typ=self._typ,
_metadata=self._metadata,
attrs=self.attrs,
**meta,
)
def __setstate__(self, state):
if isinstance(state, BlockManager):
self._data = state
elif isinstance(state, dict):
typ = state.get("_typ")
if typ is not None:
attrs = state.get("_attrs", {})
object.__setattr__(self, "_attrs", attrs)
# set in the order of internal names
# to avoid definitional recursion
# e.g. say fill_value needing _data to be
# defined
meta = set(self._internal_names + self._metadata)
for k in list(meta):
if k in state:
v = state[k]
object.__setattr__(self, k, v)
for k, v in state.items():
if k not in meta:
object.__setattr__(self, k, v)
else:
raise NotImplementedError("Pre-0.12 pickles are no longer supported")
elif len(state) == 2:
raise NotImplementedError("Pre-0.12 pickles are no longer supported")
self._item_cache = {}
# ----------------------------------------------------------------------
# Rendering Methods
def __repr__(self) -> str:
# string representation based upon iterating over self
# (since, by definition, `PandasContainers` are iterable)
prepr = f"[{','.join(map(pprint_thing, self))}]"
return f"{type(self).__name__}({prepr})"
def _repr_latex_(self):
"""
Returns a LaTeX representation for a particular object.
Mainly for use with nbconvert (jupyter notebook conversion to pdf).
"""
if config.get_option("display.latex.repr"):
return self.to_latex()
else:
return None
def _repr_data_resource_(self):
"""
Not a real Jupyter special repr method, but we use the same
naming convention.
"""
if config.get_option("display.html.table_schema"):
data = self.head(config.get_option("display.max_rows"))
payload = json.loads(
data.to_json(orient="table"), object_pairs_hook=collections.OrderedDict
)
return payload
# ----------------------------------------------------------------------
# I/O Methods
_shared_docs[
"to_markdown"
] = """
Print %(klass)s in Markdown-friendly format.
.. versionadded:: 1.0.0
Parameters
----------
buf : str, Path or StringIO-like, optional, default None
Buffer to write to. If None, the output is returned as a string.
mode : str, optional
Mode in which file is opened.
**kwargs
These parameters will be passed to `tabulate`.
Returns
-------
str
%(klass)s in Markdown-friendly format.
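Examples
--------
An illustrative call; it requires the optional ``tabulate`` package and
the exact output may vary with its version, so the doctest is skipped:
>>> df = pd.DataFrame({"animal": ["elk", "pig"]})
>>> print(df.to_markdown())  # doctest: +SKIP
|    | animal   |
|---:|:---------|
|  0 | elk      |
|  1 | pig      |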
"""
_shared_docs[
"to_excel"
] = """
Write %(klass)s to an Excel sheet.
To write a single %(klass)s to an Excel .xlsx file it is only necessary to
specify a target file name. To write to multiple sheets it is necessary to
create an `ExcelWriter` object with a target file name, and specify a sheet
in the file to write to.
Multiple sheets may be written to by specifying unique `sheet_name`.
With all data written to the file it is necessary to save the changes.
Note that creating an `ExcelWriter` object with a file name that already
exists will result in the contents of the existing file being erased.
Parameters
----------
excel_writer : str or ExcelWriter object
File path or existing ExcelWriter.
sheet_name : str, default 'Sheet1'
Name of sheet which will contain DataFrame.
na_rep : str, default ''
Missing data representation.
float_format : str, optional
Format string for floating point numbers. For example
``float_format="%%.2f"`` will format 0.1234 to 0.12.
columns : sequence or list of str, optional
Columns to write.
header : bool or list of str, default True
Write out the column names. If a list of string is given it is
assumed to be aliases for the column names.
index : bool, default True
Write row names (index).
index_label : str or sequence, optional
Column label for index column(s) if desired. If not specified, and
`header` and `index` are True, then the index names are used. A
sequence should be given if the DataFrame uses MultiIndex.
startrow : int, default 0
Upper left cell row to dump data frame.
startcol : int, default 0
Upper left cell column to dump data frame.
engine : str, optional
Write engine to use, 'openpyxl' or 'xlsxwriter'. You can also set this
via the options ``io.excel.xlsx.writer``, ``io.excel.xls.writer``, and
``io.excel.xlsm.writer``.
merge_cells : bool, default True
Write MultiIndex and Hierarchical Rows as merged cells.
encoding : str, optional
Encoding of the resulting excel file. Only necessary for xlwt,
other writers support unicode natively.
inf_rep : str, default 'inf'
Representation for infinity (there is no native representation for
infinity in Excel).
verbose : bool, default True
Display more information in the error logs.
freeze_panes : tuple of int (length 2), optional
Specifies the one-based bottommost row and rightmost column that
is to be frozen.
See Also
--------
to_csv : Write DataFrame to a comma-separated values (csv) file.
ExcelWriter : Class for writing DataFrame objects into excel sheets.
read_excel : Read an Excel file into a pandas DataFrame.
read_csv : Read a comma-separated values (csv) file into DataFrame.
Notes
-----
For compatibility with :meth:`~DataFrame.to_csv`,
to_excel serializes lists and dicts to strings before writing.
Once a workbook has been saved it is not possible to write further data
without rewriting the whole workbook.
Examples
--------
Create, write to and save a workbook:
>>> df1 = pd.DataFrame([['a', 'b'], ['c', 'd']],
... index=['row 1', 'row 2'],
... columns=['col 1', 'col 2'])
>>> df1.to_excel("output.xlsx") # doctest: +SKIP
To specify the sheet name:
>>> df1.to_excel("output.xlsx",
... sheet_name='Sheet_name_1') # doctest: +SKIP
If you wish to write to more than one sheet in the workbook, it is
necessary to specify an ExcelWriter object:
>>> df2 = df1.copy()
>>> with pd.ExcelWriter('output.xlsx') as writer: # doctest: +SKIP
... df1.to_excel(writer, sheet_name='Sheet_name_1')
... df2.to_excel(writer, sheet_name='Sheet_name_2')
ExcelWriter can also be used to append to an existing Excel file:
>>> with pd.ExcelWriter('output.xlsx',
... mode='a') as writer: # doctest: +SKIP
... df.to_excel(writer, sheet_name='Sheet_name_3')
To set the library that is used to write the Excel file,
you can pass the `engine` keyword (the default engine is
automatically chosen depending on the file extension):
>>> df1.to_excel('output1.xlsx', engine='xlsxwriter') # doctest: +SKIP
"""
@Appender(_shared_docs["to_excel"] % dict(klass="object"))
def to_excel(
self,
excel_writer,
sheet_name="Sheet1",
na_rep="",
float_format=None,
columns=None,
header=True,
index=True,
index_label=None,
startrow=0,
startcol=0,
engine=None,
merge_cells=True,
encoding=None,
inf_rep="inf",
verbose=True,
freeze_panes=None,
) -> None:
df = self if isinstance(self, ABCDataFrame) else self.to_frame()
from pandas.io.formats.excel import ExcelFormatter
formatter = ExcelFormatter(
df,
na_rep=na_rep,
cols=columns,
header=header,
float_format=float_format,
index=index,
index_label=index_label,
merge_cells=merge_cells,
inf_rep=inf_rep,
)
formatter.write(
excel_writer,
sheet_name=sheet_name,
startrow=startrow,
startcol=startcol,
freeze_panes=freeze_panes,
engine=engine,
)
def to_json(
self,
path_or_buf: Optional[FilePathOrBuffer] = None,
orient: Optional[str] = None,
date_format: Optional[str] = None,
double_precision: int = 10,
force_ascii: bool_t = True,
date_unit: str = "ms",
default_handler: Optional[Callable[[Any], JSONSerializable]] = None,
lines: bool_t = False,
compression: Optional[str] = "infer",
index: bool_t = True,
indent: Optional[int] = None,
) -> Optional[str]:
"""
Convert the object to a JSON string.
Note NaN's and None will be converted to null and datetime objects
will be converted to UNIX timestamps.
Parameters
----------
path_or_buf : str or file handle, optional
File path or object. If not specified, the result is returned as
a string.
orient : str
Indication of expected JSON string format.
* Series:
- default is 'index'
- allowed values are: {'split','records','index','table'}.
* DataFrame:
- default is 'columns'
- allowed values are: {'split', 'records', 'index', 'columns',
'values', 'table'}.
* The format of the JSON string:
- 'split' : dict like {'index' -> [index], 'columns' -> [columns],
'data' -> [values]}
- 'records' : list like [{column -> value}, ... , {column -> value}]
- 'index' : dict like {index -> {column -> value}}
- 'columns' : dict like {column -> {index -> value}}
- 'values' : just the values array
- 'table' : dict like {'schema': {schema}, 'data': {data}}
Describing the data, where the data component is like ``orient='records'``.
.. versionchanged:: 0.20.0
date_format : {None, 'epoch', 'iso'}
Type of date conversion. 'epoch' = epoch milliseconds,
'iso' = ISO8601. The default depends on the `orient`. For
``orient='table'``, the default is 'iso'. For all other orients,
the default is 'epoch'.
double_precision : int, default 10
The number of decimal places to use when encoding
floating point values.
force_ascii : bool, default True
Force encoded string to be ASCII.
date_unit : str, default 'ms' (milliseconds)
The time unit to encode to, governs timestamp and ISO8601
precision. One of 's', 'ms', 'us', 'ns' for second, millisecond,
microsecond, and nanosecond respectively.
default_handler : callable, default None
Handler to call if object cannot otherwise be converted to a
suitable format for JSON. Should receive a single argument which is
the object to convert and return a serialisable object.
lines : bool, default False
If 'orient' is 'records' write out line delimited json format. Will
throw ValueError if incorrect 'orient' since others are not list
like.
compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}
A string representing the compression to use in the output file,
only used when the first argument is a filename. By default, the
compression is inferred from the filename.
.. versionadded:: 0.21.0
.. versionchanged:: 0.24.0
'infer' option added and set to default
index : bool, default True
Whether to include the index values in the JSON string. Not
including the index (``index=False``) is only supported when
orient is 'split' or 'table'.
.. versionadded:: 0.23.0
indent : int, optional
Length of whitespace used to indent each record.
.. versionadded:: 1.0.0
Returns
-------
None or str
If path_or_buf is None, returns the resulting json format as a
string. Otherwise returns None.
See Also
--------
read_json : Convert a JSON string to pandas object.
Notes
-----
The behavior of ``indent=0`` varies from the stdlib, which does not
indent the output but does insert newlines. Currently, ``indent=0``
and the default ``indent=None`` are equivalent in pandas, though this
may change in a future release.
Examples
--------
>>> df = pd.DataFrame([['a', 'b'], ['c', 'd']],
... index=['row 1', 'row 2'],
... columns=['col 1', 'col 2'])
>>> df.to_json(orient='split')
'{"columns":["col 1","col 2"],
"index":["row 1","row 2"],
"data":[["a","b"],["c","d"]]}'
Encoding/decoding a Dataframe using ``'records'`` formatted JSON.
Note that index labels are not preserved with this encoding.
>>> df.to_json(orient='records')
'[{"col 1":"a","col 2":"b"},{"col 1":"c","col 2":"d"}]'
Encoding/decoding a Dataframe using ``'index'`` formatted JSON:
>>> df.to_json(orient='index')
'{"row 1":{"col 1":"a","col 2":"b"},"row 2":{"col 1":"c","col 2":"d"}}'
Encoding/decoding a Dataframe using ``'columns'`` formatted JSON:
>>> df.to_json(orient='columns')
'{"col 1":{"row 1":"a","row 2":"c"},"col 2":{"row 1":"b","row 2":"d"}}'
Encoding/decoding a Dataframe using ``'values'`` formatted JSON:
>>> df.to_json(orient='values')
'[["a","b"],["c","d"]]'
Encoding with Table Schema
>>> df.to_json(orient='table')
'{"schema": {"fields": [{"name": "index", "type": "string"},
{"name": "col 1", "type": "string"},
{"name": "col 2", "type": "string"}],
"primaryKey": "index",
"pandas_version": "0.20.0"},
"data": [{"index": "row 1", "col 1": "a", "col 2": "b"},
{"index": "row 2", "col 1": "c", "col 2": "d"}]}'
"""
from pandas.io import json
if date_format is None and orient == "table":
date_format = "iso"
elif date_format is None:
date_format = "epoch"
config.is_nonnegative_int(indent)
indent = indent or 0
return json.to_json(
path_or_buf=path_or_buf,
obj=self,
orient=orient,
date_format=date_format,
double_precision=double_precision,
force_ascii=force_ascii,
date_unit=date_unit,
default_handler=default_handler,
lines=lines,
compression=compression,
index=index,
indent=indent,
)
def to_hdf(
self,
path_or_buf,
key: str,
mode: str = "a",
complevel: Optional[int] = None,
complib: Optional[str] = None,
append: bool_t = False,
format: Optional[str] = None,
index: bool_t = True,
min_itemsize: Optional[Union[int, Dict[str, int]]] = None,
nan_rep=None,
dropna: Optional[bool_t] = None,
data_columns: Optional[List[str]] = None,
errors: str = "strict",
encoding: str = "UTF-8",
) -> None:
"""
Write the contained data to an HDF5 file using HDFStore.
Hierarchical Data Format (HDF) is self-describing, allowing an
application to interpret the structure and contents of a file with
no outside information. One HDF file can hold a mix of related objects
which can be accessed as a group or as individual objects.
In order to add another DataFrame or Series to an existing HDF file
please use append mode and a different key.
For more information see the :ref:`user guide <io.hdf5>`.
Parameters
----------
path_or_buf : str or pandas.HDFStore
File path or HDFStore object.
key : str
Identifier for the group in the store.
mode : {'a', 'w', 'r+'}, default 'a'
Mode to open file:
- 'w': write, a new file is created (an existing file with
the same name would be deleted).
- 'a': append, an existing file is opened for reading and
writing, and if the file does not exist it is created.
- 'r+': similar to 'a', but the file must already exist.
complevel : {0-9}, optional
Specifies a compression level for data.
A value of 0 disables compression.
complib : {'zlib', 'lzo', 'bzip2', 'blosc'}, default 'zlib'
Specifies the compression library to be used.
As of v0.20.2 these additional compressors for Blosc are supported
(default if no compressor specified: 'blosc:blosclz'):
{'blosc:blosclz', 'blosc:lz4', 'blosc:lz4hc', 'blosc:snappy',
'blosc:zlib', 'blosc:zstd'}.
Specifying a compression library which is not available issues
a ValueError.
append : bool, default False
For Table formats, append the input data to the existing.
format : {'fixed', 'table', None}, default 'fixed'
Possible values:
- 'fixed': Fixed format. Fast writing/reading. Not-appendable,
nor searchable.
- 'table': Table format. Write as a PyTables Table structure
which may perform worse but allow more flexible operations
like searching / selecting subsets of the data.
- If None, pd.get_option('io.hdf.default_format') is checked,
followed by fallback to "fixed"
errors : str, default 'strict'
Specifies how encoding and decoding errors are to be handled.
See the errors argument for :func:`open` for a full list
of options.
encoding : str, default "UTF-8"
min_itemsize : dict or int, optional
Map column names to minimum string sizes for columns.
nan_rep : Any, optional
How to represent null values as str.
Not allowed with append=True.
data_columns : list of columns or True, optional
List of columns to create as indexed data columns for on-disk
queries, or True to use all columns. By default only the axes
of the object are indexed. See :ref:`io.hdf5-query-data-columns`.
Applicable only to format='table'.
See Also
--------
DataFrame.read_hdf : Read from HDF file.
DataFrame.to_parquet : Write a DataFrame to the binary parquet format.
DataFrame.to_sql : Write to a sql table.
DataFrame.to_feather : Write out feather-format for DataFrames.
DataFrame.to_csv : Write out to a csv file.
Examples
--------
>>> df = pd.DataFrame({'A': [1, 2, 3], 'B': [4, 5, 6]},
... index=['a', 'b', 'c'])
>>> df.to_hdf('data.h5', key='df', mode='w')
We can add another object to the same file:
>>> s = pd.Series([1, 2, 3, 4])
>>> s.to_hdf('data.h5', key='s')
Reading from HDF file:
>>> pd.read_hdf('data.h5', 'df')
A B
a 1 4
b 2 5
c 3 6
>>> pd.read_hdf('data.h5', 's')
0 1
1 2
2 3
3 4
dtype: int64
Deleting file with data:
>>> import os
>>> os.remove('data.h5')
"""
from pandas.io import pytables
pytables.to_hdf(
path_or_buf,
key,
self,
mode=mode,
complevel=complevel,
complib=complib,
append=append,
format=format,
index=index,
min_itemsize=min_itemsize,
nan_rep=nan_rep,
dropna=dropna,
data_columns=data_columns,
errors=errors,
encoding=encoding,
)
def to_sql(
self,
name: str,
con,
schema=None,
if_exists: str = "fail",
index: bool_t = True,
index_label=None,
chunksize=None,
dtype=None,
method=None,
) -> None:
"""
Write records stored in a DataFrame to a SQL database.
Any database supported by SQLAlchemy [1]_ can be used. Tables can be
newly created, appended to, or overwritten.
Parameters
----------
name : str
Name of SQL table.
con : sqlalchemy.engine.Engine or sqlite3.Connection
Using SQLAlchemy makes it possible to use any DB supported by that
library. Legacy support is provided for sqlite3.Connection objects. The user
is responsible for engine disposal and connection closure for the SQLAlchemy
connectable. See `here \
<https://docs.sqlalchemy.org/en/13/core/connections.html>`_.
schema : str, optional
Specify the schema (if database flavor supports this). If None, use
default schema.
if_exists : {'fail', 'replace', 'append'}, default 'fail'
How to behave if the table already exists.
* fail: Raise a ValueError.
* replace: Drop the table before inserting new values.
* append: Insert new values to the existing table.
index : bool, default True
Write DataFrame index as a column. Uses `index_label` as the column
name in the table.
index_label : str or sequence, default None
Column label for index column(s). If None is given (default) and
`index` is True, then the index names are used.
A sequence should be given if the DataFrame uses MultiIndex.
chunksize : int, optional
Specify the number of rows in each batch to be written at a time.
By default, all rows will be written at once.
dtype : dict or scalar, optional
Specifying the datatype for columns. If a dictionary is used, the
keys should be the column names and the values should be the
SQLAlchemy types or strings for the sqlite3 legacy mode. If a
scalar is provided, it will be applied to all columns.
method : {None, 'multi', callable}, optional
Controls the SQL insertion clause used:
* None : Uses standard SQL ``INSERT`` clause (one per row).
* 'multi': Pass multiple values in a single ``INSERT`` clause.
* callable with signature ``(pd_table, conn, keys, data_iter)``.
Details and a sample callable implementation can be found in the
section :ref:`insert method <io.sql.method>`.
.. versionadded:: 0.24.0
Raises
------
ValueError
When the table already exists and `if_exists` is 'fail' (the
default).
See Also
--------
read_sql : Read a DataFrame from a table.
Notes
-----
Timezone aware datetime columns will be written as
``Timestamp with timezone`` type with SQLAlchemy if supported by the
database. Otherwise, the datetimes will be stored as timezone unaware
timestamps local to the original timezone.
.. versionadded:: 0.24.0
References
----------
.. [1] https://docs.sqlalchemy.org
.. [2] https://www.python.org/dev/peps/pep-0249/
Examples
--------
Create an in-memory SQLite database.
>>> from sqlalchemy import create_engine
>>> engine = create_engine('sqlite://', echo=False)
Create a table from scratch with 3 rows.
>>> df = pd.DataFrame({'name' : ['User 1', 'User 2', 'User 3']})
>>> df
name
0 User 1
1 User 2
2 User 3
>>> df.to_sql('users', con=engine)
>>> engine.execute("SELECT * FROM users").fetchall()
[(0, 'User 1'), (1, 'User 2'), (2, 'User 3')]
>>> df1 = pd.DataFrame({'name' : ['User 4', 'User 5']})
>>> df1.to_sql('users', con=engine, if_exists='append')
>>> engine.execute("SELECT * FROM users").fetchall()
[(0, 'User 1'), (1, 'User 2'), (2, 'User 3'),
(0, 'User 4'), (1, 'User 5')]
Overwrite the table with just ``df1``.
>>> df1.to_sql('users', con=engine, if_exists='replace',
... index_label='id')
>>> engine.execute("SELECT * FROM users").fetchall()
[(0, 'User 4'), (1, 'User 5')]
Specify the dtype (especially useful for integers with missing values).
Notice that while pandas is forced to store the data as floating point,
the database supports nullable integers. When fetching the data with
Python, we get back integer scalars.
>>> df = pd.DataFrame({"A": [1, None, 2]})
>>> df
A
0 1.0
1 NaN
2 2.0
>>> from sqlalchemy.types import Integer
>>> df.to_sql('integers', con=engine, index=False,
... dtype={"A": Integer()})
>>> engine.execute("SELECT * FROM integers").fetchall()
[(1,), (None,), (2,)]
"""
from pandas.io import sql
sql.to_sql(
self,
name,
con,
schema=schema,
if_exists=if_exists,
index=index,
index_label=index_label,
chunksize=chunksize,
dtype=dtype,
method=method,
)
def to_pickle(
self,
path,
compression: Optional[str] = "infer",
protocol: int = pickle.HIGHEST_PROTOCOL,
) -> None:
"""
Pickle (serialize) object to file.
Parameters
----------
path : str
File path where the pickled object will be stored.
compression : {'infer', 'gzip', 'bz2', 'zip', 'xz', None}, \
default 'infer'
A string representing the compression to use in the output file. By
default, infers from the file extension in specified path.
protocol : int
Int which indicates which protocol should be used by the pickler,
default HIGHEST_PROTOCOL (see [1]_ paragraph 12.1.2). The possible
values are 0, 1, 2, 3, 4. A negative value for the protocol
parameter is equivalent to setting its value to HIGHEST_PROTOCOL.
.. [1] https://docs.python.org/3/library/pickle.html.
.. versionadded:: 0.21.0.
See Also
--------
read_pickle : Load pickled pandas object (or any object) from file.
DataFrame.to_hdf : Write DataFrame to an HDF5 file.
DataFrame.to_sql : Write DataFrame to a SQL database.
DataFrame.to_parquet : Write a DataFrame to the binary parquet format.
Examples
--------
>>> original_df = pd.DataFrame({"foo": range(5), "bar": range(5, 10)})
>>> original_df
foo bar
0 0 5
1 1 6
2 2 7
3 3 8
4 4 9
>>> original_df.to_pickle("./dummy.pkl")
>>> unpickled_df = pd.read_pickle("./dummy.pkl")
>>> unpickled_df
foo bar
0 0 5
1 1 6
2 2 7
3 3 8
4 4 9
>>> import os
>>> os.remove("./dummy.pkl")
"""
from pandas.io.pickle import to_pickle
to_pickle(self, path, compression=compression, protocol=protocol)
def to_clipboard(
self, excel: bool_t = True, sep: Optional[str] = None, **kwargs
) -> None:
r"""
Copy object to the system clipboard.
Write a text representation of object to the system clipboard.
This can be pasted into Excel, for example.
Parameters
----------
excel : bool, default True
Produce output in a csv format for easy pasting into excel.
- True, use the provided separator for csv pasting.
- False, write a string representation of the object to the clipboard.
sep : str, default ``'\t'``
Field delimiter.
**kwargs
These parameters will be passed to DataFrame.to_csv.
See Also
--------
DataFrame.to_csv : Write a DataFrame to a comma-separated values
(csv) file.
read_clipboard : Read text from clipboard and pass to read_table.
Notes
-----
Requirements for your platform.
- Linux : `xclip`, or `xsel` (with `PyQt4` modules)
- Windows : none
- OS X : none
Examples
--------
Copy the contents of a DataFrame to the clipboard.
>>> df = pd.DataFrame([[1, 2, 3], [4, 5, 6]], columns=['A', 'B', 'C'])
>>> df.to_clipboard(sep=',')
... # Wrote the following to the system clipboard:
... # ,A,B,C
... # 0,1,2,3
... # 1,4,5,6
We can omit the index by passing the keyword `index` and setting
it to false.
>>> df.to_clipboard(sep=',', index=False)
... # Wrote the following to the system clipboard:
... # A,B,C
... # 1,2,3
... # 4,5,6
"""
from pandas.io import clipboards
clipboards.to_clipboard(self, excel=excel, sep=sep, **kwargs)
def to_xarray(self):
"""
Return an xarray object from the pandas object.
Returns
-------
xarray.DataArray or xarray.Dataset
Data in the pandas structure converted to Dataset if the object is
a DataFrame, or a DataArray if the object is a Series.
See Also
--------
DataFrame.to_hdf : Write DataFrame to an HDF5 file.
DataFrame.to_parquet : Write a DataFrame to the binary parquet format.
Notes
-----
See the `xarray docs <https://xarray.pydata.org/en/stable/>`__
Examples
--------
>>> df = pd.DataFrame([('falcon', 'bird', 389.0, 2),
... ('parrot', 'bird', 24.0, 2),
... ('lion', 'mammal', 80.5, 4),
... ('monkey', 'mammal', np.nan, 4)],
... columns=['name', 'class', 'max_speed',
... 'num_legs'])
>>> df
name class max_speed num_legs
0 falcon bird 389.0 2
1 parrot bird 24.0 2
2 lion mammal 80.5 4
3 monkey mammal NaN 4
>>> df.to_xarray()
<xarray.Dataset>
Dimensions: (index: 4)
Coordinates:
* index (index) int64 0 1 2 3
Data variables:
name (index) object 'falcon' 'parrot' 'lion' 'monkey'
class (index) object 'bird' 'bird' 'mammal' 'mammal'
max_speed (index) float64 389.0 24.0 80.5 nan
num_legs (index) int64 2 2 4 4
>>> df['max_speed'].to_xarray()
<xarray.DataArray 'max_speed' (index: 4)>
array([389. , 24. , 80.5, nan])
Coordinates:
* index (index) int64 0 1 2 3
>>> dates = pd.to_datetime(['2018-01-01', '2018-01-01',
... '2018-01-02', '2018-01-02'])
>>> df_multiindex = pd.DataFrame({'date': dates,
... 'animal': ['falcon', 'parrot',
... 'falcon', 'parrot'],
... 'speed': [350, 18, 361, 15]})
>>> df_multiindex = df_multiindex.set_index(['date', 'animal'])
>>> df_multiindex
speed
date animal
2018-01-01 falcon 350
parrot 18
2018-01-02 falcon 361
parrot 15
>>> df_multiindex.to_xarray()
<xarray.Dataset>
Dimensions: (animal: 2, date: 2)
Coordinates:
* date (date) datetime64[ns] 2018-01-01 2018-01-02
* animal (animal) object 'falcon' 'parrot'
Data variables:
speed (date, animal) int64 350 18 361 15
"""
xarray = import_optional_dependency("xarray")
if self.ndim == 1:
return xarray.DataArray.from_series(self)
else:
return xarray.Dataset.from_dataframe(self)
@Substitution(returns=fmt.return_docstring)
def to_latex(
self,
buf=None,
columns=None,
col_space=None,
header=True,
index=True,
na_rep="NaN",
formatters=None,
float_format=None,
sparsify=None,
index_names=True,
bold_rows=False,
column_format=None,
longtable=None,
escape=None,
encoding=None,
decimal=".",
multicolumn=None,
multicolumn_format=None,
multirow=None,
caption=None,
label=None,
):
r"""
Render object to a LaTeX tabular, longtable, or nested table/tabular.
Requires ``\usepackage{booktabs}``. The output can be copy/pasted
into a main LaTeX document or read from an external file
with ``\input{table.tex}``.
.. versionchanged:: 0.20.2
Added to Series.
.. versionchanged:: 1.0.0
Added caption and label arguments.
Parameters
----------
buf : str, Path or StringIO-like, optional, default None
Buffer to write to. If None, the output is returned as a string.
columns : list of label, optional
The subset of columns to write. Writes all columns by default.
col_space : int, optional
The minimum width of each column.
header : bool or list of str, default True
Write out the column names. If a list of strings is given,
it is assumed to be aliases for the column names.
index : bool, default True
Write row names (index).
na_rep : str, default 'NaN'
Missing data representation.
formatters : list of functions or dict of {str: function}, optional
Formatter functions to apply to columns' elements by position or
name. The result of each function must be a unicode string.
List must be of length equal to the number of columns.
float_format : one-parameter function or str, optional, default None
Formatter for floating point numbers. For example
``float_format="%%.2f"`` and ``float_format="{:0.2f}".format`` will
both result in 0.1234 being formatted as 0.12.
sparsify : bool, optional
Set to False for a DataFrame with a hierarchical index to print
every multiindex key at each row. By default, the value will be
read from the config module.
index_names : bool, default True
Prints the names of the indexes.
bold_rows : bool, default False
Make the row labels bold in the output.
column_format : str, optional
The columns format as specified in `LaTeX table format
<https://en.wikibooks.org/wiki/LaTeX/Tables>`__ e.g. 'rcl' for 3
columns. By default, 'l' will be used for all columns except
columns of numbers, which default to 'r'.
longtable : bool, optional
By default, the value will be read from the pandas config
module. Use a longtable environment instead of tabular. Requires
adding a \usepackage{longtable} to your LaTeX preamble.
escape : bool, optional
By default, the value will be read from the pandas config
module. When set to False prevents from escaping latex special
characters in column names.
encoding : str, optional
A string representing the encoding to use in the output file,
defaults to 'utf-8'.
decimal : str, default '.'
Character recognized as decimal separator, e.g. ',' in Europe.
multicolumn : bool, default True
Use \multicolumn to enhance MultiIndex columns.
The default will be read from the config module.
multicolumn_format : str, default 'l'
The alignment for multicolumns, similar to `column_format`
The default will be read from the config module.
multirow : bool, default False
Use \multirow to enhance MultiIndex rows. Requires adding a
\usepackage{multirow} to your LaTeX preamble. Will print
centered labels (instead of top-aligned) across the contained
rows, separating groups via clines. The default will be read
from the pandas config module.
caption : str, optional
The LaTeX caption to be placed inside ``\caption{}`` in the output.
.. versionadded:: 1.0.0
label : str, optional
The LaTeX label to be placed inside ``\label{}`` in the output.
This is used with ``\ref{}`` in the main ``.tex`` file.
.. versionadded:: 1.0.0
%(returns)s
See Also
--------
DataFrame.to_string : Render a DataFrame to a console-friendly
tabular output.
DataFrame.to_html : Render a DataFrame as an HTML table.
Examples
--------
>>> df = pd.DataFrame({'name': ['Raphael', 'Donatello'],
... 'mask': ['red', 'purple'],
... 'weapon': ['sai', 'bo staff']})
>>> print(df.to_latex(index=False)) # doctest: +NORMALIZE_WHITESPACE
\begin{tabular}{lll}
\toprule
name & mask & weapon \\
\midrule
Raphael & red & sai \\
Donatello & purple & bo staff \\
\bottomrule
\end{tabular}
"""
# Get defaults from the pandas config
if self.ndim == 1:
self = self.to_frame()
if longtable is None:
longtable = config.get_option("display.latex.longtable")
if escape is None:
escape = config.get_option("display.latex.escape")
if multicolumn is None:
multicolumn = config.get_option("display.latex.multicolumn")
if multicolumn_format is None:
multicolumn_format = config.get_option("display.latex.multicolumn_format")
if multirow is None:
multirow = config.get_option("display.latex.multirow")
formatter = DataFrameFormatter(
self,
columns=columns,
col_space=col_space,
na_rep=na_rep,
header=header,
index=index,
formatters=formatters,
float_format=float_format,
bold_rows=bold_rows,
sparsify=sparsify,
index_names=index_names,
escape=escape,
decimal=decimal,
)
return formatter.to_latex(
buf=buf,
column_format=column_format,
longtable=longtable,
encoding=encoding,
multicolumn=multicolumn,
multicolumn_format=multicolumn_format,
multirow=multirow,
caption=caption,
label=label,
)
def to_csv(
self,
path_or_buf: Optional[FilePathOrBuffer] = None,
sep: str = ",",
na_rep: str = "",
float_format: Optional[str] = None,
columns: Optional[Sequence[Label]] = None,
header: Union[bool_t, List[str]] = True,
index: bool_t = True,
index_label: Optional[Union[bool_t, str, Sequence[Label]]] = None,
mode: str = "w",
encoding: Optional[str] = None,
compression: Optional[Union[str, Mapping[str, str]]] = "infer",
quoting: Optional[int] = None,
quotechar: str = '"',
line_terminator: Optional[str] = None,
chunksize: Optional[int] = None,
date_format: Optional[str] = None,
doublequote: bool_t = True,
escapechar: Optional[str] = None,
decimal: Optional[str] = ".",
) -> Optional[str]:
r"""
Write object to a comma-separated values (csv) file.
.. versionchanged:: 0.24.0
The order of arguments for Series was changed.
Parameters
----------
path_or_buf : str or file handle, default None
File path or object; if None is provided the result is returned as
a string. If a file object is passed it should be opened with
`newline=''`, disabling universal newlines.
.. versionchanged:: 0.24.0
Was previously named "path" for Series.
sep : str, default ','
String of length 1. Field delimiter for the output file.
na_rep : str, default ''
Missing data representation.
float_format : str, default None
Format string for floating point numbers.
columns : sequence, optional
Columns to write.
header : bool or list of str, default True
Write out the column names. If a list of strings is given it is
assumed to be aliases for the column names.
.. versionchanged:: 0.24.0
Previously defaulted to False for Series.
index : bool, default True
Write row names (index).
index_label : str or sequence, or False, default None
Column label for index column(s) if desired. If None is given, and
`header` and `index` are True, then the index names are used. A
sequence should be given if the object uses MultiIndex. If
False do not print fields for index names. Use index_label=False
for easier importing in R.
mode : str
Python write mode, default 'w'.
encoding : str, optional
A string representing the encoding to use in the output file,
defaults to 'utf-8'.
compression : str or dict, default 'infer'
If str, represents compression mode. If dict, value at 'method' is
the compression mode. Compression mode may be any of the following
possible values: {'infer', 'gzip', 'bz2', 'zip', 'xz', None}. If
compression mode is 'infer' and `path_or_buf` is path-like, then
detect compression mode from the following extensions: '.gz',
'.bz2', '.zip' or '.xz'. (otherwise no compression). If dict given
and mode is 'zip' or inferred as 'zip', other entries passed as
additional compression options.
.. versionchanged:: 1.0.0
May now be a dict with key 'method' as compression mode
and other entries as additional compression options if
compression mode is 'zip'.
quoting : optional constant from csv module
Defaults to csv.QUOTE_MINIMAL. If you have set a `float_format`
then floats are converted to strings and thus csv.QUOTE_NONNUMERIC
will treat them as non-numeric.
quotechar : str, default '\"'
String of length 1. Character used to quote fields.
line_terminator : str, optional
The newline character or character sequence to use in the output
file. Defaults to `os.linesep`, which depends on the OS in which
this method is called (e.g. '\n' for Linux, '\r\n' for Windows).
.. versionchanged:: 0.24.0
chunksize : int or None
Rows to write at a time.
date_format : str, default None
Format string for datetime objects.
doublequote : bool, default True
Control quoting of `quotechar` inside a field.
escapechar : str, default None
String of length 1. Character used to escape `sep` and `quotechar`
when appropriate.
decimal : str, default '.'
Character recognized as decimal separator. E.g. use ',' for
European data.
Returns
-------
None or str
If path_or_buf is None, returns the resulting csv format as a
string. Otherwise returns None.
See Also
--------
read_csv : Load a CSV file into a DataFrame.
to_excel : Write DataFrame to an Excel file.
Examples
--------
>>> df = pd.DataFrame({'name': ['Raphael', 'Donatello'],
... 'mask': ['red', 'purple'],
... 'weapon': ['sai', 'bo staff']})
>>> df.to_csv(index=False)
'name,mask,weapon\nRaphael,red,sai\nDonatello,purple,bo staff\n'
Create 'out.zip' containing 'out.csv'
>>> compression_opts = dict(method='zip',
... archive_name='out.csv') # doctest: +SKIP
>>> df.to_csv('out.zip', index=False,
... compression=compression_opts) # doctest: +SKIP
"""
df = self if isinstance(self, ABCDataFrame) else self.to_frame()
from pandas.io.formats.csvs import CSVFormatter
formatter = CSVFormatter(
df,
path_or_buf,
line_terminator=line_terminator,
sep=sep,
encoding=encoding,
compression=compression,
quoting=quoting,
na_rep=na_rep,
float_format=float_format,
cols=columns,
header=header,
index=index,
index_label=index_label,
mode=mode,
chunksize=chunksize,
quotechar=quotechar,
date_format=date_format,
doublequote=doublequote,
escapechar=escapechar,
decimal=decimal,
)
formatter.save()
if path_or_buf is None:
return formatter.path_or_buf.getvalue()
return None
# ----------------------------------------------------------------------
# Lookup Caching
def _set_as_cached(self, item, cacher) -> None:
"""
Set the _cacher attribute on the calling object with a weakref to
cacher.
"""
self._cacher = (item, weakref.ref(cacher))
def _reset_cacher(self) -> None:
"""
Reset the cacher.
"""
if hasattr(self, "_cacher"):
del self._cacher
def _maybe_cache_changed(self, item, value) -> None:
"""
The object has called back to us saying maybe it has changed.
"""
self._data.set(item, value)
@property
def _is_cached(self) -> bool_t:
"""Return boolean indicating if self is cached or not."""
return getattr(self, "_cacher", None) is not None
def _get_cacher(self):
"""return my cacher or None"""
cacher = getattr(self, "_cacher", None)
if cacher is not None:
cacher = cacher[1]()
return cacher
def _maybe_update_cacher(
self, clear: bool_t = False, verify_is_copy: bool_t = True
) -> None:
"""
See if we need to update our parent cacher if clear, then clear our
cache.
Parameters
----------
clear : bool, default False
Clear the item cache.
verify_is_copy : bool, default True
Provide is_copy checks.
"""
cacher = getattr(self, "_cacher", None)
if cacher is not None:
ref = cacher[1]()
# we are trying to reference a dead referent, hence
# a copy
if ref is None:
del self._cacher
else:
# Note: we need to call ref._maybe_cache_changed even in the
# case where it will raise. (The exact reason is unclear.)
try:
ref._maybe_cache_changed(cacher[0], self)
except AssertionError:
# ref._data.setitem can raise
# AssertionError because of shape mismatch
pass
if verify_is_copy:
self._check_setitem_copy(stacklevel=5, t="referant")
if clear:
self._clear_item_cache()
def _clear_item_cache(self) -> None:
self._item_cache.clear()
# ----------------------------------------------------------------------
# Indexing Methods
def take(
self: FrameOrSeries, indices, axis=0, is_copy: Optional[bool_t] = None, **kwargs
) -> FrameOrSeries:
"""
Return the elements in the given *positional* indices along an axis.
This means that we are not indexing according to actual values in
the index attribute of the object. We are indexing according to the
actual position of the element in the object.
Parameters
----------
indices : array-like
An array of ints indicating which positions to take.
axis : {0 or 'index', 1 or 'columns', None}, default 0
The axis on which to select elements. ``0`` means that we are
selecting rows, ``1`` means that we are selecting columns.
is_copy : bool
Before pandas 1.0, ``is_copy=False`` can be specified to ensure
that the return value is an actual copy. Starting with pandas 1.0,
``take`` always returns a copy, and the keyword is therefore
deprecated.
.. deprecated:: 1.0.0
**kwargs
For compatibility with :meth:`numpy.take`. Has no effect on the
output.
Returns
-------
taken : same type as caller
An array-like containing the elements taken from the object.
See Also
--------
DataFrame.loc : Select a subset of a DataFrame by labels.
DataFrame.iloc : Select a subset of a DataFrame by positions.
numpy.take : Take elements from an array along an axis.
Examples
--------
>>> df = pd.DataFrame([('falcon', 'bird', 389.0),
... ('parrot', 'bird', 24.0),
... ('lion', 'mammal', 80.5),
... ('monkey', 'mammal', np.nan)],
... columns=['name', 'class', 'max_speed'],
... index=[0, 2, 3, 1])
>>> df
name class max_speed
0 falcon bird 389.0
2 parrot bird 24.0
3 lion mammal 80.5
1 monkey mammal NaN
Take elements at positions 0 and 3 along the axis 0 (default).
Note how the actual indices selected (0 and 1) do not correspond to
our selected indices 0 and 3. That's because we are selecting the 0th
and 3rd rows, not rows whose indices equal 0 and 3.
>>> df.take([0, 3])
name class max_speed
0 falcon bird 389.0
1 monkey mammal NaN
Take elements at indices 1 and 2 along the axis 1 (column selection).
>>> df.take([1, 2], axis=1)
class max_speed
0 bird 389.0
2 bird 24.0
3 mammal 80.5
1 mammal NaN
We may take elements using negative integers for positive indices,
starting from the end of the object, just like with Python lists.
>>> df.take([-1, -2])
name class max_speed
1 monkey mammal NaN
3 lion mammal 80.5
"""
if is_copy is not None:
warnings.warn(
"is_copy is deprecated and will be removed in a future version. "
"'take' always returns a copy, so there is no need to specify this.",
FutureWarning,
stacklevel=2,
)
nv.validate_take(tuple(), kwargs)
self._consolidate_inplace()
new_data = self._data.take(
indices, axis=self._get_block_manager_axis(axis), verify=True
)
return self._constructor(new_data).__finalize__(self)
def _take_with_is_copy(self: FrameOrSeries, indices, axis=0) -> FrameOrSeries:
"""
Internal version of the `take` method that sets the `_is_copy`
attribute to keep track of the parent dataframe (used in indexing
for the SettingWithCopyWarning).
See the docstring of `take` for full explanation of the parameters.
"""
result = self.take(indices=indices, axis=axis)
# Only flag the result as a copy if the take actually changed the axis labels.
if not result._get_axis(axis).equals(self._get_axis(axis)):
result._set_is_copy(self)
return result
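# Illustrative sketch (added comment, not in the original source): the weakref
# set via _set_is_copy is what later lets _check_setitem_copy flag chained
# assignment. The exact code path depends on the indexer, but conceptually:
#
#   df = pd.DataFrame({"a": [1, 2, 3]})
#   subset = df._take_with_is_copy([0, 2])   # subset._is_copy -> weakref(df)
#   subset["a"] = 0                          # may emit SettingWithCopyWarning
#                                            # (or raise, depending on the
#                                            # mode.chained_assignment option)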
def xs(self, key, axis=0, level=None, drop_level: bool_t = True):
"""
Return cross-section from the Series/DataFrame.
This method takes a `key` argument to select data at a particular
level of a MultiIndex.
Parameters
----------
key : label or tuple of label
Label contained in the index, or partially in a MultiIndex.
axis : {0 or 'index', 1 or 'columns'}, default 0
Axis to retrieve cross-section on.
level : object, defaults to first n levels (n=1 or len(key))
In case of a key partially contained in a MultiIndex, indicate
which levels are used. Levels can be referred by label or position.
drop_level : bool, default True
If False, returns object with same levels as self.
Returns
-------
Series or DataFrame
Cross-section from the original Series or DataFrame
corresponding to the selected index levels.
See Also
--------
DataFrame.loc : Access a group of rows and columns
by label(s) or a boolean array.
DataFrame.iloc : Purely integer-location based indexing
for selection by position.
Notes
-----
`xs` cannot be used to set values.
MultiIndex Slicers is a generic way to get/set values on
any level or levels.
It is a superset of `xs` functionality, see
:ref:`MultiIndex Slicers <advanced.mi_slicers>`.
Examples
--------
>>> d = {'num_legs': [4, 4, 2, 2],
... 'num_wings': [0, 0, 2, 2],
... 'class': ['mammal', 'mammal', 'mammal', 'bird'],
... 'animal': ['cat', 'dog', 'bat', 'penguin'],
... 'locomotion': ['walks', 'walks', 'flies', 'walks']}
>>> df = pd.DataFrame(data=d)
>>> df = df.set_index(['class', 'animal', 'locomotion'])
>>> df
num_legs num_wings
class animal locomotion
mammal cat walks 4 0
dog walks 4 0
bat flies 2 2
bird penguin walks 2 2
Get values at specified index
>>> df.xs('mammal')
num_legs num_wings
animal locomotion
cat walks 4 0
dog walks 4 0
bat flies 2 2
Get values at several indexes
>>> df.xs(('mammal', 'dog'))
num_legs num_wings
locomotion
walks 4 0
Get values at specified index and level
>>> df.xs('cat', level=1)
num_legs num_wings
class locomotion
mammal walks 4 0
Get values at several indexes and levels
>>> df.xs(('bird', 'walks'),
... level=[0, 'locomotion'])
num_legs num_wings
animal
penguin 2 2
Get values at specified column and axis
>>> df.xs('num_wings', axis=1)
class animal locomotion
mammal cat walks 0
dog walks 0
bat flies 2
bird penguin walks 2
Name: num_wings, dtype: int64
"""
axis = self._get_axis_number(axis)
labels = self._get_axis(axis)
if level is not None:
loc, new_ax = labels.get_loc_level(key, level=level, drop_level=drop_level)
# create the tuple of the indexer
_indexer = [slice(None)] * self.ndim
_indexer[axis] = loc
indexer = tuple(_indexer)
result = self.iloc[indexer]
setattr(result, result._get_axis_name(axis), new_ax)
return result
if axis == 1:
return self[key]
self._consolidate_inplace()
index = self.index
if isinstance(index, MultiIndex):
loc, new_index = self.index.get_loc_level(key, drop_level=drop_level)
else:
loc = self.index.get_loc(key)
if isinstance(loc, np.ndarray):
if loc.dtype == np.bool_:
(inds,) = loc.nonzero()
return self._take_with_is_copy(inds, axis=axis)
else:
return self._take_with_is_copy(loc, axis=axis)
if not is_scalar(loc):
new_index = self.index[loc]
if is_scalar(loc):
# In this case loc should be an integer
if self.ndim == 1:
# if we encounter an array-like and we only have 1 dim
# that means that there are lists/ndarrays inside the Series!
# so just return them (GH 6394)
return self._values[loc]
new_values = self._data.fast_xs(loc)
result = self._constructor_sliced(
new_values,
index=self.columns,
name=self.index[loc],
dtype=new_values.dtype,
)
else:
result = self.iloc[loc]
result.index = new_index
# this could be a view
# but only in a single-dtyped view sliceable case
result._set_is_copy(self, copy=not result._is_view)
return result
_xs: Callable = xs
def __getitem__(self, item):
raise AbstractMethodError(self)
def _get_item_cache(self, item):
"""Return the cached item, item represents a label indexer."""
cache = self._item_cache
res = cache.get(item)
if res is None:
values = self._data.get(item)
res = self._box_item_values(item, values)
cache[item] = res
res._set_as_cached(item, self)
# propagate the copy flag along a chain of cached items
res._is_copy = self._is_copy
return res
def _box_item_values(self, key, values):
raise AbstractMethodError(self)
def _slice(self: FrameOrSeries, slobj: slice, axis=0) -> FrameOrSeries:
"""
Construct a slice of this container.
Slicing with this method is *always* positional.
"""
assert isinstance(slobj, slice), type(slobj)
axis = self._get_block_manager_axis(axis)
result = self._constructor(self._data.get_slice(slobj, axis=axis))
result = result.__finalize__(self)
# this could be a view
# but only in a single-dtyped view sliceable case
is_copy = axis != 0 or result._is_view
result._set_is_copy(self, copy=is_copy)
return result
def _set_item(self, key, value) -> None:
self._data.set(key, value)
self._clear_item_cache()
def _set_is_copy(self, ref, copy: bool_t = True) -> None:
if not copy:
self._is_copy = None
else:
assert ref is not None
self._is_copy = weakref.ref(ref)
def _check_is_chained_assignment_possible(self) -> bool_t:
"""
Check if we are a view, have a cacher, and are of mixed type.
If so, then force a setitem_copy check.
Should be called just prior to setting a value.
Returns True if we are a view and are cached (in which case the
cacher should be updated following the setting), False otherwise.
"""
if self._is_view and self._is_cached:
ref = self._get_cacher()
if ref is not None and ref._is_mixed_type:
self._check_setitem_copy(stacklevel=4, t="referant", force=True)
return True
elif self._is_copy:
self._check_setitem_copy(stacklevel=4, t="referant")
return False
def _check_setitem_copy(self, stacklevel=4, t="setting", force=False):
"""
Parameters
----------
stacklevel : int, default 4
the level to show of the stack when the error is output
t : str, the type of setting error
force : bool, default False
If True, then force showing an error.
validate if we are doing a setitem on a chained copy.
If you call this function, be sure to set the stacklevel such that the
user will see the error *at the level of setting*
It is technically possible to figure out that we are setting on
a copy even WITH a multi-dtyped pandas object. In other words, some
blocks may be views while others are not. Currently _is_view will ALWAYS
return False for multi-blocks to avoid having to handle this case.
df = DataFrame(np.arange(0,9), columns=['count'])
df['group'] = 'b'
# This technically need not raise SettingWithCopy if both are views
# (which is not generally guaranteed but is usually True). However,
# this is in general not a good practice and we recommend using .loc.
df.iloc[0:5]['group'] = 'a'
"""
# return early if the check is not needed
if not (force or self._is_copy):
return
value = config.get_option("mode.chained_assignment")
if value is None:
return
# see if the copy is not actually referred; if so, then dissolve
# the copy weakref
if self._is_copy is not None and not isinstance(self._is_copy, str):
r = self._is_copy()
if not gc.get_referents(r) or r.shape == self.shape:
self._is_copy = None
return
# a custom message
if isinstance(self._is_copy, str):
t = self._is_copy
elif t == "referant":
t = (
"\n"
"A value is trying to be set on a copy of a slice from a "
"DataFrame\n\n"
"See the caveats in the documentation: "
"https://pandas.pydata.org/pandas-docs/stable/user_guide/"
"indexing.html#returning-a-view-versus-a-copy"
)
else:
t = (
"\n"
"A value is trying to be set on a copy of a slice from a "
"DataFrame.\n"
"Try using .loc[row_indexer,col_indexer] = value "
"instead\n\nSee the caveats in the documentation: "
"https://pandas.pydata.org/pandas-docs/stable/user_guide/"
"indexing.html#returning-a-view-versus-a-copy"
)
if value == "raise":
raise com.SettingWithCopyError(t)
elif value == "warn":
warnings.warn(t, com.SettingWithCopyWarning, stacklevel=stacklevel)
def __delitem__(self, key) -> None:
"""
Delete item
"""
deleted = False
maybe_shortcut = False
if self.ndim == 2 and isinstance(self.columns, MultiIndex):
try:
maybe_shortcut = key not in self.columns._engine
except TypeError:
pass
if maybe_shortcut:
# Allow shorthand to delete all columns whose first len(key)
# elements match key:
if not isinstance(key, tuple):
key = (key,)
for col in self.columns:
if isinstance(col, tuple) and col[: len(key)] == key:
del self[col]
deleted = True
if not deleted:
# If the above loop ran and didn't delete anything because
# there was no match, this call should raise the appropriate
# exception:
self._data.delete(key)
# delete from the caches
try:
del self._item_cache[key]
except KeyError:
pass
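# Illustrative sketch (added comment, not in the original source): the
# MultiIndex shorthand above lets ``del`` remove every column under a
# top-level key, e.g.:
#
#   cols = pd.MultiIndex.from_tuples([("a", "x"), ("a", "y"), ("b", "x")])
#   df = pd.DataFrame([[1, 2, 3]], columns=cols)
#   del df["a"]          # drops both ("a", "x") and ("a", "y")
#   list(df.columns)     # -> [("b", "x")]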
# ----------------------------------------------------------------------
# Unsorted
def get(self, key, default=None):
"""
Get item from object for given key (ex: DataFrame column).
Returns default value if not found.
Parameters
----------
key : object
Returns
-------
value : same type as items contained in object
"""
try:
return self[key]
except (KeyError, ValueError, IndexError):
return default
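# Illustrative sketch (added comment, not in the original source): ``get``
# mirrors ``dict.get`` for axis labels, e.g.:
#
#   df = pd.DataFrame({"a": [1, 2]})
#   df.get("a")           # returns the "a" column
#   df.get("missing", 0)  # label not found -> returns the default, 0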
@property
def _is_view(self) -> bool_t:
"""Return boolean indicating if self is view of another array """
return self._data.is_view
def reindex_like(
self: FrameOrSeries,
other,
method: Optional[str] = None,
copy: bool_t = True,
limit=None,
tolerance=None,
) -> FrameOrSeries:
"""
Return an object with matching indices as other object.
Conform the object to the same index on all axes. Optional
filling logic, placing NaN in locations having no value
in the previous index. A new object is produced unless the
new index is equivalent to the current one and copy=False.
Parameters
----------
other : Object of the same data type
Its row and column indices are used to define the new indices
of this object.
method : {None, 'backfill'/'bfill', 'pad'/'ffill', 'nearest'}
Method to use for filling holes in reindexed DataFrame.
Please note: this is only applicable to DataFrames/Series with a
monotonically increasing/decreasing index.
* None (default): don't fill gaps
* pad / ffill: propagate last valid observation forward to next
valid
* backfill / bfill: use next valid observation to fill gap
* nearest: use nearest valid observations to fill gap.
copy : bool, default True
Return a new object, even if the passed indexes are the same.
limit : int, default None
Maximum number of consecutive labels to fill for inexact matches.
tolerance : optional
Maximum distance between original and new labels for inexact
matches. The values of the index at the matching locations must
satisfy the equation ``abs(index[indexer] - target) <= tolerance``.
Tolerance may be a scalar value, which applies the same tolerance
to all values, or list-like, which applies variable tolerance per
element. List-like includes list, tuple, array, Series, and must be
the same size as the index and its dtype must exactly match the
index's type.
.. versionadded:: 0.21.0 (list-like tolerance)
Returns
-------
Series or DataFrame
Same type as caller, but with changed indices on each axis.
See Also
--------
DataFrame.set_index : Set row labels.
DataFrame.reset_index : Remove row labels or move them to new columns.
DataFrame.reindex : Change to new indices or expand indices.
Notes
-----
Same as calling
``.reindex(index=other.index, columns=other.columns,...)``.
Examples
--------
>>> df1 = pd.DataFrame([[24.3, 75.7, 'high'],
... [31, 87.8, 'high'],
... [22, 71.6, 'medium'],
... [35, 95, 'medium']],
... columns=['temp_celsius', 'temp_fahrenheit',
... 'windspeed'],
... index=pd.date_range(start='2014-02-12',
... end='2014-02-15', freq='D'))
>>> df1
temp_celsius temp_fahrenheit windspeed
2014-02-12 24.3 75.7 high
2014-02-13 31.0 87.8 high
2014-02-14 22.0 71.6 medium
2014-02-15 35.0 95.0 medium
>>> df2 = pd.DataFrame([[28, 'low'],
... [30, 'low'],
... [35.1, 'medium']],
... columns=['temp_celsius', 'windspeed'],
... index=pd.DatetimeIndex(['2014-02-12', '2014-02-13',
... '2014-02-15']))
>>> df2
temp_celsius windspeed
2014-02-12 28.0 low
2014-02-13 30.0 low
2014-02-15 35.1 medium
>>> df2.reindex_like(df1)
temp_celsius temp_fahrenheit windspeed
2014-02-12 28.0 NaN low
2014-02-13 30.0 NaN low
2014-02-14 NaN NaN NaN
2014-02-15 35.1 NaN medium
"""
d = other._construct_axes_dict(
axes=self._AXIS_ORDERS,
method=method,
copy=copy,
limit=limit,
tolerance=tolerance,
)
return self.reindex(**d)
def drop(
self,
labels=None,
axis=0,
index=None,
columns=None,
level=None,
inplace: bool_t = False,
errors: str = "raise",
):
inplace = validate_bool_kwarg(inplace, "inplace")
if labels is not None:
if index is not None or columns is not None:
raise ValueError("Cannot specify both 'labels' and 'index'/'columns'")
axis_name = self._get_axis_name(axis)
axes = {axis_name: labels}
elif index is not None or columns is not None:
axes, _ = self._construct_axes_from_arguments((index, columns), {})
else:
raise ValueError(
"Need to specify at least one of 'labels', 'index' or 'columns'"
)
obj = self
for axis, labels in axes.items():
if labels is not None:
obj = obj._drop_axis(labels, axis, level=level, errors=errors)
if inplace:
self._update_inplace(obj)
else:
return obj
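# Illustrative sketch (added comment, not in the original source): the two
# calling conventions handled above are equivalent, e.g.:
#
#   df = pd.DataFrame({"a": [1, 2], "b": [3, 4]})
#   df.drop(columns=["b"])   # index=/columns= keywords
#   df.drop(["b"], axis=1)   # labels plus axis
#
# Passing both ``labels`` and ``index``/``columns`` raises ValueError, as does
# passing none of the three.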
def _drop_axis(
self: FrameOrSeries, labels, axis, level=None, errors: str = "raise"
) -> FrameOrSeries:
"""
Drop labels from specified axis. Used in the ``drop`` method
internally.
Parameters
----------
labels : single label or list-like
axis : int or axis name
level : int or level name, default None
For MultiIndex
errors : {'ignore', 'raise'}, default 'raise'
If 'ignore', suppress error and existing labels are dropped.
"""
axis = self._get_axis_number(axis)
axis_name = self._get_axis_name(axis)
axis = self._get_axis(axis)
if axis.is_unique:
if level is not None:
if not isinstance(axis, MultiIndex):
raise AssertionError("axis must be a MultiIndex")
new_axis = axis.drop(labels, level=level, errors=errors)
else:
new_axis = axis.drop(labels, errors=errors)
result = self.reindex(**{axis_name: new_axis})
# Case for non-unique axis
else:
labels = ensure_object(com.index_labels_to_array(labels))
if level is not None:
if not isinstance(axis, MultiIndex):
raise AssertionError("axis must be a MultiIndex")
indexer = ~axis.get_level_values(level).isin(labels)
# GH 18561 MultiIndex.drop should raise if label is absent
if errors == "raise" and indexer.all():
raise KeyError(f"{labels} not found in axis")
else:
indexer = ~axis.isin(labels)
# Check if label doesn't exist along axis
labels_missing = (axis.get_indexer_for(labels) == -1).any()
if errors == "raise" and labels_missing:
raise KeyError(f"{labels} not found in axis")
slicer = [slice(None)] * self.ndim
slicer[self._get_axis_number(axis_name)] = indexer
result = self.loc[tuple(slicer)]
return result
def _update_inplace(self, result, verify_is_copy: bool_t = True) -> None:
"""
Replace self internals with result.
Parameters
----------
verify_is_copy : bool, default True
Provide is_copy checks.
"""
# NOTE: This does *not* call __finalize__ and that's an explicit
# decision that we may revisit in the future.
self._reset_cache()
self._clear_item_cache()
self._data = getattr(result, "_data", result)
self._maybe_update_cacher(verify_is_copy=verify_is_copy)
def add_prefix(self: FrameOrSeries, prefix: str) -> FrameOrSeries:
"""
Prefix labels with string `prefix`.
For Series, the row labels are prefixed.
For DataFrame, the column labels are prefixed.
Parameters
----------
prefix : str
The string to add before each label.
Returns
-------
Series or DataFrame
New Series or DataFrame with updated labels.
See Also
--------
Series.add_suffix: Suffix row labels with string `suffix`.
DataFrame.add_suffix: Suffix column labels with string `suffix`.
Examples
--------
>>> s = pd.Series([1, 2, 3, 4])
>>> s
0 1
1 2
2 3
3 4
dtype: int64
>>> s.add_prefix('item_')
item_0 1
item_1 2
item_2 3
item_3 4
dtype: int64
>>> df = pd.DataFrame({'A': [1, 2, 3, 4], 'B': [3, 4, 5, 6]})
>>> df
A B
0 1 3
1 2 4
2 3 5
3 4 6
>>> df.add_prefix('col_')
col_A col_B
0 1 3
1 2 4
2 3 5
3 4 6
"""
f = functools.partial("{prefix}{}".format, prefix=prefix)
mapper = {self._info_axis_name: f}
return self.rename(**mapper) # type: ignore
def add_suffix(self: FrameOrSeries, suffix: str) -> FrameOrSeries:
"""
Suffix labels with string `suffix`.
For Series, the row labels are suffixed.
For DataFrame, the column labels are suffixed.
Parameters
----------
suffix : str
The string to add after each label.
Returns
-------
Series or DataFrame
New Series or DataFrame with updated labels.
See Also
--------
Series.add_prefix: Prefix row labels with string `prefix`.
DataFrame.add_prefix: Prefix column labels with string `prefix`.
Examples
--------
>>> s = pd.Series([1, 2, 3, 4])
>>> s
0 1
1 2
2 3
3 4
dtype: int64
>>> s.add_suffix('_item')
0_item 1
1_item 2
2_item 3
3_item 4
dtype: int64
>>> df = pd.DataFrame({'A': [1, 2, 3, 4], 'B': [3, 4, 5, 6]})
>>> df
A B
0 1 3
1 2 4
2 3 5
3 4 6
>>> df.add_suffix('_col')
A_col B_col
0 1 3
1 2 4
2 3 5
3 4 6
"""
f = functools.partial("{}{suffix}".format, suffix=suffix)
mapper = {self._info_axis_name: f}
return self.rename(**mapper) # type: ignore
def sort_values(
self,
axis=0,
ascending=True,
inplace: bool_t = False,
kind: str = "quicksort",
na_position: str = "last",
ignore_index: bool_t = False,
):
"""
Sort by the values along either axis.
Parameters
----------%(optional_by)s
axis : %(axes_single_arg)s, default 0
Axis to be sorted.
ascending : bool or list of bool, default True
Sort ascending vs. descending. Specify list for multiple sort
orders. If this is a list of bools, must match the length of
the by.
inplace : bool, default False
If True, perform operation in-place.
kind : {'quicksort', 'mergesort', 'heapsort'}, default 'quicksort'
Choice of sorting algorithm. See also :func:`numpy.sort` for more
information. `mergesort` is the only stable algorithm. For
DataFrames, this option is only applied when sorting on a single
column or label.
na_position : {'first', 'last'}, default 'last'
Puts NaNs at the beginning if `first`; `last` puts NaNs at the
end.
ignore_index : bool, default False
If True, the resulting axis will be labeled 0, 1, …, n - 1.
.. versionadded:: 1.0.0
Returns
-------
sorted_obj : DataFrame or None
DataFrame with sorted values if inplace=False, None otherwise.
Examples
--------
>>> df = pd.DataFrame({
... 'col1': ['A', 'A', 'B', np.nan, 'D', 'C'],
... 'col2': [2, 1, 9, 8, 7, 4],
... 'col3': [0, 1, 9, 4, 2, 3],
... })
>>> df
col1 col2 col3
0 A 2 0
1 A 1 1
2 B 9 9
3 NaN 8 4
4 D 7 2
5 C 4 3
Sort by col1
>>> df.sort_values(by=['col1'])
col1 col2 col3
0 A 2 0
1 A 1 1
2 B 9 9
5 C 4 3
4 D 7 2
3 NaN 8 4
Sort by multiple columns
>>> df.sort_values(by=['col1', 'col2'])
col1 col2 col3
1 A 1 1
0 A 2 0
2 B 9 9
5 C 4 3
4 D 7 2
3 NaN 8 4
Sort Descending
>>> df.sort_values(by='col1', ascending=False)
col1 col2 col3
4 D 7 2
5 C 4 3
2 B 9 9
0 A 2 0
1 A 1 1
3 NaN 8 4
Putting NAs first
>>> df.sort_values(by='col1', ascending=False, na_position='first')
col1 col2 col3
3 NaN 8 4
4 D 7 2
5 C 4 3
2 B 9 9
0 A 2 0
1 A 1 1
"""
raise AbstractMethodError(self)
def reindex(self: FrameOrSeries, *args, **kwargs) -> FrameOrSeries:
"""
Conform %(klass)s to new index with optional filling logic.
Places NA/NaN in locations having no value in the previous index. A new object
is produced unless the new index is equivalent to the current one and
``copy=False``.
Parameters
----------
%(optional_labels)s
%(axes)s : array-like, optional
New labels / index to conform to, should be specified using
keywords. Preferably an Index object to avoid duplicating data.
%(optional_axis)s
method : {None, 'backfill'/'bfill', 'pad'/'ffill', 'nearest'}
Method to use for filling holes in reindexed DataFrame.
Please note: this is only applicable to DataFrames/Series with a
monotonically increasing/decreasing index.
* None (default): don't fill gaps
* pad / ffill: Propagate last valid observation forward to next
valid.
* backfill / bfill: Use next valid observation to fill gap.
* nearest: Use nearest valid observations to fill gap.
copy : bool, default True
Return a new object, even if the passed indexes are the same.
level : int or name
Broadcast across a level, matching Index values on the
passed MultiIndex level.
fill_value : scalar, default np.NaN
Value to use for missing values. Defaults to NaN, but can be any
"compatible" value.
limit : int, default None
Maximum number of consecutive elements to forward or backward fill.
tolerance : optional
Maximum distance between original and new labels for inexact
matches. The values of the index at the matching locations must
satisfy the equation ``abs(index[indexer] - target) <= tolerance``.
Tolerance may be a scalar value, which applies the same tolerance
to all values, or list-like, which applies variable tolerance per
element. List-like includes list, tuple, array, Series, and must be
the same size as the index and its dtype must exactly match the
index's type.
.. versionadded:: 0.21.0 (list-like tolerance)
Returns
-------
%(klass)s with changed index.
See Also
--------
DataFrame.set_index : Set row labels.
DataFrame.reset_index : Remove row labels or move them to new columns.
DataFrame.reindex_like : Change to same indices as other DataFrame.
Examples
--------
``DataFrame.reindex`` supports two calling conventions
* ``(index=index_labels, columns=column_labels, ...)``
* ``(labels, axis={'index', 'columns'}, ...)``
We *highly* recommend using keyword arguments to clarify your
intent.
Create a dataframe with some fictional data.
>>> index = ['Firefox', 'Chrome', 'Safari', 'IE10', 'Konqueror']
>>> df = pd.DataFrame({'http_status': [200, 200, 404, 404, 301],
... 'response_time': [0.04, 0.02, 0.07, 0.08, 1.0]},
... index=index)
>>> df
http_status response_time
Firefox 200 0.04
Chrome 200 0.02
Safari 404 0.07
IE10 404 0.08
Konqueror 301 1.00
Create a new index and reindex the dataframe. By default
values in the new index that do not have corresponding
records in the dataframe are assigned ``NaN``.
>>> new_index = ['Safari', 'Iceweasel', 'Comodo Dragon', 'IE10',
... 'Chrome']
>>> df.reindex(new_index)
http_status response_time
Safari 404.0 0.07
Iceweasel NaN NaN
Comodo Dragon NaN NaN
IE10 404.0 0.08
Chrome 200.0 0.02
We can fill in the missing values by passing a value to
the keyword ``fill_value``. Because the index is not monotonically
increasing or decreasing, we cannot use arguments to the keyword
``method`` to fill the ``NaN`` values.
>>> df.reindex(new_index, fill_value=0)
http_status response_time
Safari 404 0.07
Iceweasel 0 0.00
Comodo Dragon 0 0.00
IE10 404 0.08
Chrome 200 0.02
>>> df.reindex(new_index, fill_value='missing')
http_status response_time
Safari 404 0.07
Iceweasel missing missing
Comodo Dragon missing missing
IE10 404 0.08
Chrome 200 0.02
We can also reindex the columns.
>>> df.reindex(columns=['http_status', 'user_agent'])
http_status user_agent
Firefox 200 NaN
Chrome 200 NaN
Safari 404 NaN
IE10 404 NaN
Konqueror 301 NaN
Or we can use "axis-style" keyword arguments
>>> df.reindex(['http_status', 'user_agent'], axis="columns")
http_status user_agent
Firefox 200 NaN
Chrome 200 NaN
Safari 404 NaN
IE10 404 NaN
Konqueror 301 NaN
To further illustrate the filling functionality in
``reindex``, we will create a dataframe with a
monotonically increasing index (for example, a sequence
of dates).
>>> date_index = pd.date_range('1/1/2010', periods=6, freq='D')
>>> df2 = pd.DataFrame({"prices": [100, 101, np.nan, 100, 89, 88]},
... index=date_index)
>>> df2
prices
2010-01-01 100.0
2010-01-02 101.0
2010-01-03 NaN
2010-01-04 100.0
2010-01-05 89.0
2010-01-06 88.0
Suppose we decide to expand the dataframe to cover a wider
date range.
>>> date_index2 = pd.date_range('12/29/2009', periods=10, freq='D')
>>> df2.reindex(date_index2)
prices
2009-12-29 NaN
2009-12-30 NaN
2009-12-31 NaN
2010-01-01 100.0
2010-01-02 101.0
2010-01-03 NaN
2010-01-04 100.0
2010-01-05 89.0
2010-01-06 88.0
2010-01-07 NaN
The index entries that did not have a value in the original data frame
(for example, '2009-12-29') are by default filled with ``NaN``.
If desired, we can fill in the missing values using one of several
options.
For example, to fill the ``NaN`` values by propagating the next valid
value backwards, pass ``bfill`` as an argument to the ``method`` keyword.
>>> df2.reindex(date_index2, method='bfill')
prices
2009-12-29 100.0
2009-12-30 100.0
2009-12-31 100.0
2010-01-01 100.0
2010-01-02 101.0
2010-01-03 NaN
2010-01-04 100.0
2010-01-05 89.0
2010-01-06 88.0
2010-01-07 NaN
Please note that the ``NaN`` value present in the original dataframe
(at index value 2010-01-03) will not be filled by any of the
value propagation schemes. This is because filling while reindexing
does not look at dataframe values, but only compares the original and
desired indexes. If you do want to fill in the ``NaN`` values present
in the original dataframe, use the ``fillna()`` method.
See the :ref:`user guide <basics.reindexing>` for more.
"""
# TODO: Decide if we care about having different examples for different
# kinds
# construct the args
axes, kwargs = self._construct_axes_from_arguments(args, kwargs)
method = missing.clean_reindex_fill_method(kwargs.pop("method", None))
level = kwargs.pop("level", None)
copy = kwargs.pop("copy", True)
limit = kwargs.pop("limit", None)
tolerance = kwargs.pop("tolerance", None)
fill_value = kwargs.pop("fill_value", None)
# Series.reindex doesn't use / need the axis kwarg
# We pop and ignore it here, to make writing Series/Frame generic code
# easier
kwargs.pop("axis", None)
if kwargs:
raise TypeError(
"reindex() got an unexpected keyword "
f'argument "{list(kwargs.keys())[0]}"'
)
self._consolidate_inplace()
# if all axes that are requested to reindex are equal, then only copy
# if indicated must have index names equal here as well as values
if all(
self._get_axis(axis).identical(ax)
for axis, ax in axes.items()
if ax is not None
):
if copy:
return self.copy()
return self
# check if we are a multi reindex
if self._needs_reindex_multi(axes, method, level):
return self._reindex_multi(axes, copy, fill_value)
# perform the reindex on the axes
return self._reindex_axes(
axes, level, limit, tolerance, method, fill_value, copy
).__finalize__(self)
def _reindex_axes(
self: FrameOrSeries, axes, level, limit, tolerance, method, fill_value, copy
) -> FrameOrSeries:
"""Perform the reindex for all the axes."""
obj = self
for a in self._AXIS_ORDERS:
labels = axes[a]
if labels is None:
continue
ax = self._get_axis(a)
new_index, indexer = ax.reindex(
labels, level=level, limit=limit, tolerance=tolerance, method=method
)
axis = self._get_axis_number(a)
obj = obj._reindex_with_indexers(
{axis: [new_index, indexer]},
fill_value=fill_value,
copy=copy,
allow_dups=False,
)
return obj
def _needs_reindex_multi(self, axes, method, level) -> bool_t:
"""Check if we do need a multi reindex."""
return (
(com.count_not_none(*axes.values()) == self._AXIS_LEN)
and method is None
and level is None
and not self._is_mixed_type
)
def _reindex_multi(self, axes, copy, fill_value):
raise AbstractMethodError(self)
def _reindex_with_indexers(
self: FrameOrSeries,
reindexers,
fill_value=None,
copy: bool_t = False,
allow_dups: bool_t = False,
) -> FrameOrSeries:
"""allow_dups indicates an internal call here """
# reindex doing multiple operations on different axes if indicated
new_data = self._data
for axis in sorted(reindexers.keys()):
index, indexer = reindexers[axis]
baxis = self._get_block_manager_axis(axis)
if index is None:
continue
index = ensure_index(index)
if indexer is not None:
indexer = ensure_int64(indexer)
# TODO: speed up on homogeneous DataFrame objects
new_data = new_data.reindex_indexer(
index,
indexer,
axis=baxis,
fill_value=fill_value,
allow_dups=allow_dups,
copy=copy,
)
if copy and new_data is self._data:
new_data = new_data.copy()
return self._constructor(new_data).__finalize__(self)
def filter(
self: FrameOrSeries,
items=None,
like: Optional[str] = None,
regex: Optional[str] = None,
axis=None,
) -> FrameOrSeries:
"""
Subset the dataframe rows or columns according to the specified index labels.
Note that this routine does not filter a dataframe on its
contents. The filter is applied to the labels of the index.
Parameters
----------
items : list-like
Keep labels from axis which are in items.
like : str
Keep labels from axis for which "like in label == True".
regex : str (regular expression)
Keep labels from axis for which re.search(regex, label) == True.
axis : {0 or 'index', 1 or 'columns', None}, default None
The axis to filter on, expressed either as an index (int)
or axis name (str). By default this is the info axis,
'index' for Series, 'columns' for DataFrame.
Returns
-------
same type as input object
See Also
--------
DataFrame.loc : Access a group of rows and columns
by label(s) or a boolean array.
Notes
-----
The ``items``, ``like``, and ``regex`` parameters are
enforced to be mutually exclusive.
``axis`` defaults to the info axis that is used when indexing
with ``[]``.
Examples
--------
>>> df = pd.DataFrame(np.array(([1, 2, 3], [4, 5, 6])),
... index=['mouse', 'rabbit'],
... columns=['one', 'two', 'three'])
>>> # select columns by name
>>> df.filter(items=['one', 'three'])
one three
mouse 1 3
rabbit 4 6
>>> # select columns by regular expression
>>> df.filter(regex='e$', axis=1)
one three
mouse 1 3
rabbit 4 6
>>> # select rows containing 'bbi'
>>> df.filter(like='bbi', axis=0)
one two three
rabbit 4 5 6
"""
nkw = com.count_not_none(items, like, regex)
if nkw > 1:
raise TypeError(
"Keyword arguments `items`, `like`, or `regex` "
"are mutually exclusive"
)
if axis is None:
axis = self._info_axis_name
labels = self._get_axis(axis)
if items is not None:
name = self._get_axis_name(axis)
return self.reindex(**{name: [r for r in items if r in labels]})
elif like:
def f(x):
return like in ensure_str(x)
values = labels.map(f)
return self.loc(axis=axis)[values]
elif regex:
def f(x):
return matcher.search(ensure_str(x)) is not None
matcher = re.compile(regex)
values = labels.map(f)
return self.loc(axis=axis)[values]
else:
raise TypeError("Must pass either `items`, `like`, or `regex`")
def head(self: FrameOrSeries, n: int = 5) -> FrameOrSeries:
"""
Return the first `n` rows.
This function returns the first `n` rows for the object based
on position. It is useful for quickly testing if your object
has the right type of data in it.
For negative values of `n`, this function returns all rows except
the last `n` rows, equivalent to ``df[:-n]``.
Parameters
----------
n : int, default 5
Number of rows to select.
Returns
-------
same type as caller
The first `n` rows of the caller object.
See Also
--------
DataFrame.tail: Returns the last `n` rows.
Examples
--------
>>> df = pd.DataFrame({'animal': ['alligator', 'bee', 'falcon', 'lion',
... 'monkey', 'parrot', 'shark', 'whale', 'zebra']})
>>> df
animal
0 alligator
1 bee
2 falcon
3 lion
4 monkey
5 parrot
6 shark
7 whale
8 zebra
Viewing the first 5 lines
>>> df.head()
animal
0 alligator
1 bee
2 falcon
3 lion
4 monkey
Viewing the first `n` lines (three in this case)
>>> df.head(3)
animal
0 alligator
1 bee
2 falcon
For negative values of `n`
>>> df.head(-3)
animal
0 alligator
1 bee
2 falcon
3 lion
4 monkey
5 parrot
"""
return self.iloc[:n]
def tail(self: FrameOrSeries, n: int = 5) -> FrameOrSeries:
"""
Return the last `n` rows.
This function returns last `n` rows from the object based on
position. It is useful for quickly verifying data, for example,
after sorting or appending rows.
For negative values of `n`, this function returns all rows except
the first `n` rows, equivalent to ``df[n:]``.
Parameters
----------
n : int, default 5
Number of rows to select.
Returns
-------
type of caller
The last `n` rows of the caller object.
See Also
--------
DataFrame.head : The first `n` rows of the caller object.
Examples
--------
>>> df = pd.DataFrame({'animal': ['alligator', 'bee', 'falcon', 'lion',
... 'monkey', 'parrot', 'shark', 'whale', 'zebra']})
>>> df
animal
0 alligator
1 bee
2 falcon
3 lion
4 monkey
5 parrot
6 shark
7 whale
8 zebra
Viewing the last 5 lines
>>> df.tail()
animal
4 monkey
5 parrot
6 shark
7 whale
8 zebra
Viewing the last `n` lines (three in this case)
>>> df.tail(3)
animal
6 shark
7 whale
8 zebra
For negative values of `n`
>>> df.tail(-3)
animal
3 lion
4 monkey
5 parrot
6 shark
7 whale
8 zebra
"""
if n == 0:
return self.iloc[0:0]
return self.iloc[-n:]
def sample(
self: FrameOrSeries,
n=None,
frac=None,
replace=False,
weights=None,
random_state=None,
axis=None,
) -> FrameOrSeries:
"""
Return a random sample of items from an axis of object.
You can use `random_state` for reproducibility.
Parameters
----------
n : int, optional
Number of items from axis to return. Cannot be used with `frac`.
Default = 1 if `frac` = None.
frac : float, optional
Fraction of axis items to return. Cannot be used with `n`.
replace : bool, default False
Allow or disallow sampling of the same row more than once.
weights : str or ndarray-like, optional
Default 'None' results in equal probability weighting.
If passed a Series, will align with target object on index. Index
values in weights not found in sampled object will be ignored and
index values in sampled object not in weights will be assigned
weights of zero.
If called on a DataFrame, will accept the name of a column
when axis = 0.
Unless weights are a Series, weights must be same length as axis
being sampled.
If weights do not sum to 1, they will be normalized to sum to 1.
Missing values in the weights column will be treated as zero.
Infinite values not allowed.
random_state : int or numpy.random.RandomState, optional
Seed for the random number generator (if int), or numpy RandomState
object.
axis : {0 or 'index', 1 or 'columns', None}, default None
Axis to sample. Accepts axis number or name. Default is stat axis
for given data type (0 for Series and DataFrames).
Returns
-------
Series or DataFrame
A new object of same type as caller containing `n` items randomly
sampled from the caller object.
See Also
--------
numpy.random.choice: Generates a random sample from a given 1-D numpy
array.
Notes
-----
If `frac` > 1, `replace` should be set to `True`.
Examples
--------
>>> df = pd.DataFrame({'num_legs': [2, 4, 8, 0],
... 'num_wings': [2, 0, 0, 0],
... 'num_specimen_seen': [10, 2, 1, 8]},
... index=['falcon', 'dog', 'spider', 'fish'])
>>> df
num_legs num_wings num_specimen_seen
falcon 2 2 10
dog 4 0 2
spider 8 0 1
fish 0 0 8
Extract 3 random elements from the ``Series`` ``df['num_legs']``:
Note that we use `random_state` to ensure the reproducibility of
the examples.
>>> df['num_legs'].sample(n=3, random_state=1)
fish 0
spider 8
falcon 2
Name: num_legs, dtype: int64
A random 50% sample of the ``DataFrame`` with replacement:
>>> df.sample(frac=0.5, replace=True, random_state=1)
num_legs num_wings num_specimen_seen
dog 4 0 2
fish 0 0 8
An upsampled sample of the ``DataFrame`` with replacement:
Note that the `replace` parameter has to be `True` for `frac` > 1.
>>> df.sample(frac=2, replace=True, random_state=1)
num_legs num_wings num_specimen_seen
dog 4 0 2
fish 0 0 8
falcon 2 2 10
falcon 2 2 10
fish 0 0 8
dog 4 0 2
fish 0 0 8
dog 4 0 2
Using a DataFrame column as weights. Rows with larger value in the
`num_specimen_seen` column are more likely to be sampled.
>>> df.sample(n=2, weights='num_specimen_seen', random_state=1)
num_legs num_wings num_specimen_seen
falcon 2 2 10
fish 0 0 8
"""
if axis is None:
axis = self._stat_axis_number
axis = self._get_axis_number(axis)
axis_length = self.shape[axis]
# Process random_state argument
rs = com.random_state(random_state)
# Check weights for compliance
if weights is not None:
# If a series, align with frame
if isinstance(weights, ABCSeries):
weights = weights.reindex(self.axes[axis])
# Strings acceptable if a dataframe and axis = 0
if isinstance(weights, str):
if isinstance(self, ABCDataFrame):
if axis == 0:
try:
weights = self[weights]
except KeyError as err:
raise KeyError(
"String passed to weights not a valid column"
) from err
else:
raise ValueError(
"Strings can only be passed to "
"weights when sampling from rows on "
"a DataFrame"
)
else:
raise ValueError(
"Strings cannot be passed as weights "
"when sampling from a Series."
)
weights = pd.Series(weights, dtype="float64")
if len(weights) != axis_length:
raise ValueError(
"Weights and axis to be sampled must be of same length"
)
if (weights == np.inf).any() or (weights == -np.inf).any():
raise ValueError("weight vector may not include `inf` values")
if (weights < 0).any():
raise ValueError("weight vector many not include negative values")
# If has nan, set to zero.
weights = weights.fillna(0)
# Renormalize if don't sum to 1
if weights.sum() != 1:
if weights.sum() != 0:
weights = weights / weights.sum()
else:
raise ValueError("Invalid weights: weights sum to zero")
weights = weights.values
# If no frac or n, default to n=1.
if n is None and frac is None:
n = 1
elif frac is not None and frac > 1 and not replace:
raise ValueError(
"Replace has to be set to `True` when "
"upsampling the population `frac` > 1."
)
elif n is not None and frac is None and n % 1 != 0:
raise ValueError("Only integers accepted as `n` values")
elif n is None and frac is not None:
n = int(round(frac * axis_length))
elif n is not None and frac is not None:
raise ValueError("Please enter a value for `frac` OR `n`, not both")
# Check for negative sizes
if n < 0:
raise ValueError(
"A negative number of rows requested. Please provide positive value."
)
locs = rs.choice(axis_length, size=n, replace=replace, p=weights)
return self.take(locs, axis=axis)
_shared_docs[
"pipe"
] = r"""
Apply func(self, \*args, \*\*kwargs).
Parameters
----------
func : function
Function to apply to the %(klass)s.
``args``, and ``kwargs`` are passed into ``func``.
Alternatively a ``(callable, data_keyword)`` tuple where
``data_keyword`` is a string indicating the keyword of
``callable`` that expects the %(klass)s.
args : iterable, optional
Positional arguments passed into ``func``.
kwargs : mapping, optional
A dictionary of keyword arguments passed into ``func``.
Returns
-------
object : the return type of ``func``.
See Also
--------
DataFrame.apply : Apply a function along input axis of DataFrame.
DataFrame.applymap : Apply a function elementwise on a whole DataFrame.
Series.map : Apply a mapping correspondence on a
:class:`~pandas.Series`.
Notes
-----
Use ``.pipe`` when chaining together functions that expect
Series, DataFrames or GroupBy objects. Instead of writing
>>> f(g(h(df), arg1=a), arg2=b, arg3=c)
You can write
>>> (df.pipe(h)
... .pipe(g, arg1=a)
... .pipe(f, arg2=b, arg3=c)
... )
If you have a function that takes the data as (say) the second
argument, pass a tuple indicating which keyword expects the
data. For example, suppose ``f`` takes its data as ``arg2``:
>>> (df.pipe(h)
... .pipe(g, arg1=a)
... .pipe((f, 'arg2'), arg1=a, arg3=c)
... )
"""
@Appender(_shared_docs["pipe"] % _shared_doc_kwargs)
def pipe(self, func, *args, **kwargs):
return com.pipe(self, func, *args, **kwargs)
_shared_docs["aggregate"] = dedent(
"""
Aggregate using one or more operations over the specified axis.
%(versionadded)s
Parameters
----------
func : function, str, list or dict
Function to use for aggregating the data. If a function, must either
work when passed a %(klass)s or when passed to %(klass)s.apply.
Accepted combinations are:
- function
- string function name
- list of functions and/or function names, e.g. ``[np.sum, 'mean']``
- dict of axis labels -> functions, function names or list of such.
%(axis)s
*args
Positional arguments to pass to `func`.
**kwargs
Keyword arguments to pass to `func`.
Returns
-------
scalar, Series or DataFrame
The return can be:
* scalar : when Series.agg is called with single function
* Series : when DataFrame.agg is called with a single function
* DataFrame : when DataFrame.agg is called with several functions
Return scalar, Series or DataFrame.
%(see_also)s
Notes
-----
`agg` is an alias for `aggregate`. Use the alias.
A passed user-defined-function will be passed a Series for evaluation.
%(examples)s"""
)
_shared_docs[
"transform"
] = """
Call ``func`` on self producing a %(klass)s with transformed values.
Produced %(klass)s will have same axis length as self.
Parameters
----------
func : function, str, list or dict
Function to use for transforming the data. If a function, must either
work when passed a %(klass)s or when passed to %(klass)s.apply.
Accepted combinations are:
- function
- string function name
- list of functions and/or function names, e.g. ``[np.exp, 'sqrt']``
- dict of axis labels -> functions, function names or list of such.
%(axis)s
*args
Positional arguments to pass to `func`.
**kwargs
Keyword arguments to pass to `func`.
Returns
-------
%(klass)s
A %(klass)s that must have the same length as self.
Raises
------
ValueError : If the returned %(klass)s has a different length than self.
See Also
--------
%(klass)s.agg : Only perform aggregating type operations.
%(klass)s.apply : Invoke function on a %(klass)s.
Examples
--------
>>> df = pd.DataFrame({'A': range(3), 'B': range(1, 4)})
>>> df
A B
0 0 1
1 1 2
2 2 3
>>> df.transform(lambda x: x + 1)
A B
0 1 2
1 2 3
2 3 4
Even though the resulting %(klass)s must have the same length as the
input %(klass)s, it is possible to provide several input functions:
>>> s = pd.Series(range(3))
>>> s
0 0
1 1
2 2
dtype: int64
>>> s.transform([np.sqrt, np.exp])
sqrt exp
0 0.000000 1.000000
1 1.000000 2.718282
2 1.414214 7.389056
"""
# ----------------------------------------------------------------------
# Attribute access
def __finalize__(
self: FrameOrSeries, other, method=None, **kwargs
) -> FrameOrSeries:
"""
Propagate metadata from other to self.
Parameters
----------
other : the object from which to get the attributes that we are going
to propagate
method : optional, a passed method name; possibly used to take
different types of propagation actions based on this
"""
if isinstance(other, NDFrame):
for name in other.attrs:
self.attrs[name] = other.attrs[name]
# For subclasses using _metadata.
for name in self._metadata:
assert isinstance(name, str)
object.__setattr__(self, name, getattr(other, name, None))
return self
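# Illustrative sketch (added comment, not in the original source): among the
# metadata propagated here is ``attrs``, so derived objects should carry it
# over, e.g.:
#
#   df = pd.DataFrame({"a": [1, 2]})
#   df.attrs["source"] = "sensor-1"
#   df.head(1).attrs     # expected: {"source": "sensor-1"}, copied in
#                        # __finalize__ when the result is constructed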
def __getattr__(self, name: str):
"""
After regular attribute access, try looking up the name
This allows simpler access to columns for interactive use.
"""
# Note: obj.x will always call obj.__getattribute__('x') prior to
# calling obj.__getattr__('x').
if (
name in self._internal_names_set
or name in self._metadata
or name in self._accessors
):
return object.__getattribute__(self, name)
else:
if self._info_axis._can_hold_identifiers_and_holds_name(name):
return self[name]
return object.__getattribute__(self, name)
def __setattr__(self, name: str, value) -> None:
"""
After regular attribute access, try setting the name
This allows simpler access to columns for interactive use.
"""
# first try regular attribute access via __getattribute__, so that
# e.g. ``obj.x`` and ``obj.x = 4`` will always reference/modify
# the same attribute.
try:
object.__getattribute__(self, name)
return object.__setattr__(self, name, value)
except AttributeError:
pass
# if this fails, go on to more involved attribute setting
# (note that this matches __getattr__, above).
if name in self._internal_names_set:
object.__setattr__(self, name, value)
elif name in self._metadata:
object.__setattr__(self, name, value)
else:
try:
existing = getattr(self, name)
if isinstance(existing, Index):
object.__setattr__(self, name, value)
elif name in self._info_axis:
self[name] = value
else:
object.__setattr__(self, name, value)
except (AttributeError, TypeError):
if isinstance(self, ABCDataFrame) and (is_list_like(value)):
warnings.warn(
"Pandas doesn't allow columns to be "
"created via a new attribute name - see "
"https://pandas.pydata.org/pandas-docs/"
"stable/indexing.html#attribute-access",
stacklevel=2,
)
object.__setattr__(self, name, value)
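# Illustrative sketch (added comment, not in the original source): the
# __getattr__/__setattr__ pair above is what makes attribute-style column
# access work while discouraging column *creation* via attributes:
#
#   df = pd.DataFrame({"a": [1, 2]})
#   df.a             # same as df["a"] (resolved by __getattr__)
#   df.a = [5, 6]    # "a" is in the info axis -> assigns to the column
#   df.b = [7, 8]    # not an existing column -> sets a plain attribute and
#                    # emits the warning above instead of creating a column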
def _dir_additions(self):
"""
add the string-like attributes from the info_axis.
If info_axis is a MultiIndex, its first level values are used.
"""
additions = {
c
for c in self._info_axis.unique(level=0)[:100]
if isinstance(c, str) and c.isidentifier()
}
return super()._dir_additions().union(additions)
# ----------------------------------------------------------------------
# Consolidation of internals
def _protect_consolidate(self, f):
"""
Consolidate _data -- if the blocks have changed, then clear the
cache
"""
blocks_before = len(self._data.blocks)
result = f()
if len(self._data.blocks) != blocks_before:
self._clear_item_cache()
return result
def _consolidate_inplace(self) -> None:
"""Consolidate data in place and return None"""
def f():
self._data = self._data.consolidate()
self._protect_consolidate(f)
def _consolidate(self, inplace: bool_t = False):
"""
Compute NDFrame with "consolidated" internals (data of each dtype
grouped together in a single ndarray).
Parameters
----------
inplace : bool, default False
If False return new object, otherwise modify existing object.
Returns
-------
consolidated : same type as caller
"""
inplace = validate_bool_kwarg(inplace, "inplace")
if inplace:
self._consolidate_inplace()
else:
f = lambda: self._data.consolidate()
cons_data = self._protect_consolidate(f)
return self._constructor(cons_data).__finalize__(self)
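# Illustrative sketch (added comment, not in the original source): a frame's
# values are held in per-dtype "blocks"; consolidation merges blocks of the
# same dtype into one. Block layout is an implementation detail, but roughly:
#
#   df = pd.DataFrame({"a": [1, 2]})
#   df["b"] = [3, 4]            # adding a column can create a second block
#   df._consolidate_inplace()   # same-dtype blocks are merged; if the block
#                               # count changed, _protect_consolidate clears
#                               # the item cache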
@property
def _is_mixed_type(self) -> bool_t:
f = lambda: self._data.is_mixed_type
return self._protect_consolidate(f)
@property
def _is_numeric_mixed_type(self) -> bool_t:
f = lambda: self._data.is_numeric_mixed_type
return self._protect_consolidate(f)
def _check_inplace_setting(self, value) -> bool_t:
""" check whether we allow in-place setting with this type of value """
if self._is_mixed_type:
if not self._is_numeric_mixed_type:
# allow an actual np.nan through
if is_float(value) and np.isnan(value):
return True
raise TypeError(
"Cannot do inplace boolean setting on "
"mixed-types with a non np.nan value"
)
return True
def _get_numeric_data(self):
return self._constructor(self._data.get_numeric_data()).__finalize__(self)
def _get_bool_data(self):
return self._constructor(self._data.get_bool_data()).__finalize__(self)
# ----------------------------------------------------------------------
# Internal Interface Methods
@property
def values(self) -> np.ndarray:
"""
Return a Numpy representation of the DataFrame.
.. warning::
We recommend using :meth:`DataFrame.to_numpy` instead.
Only the values in the DataFrame will be returned, the axes labels
will be removed.
Returns
-------
numpy.ndarray
The values of the DataFrame.
See Also
--------
DataFrame.to_numpy : Recommended alternative to this method.
DataFrame.index : Retrieve the index labels.
DataFrame.columns : Retrieve the column names.
Notes
-----
The dtype will be a lower-common-denominator dtype (implicit
upcasting); that is to say if the dtypes (even of numeric types)
are mixed, the one that accommodates all will be chosen. Use this
with care if you are not dealing with the blocks.
e.g. If the dtypes are float16 and float32, dtype will be upcast to
float32. If dtypes are int32 and uint8, dtype will be upcast to
int32. By :func:`numpy.find_common_type` convention, mixing int64
and uint64 will result in a float64 dtype.
Examples
--------
A DataFrame where all columns are the same type (e.g., int64) results
in an array of the same type.
>>> df = pd.DataFrame({'age': [ 3, 29],
... 'height': [94, 170],
... 'weight': [31, 115]})
>>> df
age height weight
0 3 94 31
1 29 170 115
>>> df.dtypes
age int64
height int64
weight int64
dtype: object
>>> df.values
array([[ 3, 94, 31],
[ 29, 170, 115]], dtype=int64)
A DataFrame with mixed type columns (e.g., str/object, int64, float32)
results in an ndarray of the broadest type that accommodates these
mixed types (e.g., object).
>>> df2 = pd.DataFrame([('parrot', 24.0, 'second'),
... ('lion', 80.5, 1),
... ('monkey', np.nan, None)],
... columns=('name', 'max_speed', 'rank'))
>>> df2.dtypes
name object
max_speed float64
rank object
dtype: object
>>> df2.values
array([['parrot', 24.0, 'second'],
['lion', 80.5, 1],
['monkey', nan, None]], dtype=object)
"""
self._consolidate_inplace()
return self._data.as_array(transpose=self._AXIS_REVERSED)
@property
def _values(self) -> np.ndarray:
"""internal implementation"""
return self.values
def _internal_get_values(self) -> np.ndarray:
"""
Return an ndarray after converting sparse values to dense.
This is the same as ``.values`` for non-sparse data. For sparse
data contained in a `SparseArray`, the data are first
converted to a dense representation.
Returns
-------
numpy.ndarray
Numpy representation of DataFrame.
See Also
--------
values : Numpy representation of DataFrame.
SparseArray : Container for sparse data.
"""
return self.values
@property
def dtypes(self):
"""
Return the dtypes in the DataFrame.
This returns a Series with the data type of each column.
The result's index is the original DataFrame's columns. Columns
with mixed types are stored with the ``object`` dtype. See
:ref:`the User Guide <basics.dtypes>` for more.
Returns
-------
pandas.Series
The data type of each column.
Examples
--------
>>> df = pd.DataFrame({'float': [1.0],
... 'int': [1],
... 'datetime': [pd.Timestamp('20180310')],
... 'string': ['foo']})
>>> df.dtypes
float float64
int int64
datetime datetime64[ns]
string object
dtype: object
"""
from pandas import Series
return Series(self._data.get_dtypes(), index=self._info_axis, dtype=np.object_)
def _to_dict_of_blocks(self, copy: bool_t = True):
"""
Return a dict of dtype -> Constructor Types that
each is a homogeneous dtype.
Internal ONLY
"""
return {
k: self._constructor(v).__finalize__(self)
for k, v in self._data.to_dict(copy=copy).items()
}
def astype(
self: FrameOrSeries, dtype, copy: bool_t = True, errors: str = "raise"
) -> FrameOrSeries:
"""
Cast a pandas object to a specified dtype ``dtype``.
Parameters
----------
dtype : data type, or dict of column name -> data type
Use a numpy.dtype or Python type to cast entire pandas object to
the same type. Alternatively, use {col: dtype, ...}, where col is a
column label and dtype is a numpy.dtype or Python type to cast one
or more of the DataFrame's columns to column-specific types.
copy : bool, default True
Return a copy when ``copy=True`` (be very careful setting
``copy=False`` as changes to values then may propagate to other
pandas objects).
errors : {'raise', 'ignore'}, default 'raise'
Control raising of exceptions on invalid data for provided dtype.
- ``raise`` : allow exceptions to be raised
- ``ignore`` : suppress exceptions. On error return original object.
Returns
-------
casted : same type as caller
See Also
--------
to_datetime : Convert argument to datetime.
to_timedelta : Convert argument to timedelta.
to_numeric : Convert argument to a numeric type.
numpy.ndarray.astype : Cast a numpy array to a specified type.
Examples
--------
Create a DataFrame:
>>> d = {'col1': [1, 2], 'col2': [3, 4]}
>>> df = pd.DataFrame(data=d)
>>> df.dtypes
col1 int64
col2 int64
dtype: object
Cast all columns to int32:
>>> df.astype('int32').dtypes
col1 int32
col2 int32
dtype: object
Cast col1 to int32 using a dictionary:
>>> df.astype({'col1': 'int32'}).dtypes
col1 int32
col2 int64
dtype: object
Create a series:
>>> ser = pd.Series([1, 2], dtype='int32')
>>> ser
0 1
1 2
dtype: int32
>>> ser.astype('int64')
0 1
1 2
dtype: int64
Convert to categorical type:
>>> ser.astype('category')
0 1
1 2
dtype: category
Categories (2, int64): [1, 2]
Convert to ordered categorical type with custom ordering:
>>> cat_dtype = pd.api.types.CategoricalDtype(
... categories=[2, 1], ordered=True)
>>> ser.astype(cat_dtype)
0 1
1 2
dtype: category
Categories (2, int64): [2 < 1]
Note that using ``copy=False`` and changing data on a new
pandas object may propagate changes:
>>> s1 = pd.Series([1, 2])
>>> s2 = s1.astype('int64', copy=False)
>>> s2[0] = 10
>>> s1 # note that s1[0] has changed too
0 10
1 2
dtype: int64
"""
if is_dict_like(dtype):
if self.ndim == 1: # i.e. Series
if len(dtype) > 1 or self.name not in dtype:
raise KeyError(
"Only the Series name can be used for "
"the key in Series dtype mappings."
)
new_type = dtype[self.name]
return self.astype(new_type, copy, errors)
for col_name in dtype.keys():
if col_name not in self:
raise KeyError(
"Only a column name can be used for the "
"key in a dtype mappings argument."
)
results = []
for col_name, col in self.items():
if col_name in dtype:
results.append(
col.astype(dtype=dtype[col_name], copy=copy, errors=errors)
)
else:
results.append(col.copy() if copy else col)
elif is_extension_array_dtype(dtype) and self.ndim > 1:
# GH 18099/22869: columnwise conversion to extension dtype
# GH 24704: use iloc to handle duplicate column names
results = [
self.iloc[:, i].astype(dtype, copy=copy)
for i in range(len(self.columns))
]
else:
# else, only a single dtype is given
new_data = self._data.astype(dtype=dtype, copy=copy, errors=errors)
return self._constructor(new_data).__finalize__(self)
# GH 19920: retain column metadata after concat
result = pd.concat(results, axis=1, copy=False)
result.columns = self.columns
return result
def copy(self: FrameOrSeries, deep: bool_t = True) -> FrameOrSeries:
"""
Make a copy of this object's indices and data.
When ``deep=True`` (default), a new object will be created with a
copy of the calling object's data and indices. Modifications to
the data or indices of the copy will not be reflected in the
original object (see notes below).
When ``deep=False``, a new object will be created without copying
the calling object's data or index (only references to the data
and index are copied). Any changes to the data of the original
will be reflected in the shallow copy (and vice versa).
Parameters
----------
deep : bool, default True
Make a deep copy, including a copy of the data and the indices.
With ``deep=False`` neither the indices nor the data are copied.
Returns
-------
copy : Series or DataFrame
Object type matches caller.
Notes
-----
When ``deep=True``, data is copied but actual Python objects
will not be copied recursively, only the reference to the object.
This is in contrast to `copy.deepcopy` in the Standard Library,
which recursively copies object data (see examples below).
While ``Index`` objects are copied when ``deep=True``, the underlying
numpy array is not copied for performance reasons. Since ``Index`` is
immutable, the underlying data can be safely shared and a copy
is not needed.
Examples
--------
>>> s = pd.Series([1, 2], index=["a", "b"])
>>> s
a 1
b 2
dtype: int64
>>> s_copy = s.copy()
>>> s_copy
a 1
b 2
dtype: int64
**Shallow copy versus default (deep) copy:**
>>> s = pd.Series([1, 2], index=["a", "b"])
>>> deep = s.copy()
>>> shallow = s.copy(deep=False)
Shallow copy shares data and index with original.
>>> s is shallow
False
>>> s.values is shallow.values and s.index is shallow.index
True
Deep copy has own copy of data and index.
>>> s is deep
False
>>> s.values is deep.values or s.index is deep.index
False
Updates to the data shared by shallow copy and original is reflected
in both; deep copy remains unchanged.
>>> s[0] = 3
>>> shallow[1] = 4
>>> s
a 3
b 4
dtype: int64
>>> shallow
a 3
b 4
dtype: int64
>>> deep
a 1
b 2
dtype: int64
Note that when copying an object containing Python objects, a deep copy
will copy the data, but will not do so recursively. Updating a nested
data object will be reflected in the deep copy.
>>> s = pd.Series([[1, 2], [3, 4]])
>>> deep = s.copy()
>>> s[0][0] = 10
>>> s
0 [10, 2]
1 [3, 4]
dtype: object
>>> deep
0 [10, 2]
1 [3, 4]
dtype: object
"""
data = self._data.copy(deep=deep)
return self._constructor(data).__finalize__(self)
def __copy__(self: FrameOrSeries, deep: bool_t = True) -> FrameOrSeries:
return self.copy(deep=deep)
def __deepcopy__(self: FrameOrSeries, memo=None) -> FrameOrSeries:
"""
Parameters
----------
memo, default None
Standard signature. Unused
"""
return self.copy(deep=True)
def _convert(
self: FrameOrSeries,
datetime: bool_t = False,
numeric: bool_t = False,
timedelta: bool_t = False,
coerce: bool_t = False,
copy: bool_t = True,
) -> FrameOrSeries:
"""
Attempt to infer better dtype for object columns
Parameters
----------
datetime : bool, default False
If True, convert to date where possible.
numeric : bool, default False
If True, attempt to convert to numbers (including strings), with
unconvertible values becoming NaN.
timedelta : bool, default False
If True, convert to timedelta where possible.
coerce : bool, default False
If True, force conversion with unconvertible values converted to
nulls (NaN or NaT).
copy : bool, default True
If True, return a copy even if no copy is necessary (e.g. no
conversion was done). Note: This is meant for internal use, and
should not be confused with inplace.
Returns
-------
converted : same as input object
"""
validate_bool_kwarg(datetime, "datetime")
validate_bool_kwarg(numeric, "numeric")
validate_bool_kwarg(timedelta, "timedelta")
validate_bool_kwarg(coerce, "coerce")
validate_bool_kwarg(copy, "copy")
return self._constructor(
self._data.convert(
datetime=datetime,
numeric=numeric,
timedelta=timedelta,
coerce=coerce,
copy=copy,
)
).__finalize__(self)
def infer_objects(self: FrameOrSeries) -> FrameOrSeries:
"""
Attempt to infer better dtypes for object columns.
Attempts soft conversion of object-dtyped
columns, leaving non-object and unconvertible
columns unchanged. The inference rules are the
same as during normal Series/DataFrame construction.
.. versionadded:: 0.21.0
Returns
-------
converted : same type as input object
See Also
--------
to_datetime : Convert argument to datetime.
to_timedelta : Convert argument to timedelta.
to_numeric : Convert argument to numeric type.
convert_dtypes : Convert argument to best possible dtype.
Examples
--------
>>> df = pd.DataFrame({"A": ["a", 1, 2, 3]})
>>> df = df.iloc[1:]
>>> df
A
1 1
2 2
3 3
>>> df.dtypes
A object
dtype: object
>>> df.infer_objects().dtypes
A int64
dtype: object
"""
# numeric=False necessary to only soft convert;
# python objects will still be converted to
# native numpy numeric types
return self._constructor(
self._data.convert(
datetime=True, numeric=False, timedelta=True, coerce=False, copy=True
)
).__finalize__(self)
def convert_dtypes(
self: FrameOrSeries,
infer_objects: bool_t = True,
convert_string: bool_t = True,
convert_integer: bool_t = True,
convert_boolean: bool_t = True,
) -> FrameOrSeries:
"""
Convert columns to best possible dtypes using dtypes supporting ``pd.NA``.
.. versionadded:: 1.0.0
Parameters
----------
infer_objects : bool, default True
Whether object dtypes should be converted to the best possible types.
convert_string : bool, default True
Whether object dtypes should be converted to ``StringDtype()``.
convert_integer : bool, default True
Whether, if possible, conversion can be done to integer extension types.
convert_boolean : bool, default True
Whether object dtypes should be converted to ``BooleanDtype()``.
Returns
-------
Series or DataFrame
Copy of input object with new dtype.
See Also
--------
infer_objects : Infer dtypes of objects.
to_datetime : Convert argument to datetime.
to_timedelta : Convert argument to timedelta.
to_numeric : Convert argument to a numeric type.
Notes
-----
By default, ``convert_dtypes`` will attempt to convert a Series (or each
Series in a DataFrame) to dtypes that support ``pd.NA``. By using the options
``convert_string``, ``convert_integer``, and ``convert_boolean``, it is
possible to turn off individual conversions to ``StringDtype``, the integer
extension types or ``BooleanDtype``, respectively.
For object-dtyped columns, if ``infer_objects`` is ``True``, use the inference
rules as during normal Series/DataFrame construction. Then, if possible,
convert to ``StringDtype``, ``BooleanDtype`` or an appropriate integer extension
type, otherwise leave as ``object``.
If the dtype is integer, convert to an appropriate integer extension type.
If the dtype is numeric, and consists of all integers, convert to an
appropriate integer extension type.
In the future, as new dtypes are added that support ``pd.NA``, the results
of this method will change to support those new dtypes.
Examples
--------
>>> df = pd.DataFrame(
... {
... "a": pd.Series([1, 2, 3], dtype=np.dtype("int32")),
... "b": pd.Series(["x", "y", "z"], dtype=np.dtype("O")),
... "c": pd.Series([True, False, np.nan], dtype=np.dtype("O")),
... "d": pd.Series(["h", "i", np.nan], dtype=np.dtype("O")),
... "e": pd.Series([10, np.nan, 20], dtype=np.dtype("float")),
... "f": pd.Series([np.nan, 100.5, 200], dtype=np.dtype("float")),
... }
... )
Start with a DataFrame with default dtypes.
>>> df
a b c d e f
0 1 x True h 10.0 NaN
1 2 y False i NaN 100.5
2 3 z NaN NaN 20.0 200.0
>>> df.dtypes
a int32
b object
c object
d object
e float64
f float64
dtype: object
Convert the DataFrame to use best possible dtypes.
>>> dfn = df.convert_dtypes()
>>> dfn
a b c d e f
0 1 x True h 10 NaN
1 2 y False i <NA> 100.5
2 3 z <NA> <NA> 20 200.0
>>> dfn.dtypes
a Int32
b string
c boolean
d string
e Int64
f float64
dtype: object
Start with a Series of strings and missing data represented by ``np.nan``.
>>> s = pd.Series(["a", "b", np.nan])
>>> s
0 a
1 b
2 NaN
dtype: object
Obtain a Series with dtype ``StringDtype``.
>>> s.convert_dtypes()
0 a
1 b
2 <NA>
dtype: string
"""
if self.ndim == 1:
return self._convert_dtypes(
infer_objects, convert_string, convert_integer, convert_boolean
)
else:
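# DataFrame case: convert each column independently via the Series-level
# helper and reassemble with concat(copy=False) so the per-column results
# are reused rather than copied again.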
results = [
col._convert_dtypes(
infer_objects, convert_string, convert_integer, convert_boolean
)
for col_name, col in self.items()
]
result = pd.concat(results, axis=1, copy=False)
return result
# ----------------------------------------------------------------------
# Filling NA's
@doc(**_shared_doc_kwargs)
def fillna(
self: FrameOrSeries,
value=None,
method=None,
axis=None,
inplace: bool_t = False,
limit=None,
downcast=None,
) -> Optional[FrameOrSeries]:
"""
Fill NA/NaN values using the specified method.
Parameters
----------
value : scalar, dict, Series, or DataFrame
Value to use to fill holes (e.g. 0), alternately a
dict/Series/DataFrame of values specifying which value to use for
each index (for a Series) or column (for a DataFrame). Values not
in the dict/Series/DataFrame will not be filled. This value cannot
be a list.
method : {{'backfill', 'bfill', 'pad', 'ffill', None}}, default None
Method to use for filling holes in reindexed Series
pad / ffill: propagate last valid observation forward to next valid
backfill / bfill: use next valid observation to fill gap.
axis : {axes_single_arg}
Axis along which to fill missing values.
inplace : bool, default False
If True, fill in-place. Note: this will modify any
other views on this object (e.g., a no-copy slice for a column in a
DataFrame).
limit : int, default None
If method is specified, this is the maximum number of consecutive
NaN values to forward/backward fill. In other words, if there is
a gap with more than this number of consecutive NaNs, it will only
be partially filled. If method is not specified, this is the
maximum number of entries along the entire axis where NaNs will be
filled. Must be greater than 0 if not None.
downcast : dict, default is None
A dict of item->dtype of what to downcast if possible,
or the string 'infer' which will try to downcast to an appropriate
equal type (e.g. float64 to int64 if possible).
Returns
-------
{klass} or None
Object with missing values filled or None if ``inplace=True``.
See Also
--------
interpolate : Fill NaN values using interpolation.
reindex : Conform object to new index.
asfreq : Convert TimeSeries to specified frequency.
Examples
--------
>>> df = pd.DataFrame([[np.nan, 2, np.nan, 0],
... [3, 4, np.nan, 1],
... [np.nan, np.nan, np.nan, 5],
... [np.nan, 3, np.nan, 4]],
... columns=list('ABCD'))
>>> df
A B C D
0 NaN 2.0 NaN 0
1 3.0 4.0 NaN 1
2 NaN NaN NaN 5
3 NaN 3.0 NaN 4
Replace all NaN elements with 0s.
>>> df.fillna(0)
A B C D
0 0.0 2.0 0.0 0
1 3.0 4.0 0.0 1
2 0.0 0.0 0.0 5
3 0.0 3.0 0.0 4
We can also propagate non-null values forward or backward.
>>> df.fillna(method='ffill')
A B C D
0 NaN 2.0 NaN 0
1 3.0 4.0 NaN 1
2 3.0 4.0 NaN 5
3 3.0 3.0 NaN 4
Replace all NaN elements in column 'A', 'B', 'C', and 'D', with 0, 1,
2, and 3 respectively.
>>> values = {{'A': 0, 'B': 1, 'C': 2, 'D': 3}}
>>> df.fillna(value=values)
A B C D
0 0.0 2.0 2.0 0
1 3.0 4.0 2.0 1
2 0.0 1.0 2.0 5
3 0.0 3.0 2.0 4
Only replace the first NaN element.
>>> df.fillna(value=values, limit=1)
A B C D
0 0.0 2.0 2.0 0
1 3.0 4.0 NaN 1
2 NaN 1.0 NaN 5
3 NaN 3.0 NaN 4
"""
inplace = validate_bool_kwarg(inplace, "inplace")
value, method = validate_fillna_kwargs(value, method)
self._consolidate_inplace()
# set the default here, so functions examining the signature
# can detect if something was set (e.g. in groupby) (GH9221)
if axis is None:
axis = 0
axis = self._get_axis_number(axis)
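# Two fill strategies follow: when no explicit `value` is given, the
# pad/backfill `method` is applied block-wise via the internal interpolate
# machinery; otherwise the fill dispatches on the type of `value`
# (scalar, dict/Series per label, or an aligned DataFrame).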
if value is None:
if self._is_mixed_type and axis == 1:
if inplace:
raise NotImplementedError()
result = self.T.fillna(method=method, limit=limit).T
# need to downcast here because of all of the transposes
result._data = result._data.downcast()
return result
new_data = self._data.interpolate(
method=method,
axis=axis,
limit=limit,
inplace=inplace,
coerce=True,
downcast=downcast,
)
else:
if len(self._get_axis(axis)) == 0:
return self
if self.ndim == 1:
if isinstance(value, (dict, ABCSeries)):
value = create_series_with_explicit_dtype(
value, dtype_if_empty=object
)
elif not is_list_like(value):
pass
else:
raise TypeError(
'"value" parameter must be a scalar, dict '
"or Series, but you passed a "
f'"{type(value).__name__}"'
)
new_data = self._data.fillna(
value=value, limit=limit, inplace=inplace, downcast=downcast
)
elif isinstance(value, (dict, ABCSeries)):
if axis == 1:
raise NotImplementedError(
"Currently only can fill "
"with dict/Series column "
"by column"
)
result = self if inplace else self.copy()
for k, v in value.items():
if k not in result:
continue
obj = result[k]
obj.fillna(v, limit=limit, inplace=True, downcast=downcast)
return result if not inplace else None
elif not is_list_like(value):
new_data = self._data.fillna(
value=value, limit=limit, inplace=inplace, downcast=downcast
)
elif isinstance(value, ABCDataFrame) and self.ndim == 2:
new_data = self.where(self.notna(), value)
else:
raise ValueError(f"invalid fill value with a {type(value)}")
if inplace:
self._update_inplace(new_data)
return None
else:
return self._constructor(new_data).__finalize__(self)
def ffill(
self: FrameOrSeries,
axis=None,
inplace: bool_t = False,
limit=None,
downcast=None,
) -> Optional[FrameOrSeries]:
"""
Synonym for :meth:`DataFrame.fillna` with ``method='ffill'``.
Returns
-------
%(klass)s or None
Object with missing values filled or None if ``inplace=True``.
"""
return self.fillna(
method="ffill", axis=axis, inplace=inplace, limit=limit, downcast=downcast
)
def bfill(
self: FrameOrSeries,
axis=None,
inplace: bool_t = False,
limit=None,
downcast=None,
) -> Optional[FrameOrSeries]:
"""
Synonym for :meth:`DataFrame.fillna` with ``method='bfill'``.
Returns
-------
%(klass)s or None
Object with missing values filled or None if ``inplace=True``.
"""
return self.fillna(
method="bfill", axis=axis, inplace=inplace, limit=limit, downcast=downcast
)
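# Illustrative usage of the two synonyms (a sketch, not part of the
# upstream docstrings):
#   pd.Series([None, 2.0, None]).ffill()  ->  [NaN, 2.0, 2.0]
#   pd.Series([None, 2.0, None]).bfill()  ->  [2.0, 2.0, NaN]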
_shared_docs[
"replace"
] = """
Replace values given in `to_replace` with `value`.
Values of the %(klass)s are replaced with other values dynamically.
This differs from updating with ``.loc`` or ``.iloc``, which require
you to specify a location to update with some value.
Parameters
----------
to_replace : str, regex, list, dict, Series, int, float, or None
How to find the values that will be replaced.
* numeric, str or regex:
- numeric: numeric values equal to `to_replace` will be
replaced with `value`
- str: string exactly matching `to_replace` will be replaced
with `value`
- regex: regexs matching `to_replace` will be replaced with
`value`
* list of str, regex, or numeric:
- First, if `to_replace` and `value` are both lists, they
**must** be the same length.
- Second, if ``regex=True`` then all of the strings in **both**
lists will be interpreted as regexs otherwise they will match
directly. This doesn't matter much for `value` since there
are only a few possible substitution regexes you can use.
- str, regex and numeric rules apply as above.
* dict:
- Dicts can be used to specify different replacement values
for different existing values. For example,
``{'a': 'b', 'y': 'z'}`` replaces the value 'a' with 'b' and
'y' with 'z'. To use a dict in this way the `value`
parameter should be `None`.
- For a DataFrame a dict can specify that different values
should be replaced in different columns. For example,
``{'a': 1, 'b': 'z'}`` looks for the value 1 in column 'a'
and the value 'z' in column 'b' and replaces these values
with whatever is specified in `value`. The `value` parameter
should not be ``None`` in this case. You can treat this as a
special case of passing two lists except that you are
specifying the column to search in.
- For a DataFrame nested dictionaries, e.g.,
``{'a': {'b': np.nan}}``, are read as follows: look in column
'a' for the value 'b' and replace it with NaN. The `value`
parameter should be ``None`` to use a nested dict in this
way. You can nest regular expressions as well. Note that
column names (the top-level dictionary keys in a nested
dictionary) **cannot** be regular expressions.
* None:
- This means that the `regex` argument must be a string,
compiled regular expression, or list, dict, ndarray or
Series of such elements. If `value` is also ``None`` then
this **must** be a nested dictionary or Series.
See the examples section for examples of each of these.
value : scalar, dict, list, str, regex, default None
Value to replace any values matching `to_replace` with.
For a DataFrame a dict of values can be used to specify which
value to use for each column (columns not in the dict will not be
filled). Regular expressions, strings and lists or dicts of such
objects are also allowed.
inplace : bool, default False
If True, perform the replacement in place. Note: this will modify any
other views on this object (e.g. a column from a DataFrame).
Returns the caller if this is True.
limit : int, default None
Maximum size gap to forward or backward fill.
regex : bool or same types as `to_replace`, default False
Whether to interpret `to_replace` and/or `value` as regular
expressions. If this is ``True`` then `to_replace` *must* be a
string. Alternatively, this could be a regular expression or a
list, dict, or array of regular expressions in which case
`to_replace` must be ``None``.
method : {'pad', 'ffill', 'bfill', `None`}
The method to use for replacement when `to_replace` is a
scalar, list or tuple and `value` is ``None``.
.. versionchanged:: 0.23.0
Added to DataFrame.
Returns
-------
%(klass)s
Object after replacement.
Raises
------
AssertionError
* If `regex` is not a ``bool`` and `to_replace` is not
``None``.
TypeError
* If `to_replace` is not a scalar, array-like, ``dict``, or ``None``
* If `to_replace` is a ``dict`` and `value` is not a ``list``,
``dict``, ``ndarray``, or ``Series``
* If `to_replace` is ``None`` and `regex` is not compilable
into a regular expression or is a list, dict, ndarray, or
Series.
* When replacing multiple ``bool`` or ``datetime64`` objects and
the arguments to `to_replace` does not match the type of the
value being replaced
ValueError
* If a ``list`` or an ``ndarray`` is passed to `to_replace` and
`value` but they are not the same length.
See Also
--------
%(klass)s.fillna : Fill NA values.
%(klass)s.where : Replace values based on boolean condition.
Series.str.replace : Simple string replacement.
Notes
-----
* Regex substitution is performed under the hood with ``re.sub``. The
rules for substitution for ``re.sub`` are the same.
* Regular expressions will only substitute on strings, meaning you
cannot provide, for example, a regular expression matching floating
point numbers and expect the columns in your frame that have a
numeric dtype to be matched. However, if those floating point
numbers *are* strings, then you can do this.
* This method has *a lot* of options. You are encouraged to experiment
and play with this method to gain intuition about how it works.
* When a dict is used as the `to_replace` value, the dict's keys act
as the `to_replace` part and its values act as the `value`
parameter.
Examples
--------
**Scalar `to_replace` and `value`**
>>> s = pd.Series([0, 1, 2, 3, 4])
>>> s.replace(0, 5)
0 5
1 1
2 2
3 3
4 4
dtype: int64
>>> df = pd.DataFrame({'A': [0, 1, 2, 3, 4],
... 'B': [5, 6, 7, 8, 9],
... 'C': ['a', 'b', 'c', 'd', 'e']})
>>> df.replace(0, 5)
A B C
0 5 5 a
1 1 6 b
2 2 7 c
3 3 8 d
4 4 9 e
**List-like `to_replace`**
>>> df.replace([0, 1, 2, 3], 4)
A B C
0 4 5 a
1 4 6 b
2 4 7 c
3 4 8 d
4 4 9 e
>>> df.replace([0, 1, 2, 3], [4, 3, 2, 1])
A B C
0 4 5 a
1 3 6 b
2 2 7 c
3 1 8 d
4 4 9 e
>>> s.replace([1, 2], method='bfill')
0 0
1 3
2 3
3 3
4 4
dtype: int64
**dict-like `to_replace`**
>>> df.replace({0: 10, 1: 100})
A B C
0 10 5 a
1 100 6 b
2 2 7 c
3 3 8 d
4 4 9 e
>>> df.replace({'A': 0, 'B': 5}, 100)
A B C
0 100 100 a
1 1 6 b
2 2 7 c
3 3 8 d
4 4 9 e
>>> df.replace({'A': {0: 100, 4: 400}})
A B C
0 100 5 a
1 1 6 b
2 2 7 c
3 3 8 d
4 400 9 e
**Regular expression `to_replace`**
>>> df = pd.DataFrame({'A': ['bat', 'foo', 'bait'],
... 'B': ['abc', 'bar', 'xyz']})
>>> df.replace(to_replace=r'^ba.$', value='new', regex=True)
A B
0 new abc
1 foo new
2 bait xyz
>>> df.replace({'A': r'^ba.$'}, {'A': 'new'}, regex=True)
A B
0 new abc
1 foo bar
2 bait xyz
>>> df.replace(regex=r'^ba.$', value='new')
A B
0 new abc
1 foo new
2 bait xyz
>>> df.replace(regex={r'^ba.$': 'new', 'foo': 'xyz'})
A B
0 new abc
1 xyz new
2 bait xyz
>>> df.replace(regex=[r'^ba.$', 'foo'], value='new')
A B
0 new abc
1 new new
2 bait xyz
Note that when replacing multiple ``bool`` or ``datetime64`` objects,
the data types in the `to_replace` parameter must match the data
type of the value being replaced:
>>> df = pd.DataFrame({'A': [True, False, True],
... 'B': [False, True, False]})
>>> df.replace({'a string': 'new value', True: False}) # raises
Traceback (most recent call last):
...
TypeError: Cannot compare types 'ndarray(dtype=bool)' and 'str'
This raises a ``TypeError`` because one of the ``dict`` keys is not of
the correct type for replacement.
Compare the behavior of ``s.replace({'a': None})`` and
``s.replace('a', None)`` to understand the peculiarities
of the `to_replace` parameter:
>>> s = pd.Series([10, 'a', 'a', 'b', 'a'])
When one uses a dict as the `to_replace` value, it is like the
value(s) in the dict are equal to the `value` parameter.
``s.replace({'a': None})`` is equivalent to
``s.replace(to_replace={'a': None}, value=None, method=None)``:
>>> s.replace({'a': None})
0 10
1 None
2 None
3 b
4 None
dtype: object
When ``value=None`` and `to_replace` is a scalar, list or
tuple, `replace` uses the method parameter (default 'pad') to do the
replacement. This is why the 'a' values are replaced by 10
in rows 1 and 2, and by 'b' in row 4 in this case.
The command ``s.replace('a', None)`` is actually equivalent to
``s.replace(to_replace='a', value=None, method='pad')``:
>>> s.replace('a', None)
0 10
1 10
2 10
3 b
4 b
dtype: object
"""
@Appender(_shared_docs["replace"] % _shared_doc_kwargs)
def replace(
self,
to_replace=None,
value=None,
inplace=False,
limit=None,
regex=False,
method="pad",
):
if not (
is_scalar(to_replace)
or isinstance(to_replace, pd.Series)
or is_re_compilable(to_replace)
or is_list_like(to_replace)
):
raise TypeError(
"Expecting 'to_replace' to be either a scalar, array-like, "
"dict or None, got invalid type "
f"{repr(type(to_replace).__name__)}"
)
inplace = validate_bool_kwarg(inplace, "inplace")
if not is_bool(regex) and to_replace is not None:
raise AssertionError("'to_replace' must be 'None' if 'regex' is not a bool")
self._consolidate_inplace()
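# When `value` is None the meaning of `to_replace` shifts: scalars and
# lists fall back to method-based replacement via _single_replace, while
# dict-likes (or a dict-like `regex`) are unpacked into parallel
# to_replace/value mappings and re-dispatched through self.replace.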
if value is None:
# passing a single value that is scalar like
# when value is None (GH5319), for compat
if not is_dict_like(to_replace) and not is_dict_like(regex):
to_replace = [to_replace]
if isinstance(to_replace, (tuple, list)):
if isinstance(self, ABCDataFrame):
return self.apply(
_single_replace, args=(to_replace, method, inplace, limit)
)
return _single_replace(self, to_replace, method, inplace, limit)
if not is_dict_like(to_replace):
if not is_dict_like(regex):
raise TypeError(
'If "to_replace" and "value" are both None '
'and "to_replace" is not a list, then '
"regex must be a mapping"
)
to_replace = regex
regex = True
items = list(to_replace.items())
keys, values = zip(*items) if items else ([], [])
are_mappings = [is_dict_like(v) for v in values]
if any(are_mappings):
if not all(are_mappings):
raise TypeError(
"If a nested mapping is passed, all values "
"of the top level mapping must be mappings"
)
# passed a nested dict/Series
to_rep_dict = {}
value_dict = {}
for k, v in items:
keys, values = list(zip(*v.items())) or ([], [])
to_rep_dict[k] = list(keys)
value_dict[k] = list(values)
to_replace, value = to_rep_dict, value_dict
else:
to_replace, value = keys, values
return self.replace(
to_replace, value, inplace=inplace, limit=limit, regex=regex
)
else:
# need a non-zero len on all axes
if not self.size:
return self
new_data = self._data
if is_dict_like(to_replace):
if is_dict_like(value): # {'A' : NA} -> {'A' : 0}
res = self if inplace else self.copy()
for c, src in to_replace.items():
if c in value and c in self:
# object conversion is handled in
# series.replace which is called recursively
res[c] = res[c].replace(
to_replace=src,
value=value[c],
inplace=False,
regex=regex,
)
return None if inplace else res
# {'A': NA} -> 0
elif not is_list_like(value):
keys = [(k, src) for k, src in to_replace.items() if k in self]
keys_len = len(keys) - 1
for i, (k, src) in enumerate(keys):
convert = i == keys_len
new_data = new_data.replace(
to_replace=src,
value=value,
filter=[k],
inplace=inplace,
regex=regex,
convert=convert,
)
else:
raise TypeError("value argument must be scalar, dict, or Series")
elif is_list_like(to_replace): # [NA, ''] -> [0, 'missing']
if is_list_like(value):
if len(to_replace) != len(value):
raise ValueError(
f"Replacement lists must match in length. "
f"Expecting {len(to_replace)} got {len(value)} "
)
new_data = self._data.replace_list(
src_list=to_replace,
dest_list=value,
inplace=inplace,
regex=regex,
)
else: # [NA, ''] -> 0
new_data = self._data.replace(
to_replace=to_replace, value=value, inplace=inplace, regex=regex
)
elif to_replace is None:
if not (
is_re_compilable(regex)
or is_list_like(regex)
or is_dict_like(regex)
):
raise TypeError(
f"'regex' must be a string or a compiled regular expression "
f"or a list or dict of strings or regular expressions, "
f"you passed a {repr(type(regex).__name__)}"
)
return self.replace(
regex, value, inplace=inplace, limit=limit, regex=True
)
else:
# dest iterable dict-like
if is_dict_like(value): # NA -> {'A' : 0, 'B' : -1}
new_data = self._data
for k, v in value.items():
if k in self:
new_data = new_data.replace(
to_replace=to_replace,
value=v,
filter=[k],
inplace=inplace,
regex=regex,
)
elif not is_list_like(value): # NA -> 0
new_data = self._data.replace(
to_replace=to_replace, value=value, inplace=inplace, regex=regex
)
else:
raise TypeError(
f'Invalid "to_replace" type: {repr(type(to_replace).__name__)}'
)
if inplace:
self._update_inplace(new_data)
else:
return self._constructor(new_data).__finalize__(self)
_shared_docs[
"interpolate"
] = """
Please note that only ``method='linear'`` is supported for
DataFrame/Series with a MultiIndex.
Parameters
----------
method : str, default 'linear'
Interpolation technique to use. One of:
* 'linear': Ignore the index and treat the values as equally
spaced. This is the only method supported on MultiIndexes.
* 'time': Works on daily and higher resolution data to interpolate
given length of interval.
* 'index', 'values': use the actual numerical values of the index.
* 'pad': Fill in NaNs using existing values.
* 'nearest', 'zero', 'slinear', 'quadratic', 'cubic', 'spline',
'barycentric', 'polynomial': Passed to
`scipy.interpolate.interp1d`. These methods use the numerical
values of the index. Both 'polynomial' and 'spline' require that
you also specify an `order` (int), e.g.
``df.interpolate(method='polynomial', order=5)``.
* 'krogh', 'piecewise_polynomial', 'spline', 'pchip', 'akima':
Wrappers around the SciPy interpolation methods of similar
names. See `Notes`.
* 'from_derivatives': Refers to
`scipy.interpolate.BPoly.from_derivatives` which
replaces 'piecewise_polynomial' interpolation method in
scipy 0.18.
axis : {0 or 'index', 1 or 'columns', None}, default None
Axis to interpolate along.
limit : int, optional
Maximum number of consecutive NaNs to fill. Must be greater than
0.
inplace : bool, default False
Update the data in place if possible.
limit_direction : {'forward', 'backward', 'both'}, default 'forward'
If limit is specified, consecutive NaNs will be filled in this
direction.
limit_area : {`None`, 'inside', 'outside'}, default None
If limit is specified, consecutive NaNs will be filled with this
restriction.
* ``None``: No fill restriction.
* 'inside': Only fill NaNs surrounded by valid values
(interpolate).
* 'outside': Only fill NaNs outside valid values (extrapolate).
.. versionadded:: 0.23.0
downcast : optional, 'infer' or None, defaults to None
Downcast dtypes if possible.
**kwargs
Keyword arguments to pass on to the interpolating function.
Returns
-------
Series or DataFrame
Returns the same object type as the caller, interpolated at
some or all ``NaN`` values.
See Also
--------
fillna : Fill missing values using different methods.
scipy.interpolate.Akima1DInterpolator : Piecewise cubic polynomials
(Akima interpolator).
scipy.interpolate.BPoly.from_derivatives : Piecewise polynomial in the
Bernstein basis.
scipy.interpolate.interp1d : Interpolate a 1-D function.
scipy.interpolate.KroghInterpolator : Interpolate polynomial (Krogh
interpolator).
scipy.interpolate.PchipInterpolator : PCHIP 1-d monotonic cubic
interpolation.
scipy.interpolate.CubicSpline : Cubic spline data interpolator.
Notes
-----
The 'krogh', 'piecewise_polynomial', 'spline', 'pchip' and 'akima'
methods are wrappers around the respective SciPy implementations of
similar names. These use the actual numerical values of the index.
For more information on their behavior, see the
`SciPy documentation
<https://docs.scipy.org/doc/scipy/reference/interpolate.html#univariate-interpolation>`__
and `SciPy tutorial
<https://docs.scipy.org/doc/scipy/reference/tutorial/interpolate.html>`__.
Examples
--------
Filling in ``NaN`` in a :class:`~pandas.Series` via linear
interpolation.
>>> s = pd.Series([0, 1, np.nan, 3])
>>> s
0 0.0
1 1.0
2 NaN
3 3.0
dtype: float64
>>> s.interpolate()
0 0.0
1 1.0
2 2.0
3 3.0
dtype: float64
Filling in ``NaN`` in a Series by padding, but filling at most two
consecutive ``NaN`` at a time.
>>> s = pd.Series([np.nan, "single_one", np.nan,
... "fill_two_more", np.nan, np.nan, np.nan,
... 4.71, np.nan])
>>> s
0 NaN
1 single_one
2 NaN
3 fill_two_more
4 NaN
5 NaN
6 NaN
7 4.71
8 NaN
dtype: object
>>> s.interpolate(method='pad', limit=2)
0 NaN
1 single_one
2 single_one
3 fill_two_more
4 fill_two_more
5 fill_two_more
6 NaN
7 4.71
8 4.71
dtype: object
Filling in ``NaN`` in a Series via polynomial interpolation or splines:
Both 'polynomial' and 'spline' methods require that you also specify
an ``order`` (int).
>>> s = pd.Series([0, 2, np.nan, 8])
>>> s.interpolate(method='polynomial', order=2)
0 0.000000
1 2.000000
2 4.666667
3 8.000000
dtype: float64
Fill the DataFrame forward (that is, going down) along each column
using linear interpolation.
Note how the last entry in column 'a' is interpolated differently,
because there is no entry after it to use for interpolation.
Note how the first entry in column 'b' remains ``NaN``, because there
is no entry before it to use for interpolation.
>>> df = pd.DataFrame([(0.0, np.nan, -1.0, 1.0),
... (np.nan, 2.0, np.nan, np.nan),
... (2.0, 3.0, np.nan, 9.0),
... (np.nan, 4.0, -4.0, 16.0)],
... columns=list('abcd'))
>>> df
a b c d
0 0.0 NaN -1.0 1.0
1 NaN 2.0 NaN NaN
2 2.0 3.0 NaN 9.0
3 NaN 4.0 -4.0 16.0
>>> df.interpolate(method='linear', limit_direction='forward', axis=0)
a b c d
0 0.0 NaN -1.0 1.0
1 1.0 2.0 -2.0 5.0
2 2.0 3.0 -3.0 9.0
3 2.0 4.0 -4.0 16.0
Using polynomial interpolation.
>>> df['d'].interpolate(method='polynomial', order=2)
0 1.0
1 4.0
2 9.0
3 16.0
Name: d, dtype: float64
"""
@Appender(_shared_docs["interpolate"] % _shared_doc_kwargs)
def interpolate(
self,
method="linear",
axis=0,
limit=None,
inplace=False,
limit_direction="forward",
limit_area=None,
downcast=None,
**kwargs,
):
"""
Interpolate values according to different methods.
"""
inplace = validate_bool_kwarg(inplace, "inplace")
axis = self._get_axis_number(axis)
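# axis=1 is handled by transposing up front so the block-level
# interpolation below always runs along the index; the result is
# transposed back before returning.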
if axis == 0:
ax = self._info_axis_name
_maybe_transposed_self = self
elif axis == 1:
_maybe_transposed_self = self.T
ax = 1
ax = _maybe_transposed_self._get_axis_number(ax)
if _maybe_transposed_self.ndim == 2:
alt_ax = 1 - ax
else:
alt_ax = ax
if isinstance(_maybe_transposed_self.index, MultiIndex) and method != "linear":
raise ValueError(
"Only `method=linear` interpolation is supported on MultiIndexes."
)
if _maybe_transposed_self._data.get_dtype_counts().get("object") == len(
_maybe_transposed_self.T
):
raise TypeError(
"Cannot interpolate with all object-dtype columns "
"in the DataFrame. Try setting at least one "
"column to a numeric dtype."
)
# create/use the index
if method == "linear":
# prior default
index = np.arange(len(_maybe_transposed_self._get_axis(alt_ax)))
else:
index = _maybe_transposed_self._get_axis(alt_ax)
methods = {"index", "values", "nearest", "time"}
is_numeric_or_datetime = (
is_numeric_dtype(index)
or is_datetime64_any_dtype(index)
or is_timedelta64_dtype(index)
)
if method not in methods and not is_numeric_or_datetime:
raise ValueError(
"Index column must be numeric or datetime type when "
f"using {method} method other than linear. "
"Try setting a numeric or datetime index column before "
"interpolating."
)
if isna(index).any():
raise NotImplementedError(
"Interpolation with NaNs in the index "
"has not been implemented. Try filling "
"those NaNs before interpolating."
)
data = _maybe_transposed_self._data
new_data = data.interpolate(
method=method,
axis=ax,
index=index,
limit=limit,
limit_direction=limit_direction,
limit_area=limit_area,
inplace=inplace,
downcast=downcast,
**kwargs,
)
if inplace:
if axis == 1:
new_data = self._constructor(new_data).T._data
self._update_inplace(new_data)
else:
res = self._constructor(new_data).__finalize__(self)
if axis == 1:
res = res.T
return res
# ----------------------------------------------------------------------
# Timeseries Methods
def asof(self, where, subset=None):
"""
Return the last row(s) without any NaNs before `where`.
The last row (for each element in `where`, if list) without any
NaN is taken.
For a :class:`~pandas.DataFrame`, the last row without NaN is taken,
considering only the subset of columns (if not `None`).
If there is no good value, NaN is returned for a Series or
a Series of NaN values for a DataFrame.
Parameters
----------
where : date or array-like of dates
Date(s) before which the last row(s) are returned.
subset : str or array-like of str, default `None`
For DataFrame, if not `None`, only use these columns to
check for NaNs.
Returns
-------
scalar, Series, or DataFrame
The return can be:
* scalar : when `self` is a Series and `where` is a scalar
* Series: when `self` is a Series and `where` is an array-like,
or when `self` is a DataFrame and `where` is a scalar
* DataFrame : when `self` is a DataFrame and `where` is an
array-like
See Also
--------
merge_asof : Perform an asof merge. Similar to left join.
Notes
-----
Dates are assumed to be sorted. Raises if this is not the case.
Examples
--------
A Series and a scalar `where`.
>>> s = pd.Series([1, 2, np.nan, 4], index=[10, 20, 30, 40])
>>> s
10 1.0
20 2.0
30 NaN
40 4.0
dtype: float64
>>> s.asof(20)
2.0
For a sequence `where`, a Series is returned. The first value is
NaN, because the first element of `where` is before the first
index value.
>>> s.asof([5, 20])
5 NaN
20 2.0
dtype: float64
Missing values are not considered. The following is ``2.0``, not
NaN, even though NaN is at the index location for ``30``.
>>> s.asof(30)
2.0
Take all columns into consideration
>>> df = pd.DataFrame({'a': [10, 20, 30, 40, 50],
... 'b': [None, None, None, None, 500]},
... index=pd.DatetimeIndex(['2018-02-27 09:01:00',
... '2018-02-27 09:02:00',
... '2018-02-27 09:03:00',
... '2018-02-27 09:04:00',
... '2018-02-27 09:05:00']))
>>> df.asof(pd.DatetimeIndex(['2018-02-27 09:03:30',
... '2018-02-27 09:04:30']))
a b
2018-02-27 09:03:30 NaN NaN
2018-02-27 09:04:30 NaN NaN
Take a single column into consideration
>>> df.asof(pd.DatetimeIndex(['2018-02-27 09:03:30',
... '2018-02-27 09:04:30']),
... subset=['a'])
a b
2018-02-27 09:03:30 30.0 NaN
2018-02-27 09:04:30 40.0 NaN
"""
if isinstance(where, str):
where = Timestamp(where)
if not self.index.is_monotonic:
raise ValueError("asof requires a sorted index")
is_series = isinstance(self, ABCSeries)
if is_series:
if subset is not None:
raise ValueError("subset is not valid for Series")
else:
if subset is None:
subset = self.columns
if not is_list_like(subset):
subset = [subset]
is_list = is_list_like(where)
if not is_list:
start = self.index[0]
if isinstance(self.index, PeriodIndex):
where = Period(where, freq=self.index.freq)
if where < start:
if not is_series:
from pandas import Series
return Series(index=self.columns, name=where, dtype=np.float64)
return np.nan
# It's always much faster to use a *while* loop here for
# Series than pre-computing all the NAs. However a
# *while* loop is extremely expensive for DataFrame
# so we later pre-compute all the NAs and use the same
# code path whether *where* is a scalar or list.
# See PR: https://github.com/pandas-dev/pandas/pull/14476
if is_series:
loc = self.index.searchsorted(where, side="right")
if loc > 0:
loc -= 1
values = self._values
while loc > 0 and isna(values[loc]):
loc -= 1
return values[loc]
if not isinstance(where, Index):
where = Index(where) if is_list else Index([where])
nulls = self.isna() if is_series else self[subset].isna().any(1)
if nulls.all():
if is_series:
return self._constructor(np.nan, index=where, name=self.name)
elif is_list:
from pandas import DataFrame
return DataFrame(np.nan, index=where, columns=self.columns)
else:
from pandas import Series
return Series(np.nan, index=self.columns, name=where[0])
locs = self.index.asof_locs(where, ~(nulls.values))
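# asof_locs yields, for each entry of `where`, the position of the last
# non-null row at or before it; -1 marks "no valid observation" and is
# masked to NaN below after the positional take.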
# mask the missing
missing = locs == -1
data = self.take(locs)
data.index = where
data.loc[missing] = np.nan
return data if is_list else data.iloc[-1]
# ----------------------------------------------------------------------
# Action Methods
_shared_docs[
"isna"
] = """
Detect missing values.
Return a boolean same-sized object indicating if the values are NA.
NA values, such as None or :attr:`numpy.NaN`, gets mapped to True
values.
Everything else gets mapped to False values. Characters such as empty
strings ``''`` or :attr:`numpy.inf` are not considered NA values
(unless you set ``pandas.options.mode.use_inf_as_na = True``).
Returns
-------
%(klass)s
Mask of bool values for each element in %(klass)s that
indicates whether an element is not an NA value.
See Also
--------
%(klass)s.isnull : Alias of isna.
%(klass)s.notna : Boolean inverse of isna.
%(klass)s.dropna : Omit axes labels with missing values.
isna : Top-level isna.
Examples
--------
Show which entries in a DataFrame are NA.
>>> df = pd.DataFrame({'age': [5, 6, np.NaN],
... 'born': [pd.NaT, pd.Timestamp('1939-05-27'),
... pd.Timestamp('1940-04-25')],
... 'name': ['Alfred', 'Batman', ''],
... 'toy': [None, 'Batmobile', 'Joker']})
>>> df
age born name toy
0 5.0 NaT Alfred None
1 6.0 1939-05-27 Batman Batmobile
2 NaN 1940-04-25 Joker
>>> df.isna()
age born name toy
0 False True False True
1 False False False False
2 True False False False
Show which entries in a Series are NA.
>>> ser = pd.Series([5, 6, np.NaN])
>>> ser
0 5.0
1 6.0
2 NaN
dtype: float64
>>> ser.isna()
0 False
1 False
2 True
dtype: bool
"""
@Appender(_shared_docs["isna"] % _shared_doc_kwargs)
def isna(self: FrameOrSeries) -> FrameOrSeries:
return isna(self).__finalize__(self)
@Appender(_shared_docs["isna"] % _shared_doc_kwargs)
def isnull(self: FrameOrSeries) -> FrameOrSeries:
return isna(self).__finalize__(self)
_shared_docs[
"notna"
] = """
Detect existing (non-missing) values.
Return a boolean same-sized object indicating if the values are not NA.
Non-missing values get mapped to True. Characters such as empty
strings ``''`` or :attr:`numpy.inf` are not considered NA values
(unless you set ``pandas.options.mode.use_inf_as_na = True``).
NA values, such as None or :attr:`numpy.NaN`, get mapped to False
values.
Returns
-------
%(klass)s
Mask of bool values for each element in %(klass)s that
indicates whether an element is not an NA value.
See Also
--------
%(klass)s.notnull : Alias of notna.
%(klass)s.isna : Boolean inverse of notna.
%(klass)s.dropna : Omit axes labels with missing values.
notna : Top-level notna.
Examples
--------
Show which entries in a DataFrame are not NA.
>>> df = pd.DataFrame({'age': [5, 6, np.NaN],
... 'born': [pd.NaT, pd.Timestamp('1939-05-27'),
... pd.Timestamp('1940-04-25')],
... 'name': ['Alfred', 'Batman', ''],
... 'toy': [None, 'Batmobile', 'Joker']})
>>> df
age born name toy
0 5.0 NaT Alfred None
1 6.0 1939-05-27 Batman Batmobile
2 NaN 1940-04-25 Joker
>>> df.notna()
age born name toy
0 True False True False
1 True True True True
2 False True True True
Show which entries in a Series are not NA.
>>> ser = pd.Series([5, 6, np.NaN])
>>> ser
0 5.0
1 6.0
2 NaN
dtype: float64
>>> ser.notna()
0 True
1 True
2 False
dtype: bool
"""
@Appender(_shared_docs["notna"] % _shared_doc_kwargs)
def notna(self: FrameOrSeries) -> FrameOrSeries:
return notna(self).__finalize__(self)
@Appender(_shared_docs["notna"] % _shared_doc_kwargs)
def notnull(self: FrameOrSeries) -> FrameOrSeries:
return notna(self).__finalize__(self)
def _clip_with_scalar(self, lower, upper, inplace: bool_t = False):
if (lower is not None and np.any(isna(lower))) or (
upper is not None and np.any(isna(upper))
):
raise ValueError("Cannot use an NA value as a clip threshold")
result = self
mask = isna(self.values)
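# Remember the original NA positions: the comparisons below evaluate to
# False for NaN, so `where` would overwrite them with the bound; they are
# restored to NaN at the end.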
with np.errstate(all="ignore"):
if upper is not None:
subset = self.to_numpy() <= upper
result = result.where(subset, upper, axis=None, inplace=False)
if lower is not None:
subset = self.to_numpy() >= lower
result = result.where(subset, lower, axis=None, inplace=False)
if np.any(mask):
result[mask] = np.nan
if inplace:
self._update_inplace(result)
else:
return result
def _clip_with_one_bound(self, threshold, method, axis, inplace):
if axis is not None:
axis = self._get_axis_number(axis)
# method is self.le for upper bound and self.ge for lower bound
if is_scalar(threshold) and is_number(threshold):
if method.__name__ == "le":
return self._clip_with_scalar(None, threshold, inplace=inplace)
return self._clip_with_scalar(threshold, None, inplace=inplace)
subset = method(threshold, axis=axis) | isna(self)
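# `subset` marks entries that already satisfy the bound (or are NA);
# everything else is replaced by the threshold via `where` below.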
# GH #15390
# In order for where method to work, the threshold must
# be transformed to NDFrame from other array like structure.
if (not isinstance(threshold, ABCSeries)) and is_list_like(threshold):
if isinstance(self, ABCSeries):
threshold = self._constructor(threshold, index=self.index)
else:
threshold = _align_method_FRAME(self, threshold, axis, flex=None)[1]
return self.where(subset, threshold, axis=axis, inplace=inplace)
def clip(
self: FrameOrSeries,
lower=None,
upper=None,
axis=None,
inplace: bool_t = False,
*args,
**kwargs,
) -> FrameOrSeries:
"""
Trim values at input threshold(s).
Assigns values outside boundary to boundary values. Thresholds
can be singular values or array like, and in the latter case
the clipping is performed element-wise in the specified axis.
Parameters
----------
lower : float or array_like, default None
Minimum threshold value. All values below this
threshold will be set to it.
upper : float or array_like, default None
Maximum threshold value. All values above this
threshold will be set to it.
axis : int or str axis name, optional
Align object with lower and upper along the given axis.
inplace : bool, default False
Whether to perform the operation in place on the data.
.. versionadded:: 0.21.0
*args, **kwargs
Additional keywords have no effect but might be accepted
for compatibility with numpy.
Returns
-------
Series or DataFrame
Same type as calling object with the values outside the
clip boundaries replaced.
Examples
--------
>>> data = {'col_0': [9, -3, 0, -1, 5], 'col_1': [-2, -7, 6, 8, -5]}
>>> df = pd.DataFrame(data)
>>> df
col_0 col_1
0 9 -2
1 -3 -7
2 0 6
3 -1 8
4 5 -5
Clips per column using lower and upper thresholds:
>>> df.clip(-4, 6)
col_0 col_1
0 6 -2
1 -3 -4
2 0 6
3 -1 6
4 5 -4
Clips using specific lower and upper thresholds per column element:
>>> t = pd.Series([2, -4, -1, 6, 3])
>>> t
0 2
1 -4
2 -1
3 6
4 3
dtype: int64
>>> df.clip(t, t + 4, axis=0)
col_0 col_1
0 6 2
1 -3 -4
2 0 3
3 6 8
4 5 3
"""
inplace = validate_bool_kwarg(inplace, "inplace")
axis = nv.validate_clip_with_axis(axis, args, kwargs)
if axis is not None:
axis = self._get_axis_number(axis)
# GH 17276
# numpy doesn't like NaN as a clip value
# so ignore
# GH 19992
# numpy doesn't drop a list-like bound containing NaN
if not is_list_like(lower) and np.any(isna(lower)):
lower = None
if not is_list_like(upper) and np.any(isna(upper)):
upper = None
# GH 2747 (arguments were reversed)
if lower is not None and upper is not None:
if is_scalar(lower) and is_scalar(upper):
lower, upper = min(lower, upper), max(lower, upper)
# fast-path for scalars
if (lower is None or (is_scalar(lower) and is_number(lower))) and (
upper is None or (is_scalar(upper) and is_number(upper))
):
return self._clip_with_scalar(lower, upper, inplace=inplace)
result = self
if lower is not None:
result = result._clip_with_one_bound(
lower, method=self.ge, axis=axis, inplace=inplace
)
if upper is not None:
if inplace:
result = self
result = result._clip_with_one_bound(
upper, method=self.le, axis=axis, inplace=inplace
)
return result
_shared_docs[
"groupby"
] = """
Group %(klass)s using a mapper or by a Series of columns.
A groupby operation involves some combination of splitting the
object, applying a function, and combining the results. This can be
used to group large amounts of data and compute operations on these
groups.
Parameters
----------
by : mapping, function, label, or list of labels
Used to determine the groups for the groupby.
If ``by`` is a function, it's called on each value of the object's
index. If a dict or Series is passed, the Series or dict VALUES
will be used to determine the groups (the Series' values are first
aligned; see ``.align()`` method). If an ndarray is passed, the
values are used as-is to determine the groups. A label or list of
labels may be passed to group by the columns in ``self``. Notice
that a tuple is interpreted as a (single) key.
axis : {0 or 'index', 1 or 'columns'}, default 0
Split along rows (0) or columns (1).
level : int, level name, or sequence of such, default None
If the axis is a MultiIndex (hierarchical), group by a particular
level or levels.
as_index : bool, default True
For aggregated output, return object with group labels as the
index. Only relevant for DataFrame input. as_index=False is
effectively "SQL-style" grouped output.
sort : bool, default True
Sort group keys. Get better performance by turning this off.
Note this does not influence the order of observations within each
group. Groupby preserves the order of rows within each group.
group_keys : bool, default True
When calling apply, add group keys to index to identify pieces.
squeeze : bool, default False
Reduce the dimensionality of the return type if possible,
otherwise return a consistent type.
observed : bool, default False
This only applies if any of the groupers are Categoricals.
If True: only show observed values for categorical groupers.
If False: show all values for categorical groupers.
.. versionadded:: 0.23.0
Returns
-------
%(klass)sGroupBy
Returns a groupby object that contains information about the groups.
See Also
--------
resample : Convenience method for frequency conversion and resampling
of time series.
Notes
-----
See the `user guide
<https://pandas.pydata.org/pandas-docs/stable/groupby.html>`_ for more.
"""
def asfreq(
self: FrameOrSeries,
freq,
method=None,
how: Optional[str] = None,
normalize: bool_t = False,
fill_value=None,
) -> FrameOrSeries:
"""
Convert TimeSeries to specified frequency.
Optionally provide filling method to pad/backfill missing values.
Returns the original data conformed to a new index with the specified
frequency. ``resample`` is more appropriate if an operation, such as
summarization, is necessary to represent the data at the new frequency.
Parameters
----------
freq : DateOffset or str
method : {'backfill'/'bfill', 'pad'/'ffill'}, default None
Method to use for filling holes in reindexed Series (note this
does not fill NaNs that already were present):
* 'pad' / 'ffill': propagate last valid observation forward to next
valid
* 'backfill' / 'bfill': use NEXT valid observation to fill.
how : {'start', 'end'}, default end
For PeriodIndex only (see PeriodIndex.asfreq).
normalize : bool, default False
Whether to reset output index to midnight.
fill_value : scalar, optional
Value to use for missing values, applied during upsampling (note
this does not fill NaNs that already were present).
Returns
-------
converted : same type as caller
See Also
--------
reindex
Notes
-----
To learn more about the frequency strings, please see `this link
<https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases>`__.
Examples
--------
Start by creating a series with 4 one minute timestamps.
>>> index = pd.date_range('1/1/2000', periods=4, freq='T')
>>> series = pd.Series([0.0, None, 2.0, 3.0], index=index)
>>> df = pd.DataFrame({'s':series})
>>> df
s
2000-01-01 00:00:00 0.0
2000-01-01 00:01:00 NaN
2000-01-01 00:02:00 2.0
2000-01-01 00:03:00 3.0
Upsample the series into 30 second bins.
>>> df.asfreq(freq='30S')
s
2000-01-01 00:00:00 0.0
2000-01-01 00:00:30 NaN
2000-01-01 00:01:00 NaN
2000-01-01 00:01:30 NaN
2000-01-01 00:02:00 2.0
2000-01-01 00:02:30 NaN
2000-01-01 00:03:00 3.0
Upsample again, providing a ``fill value``.
>>> df.asfreq(freq='30S', fill_value=9.0)
s
2000-01-01 00:00:00 0.0
2000-01-01 00:00:30 9.0
2000-01-01 00:01:00 NaN
2000-01-01 00:01:30 9.0
2000-01-01 00:02:00 2.0
2000-01-01 00:02:30 9.0
2000-01-01 00:03:00 3.0
Upsample again, providing a ``method``.
>>> df.asfreq(freq='30S', method='bfill')
s
2000-01-01 00:00:00 0.0
2000-01-01 00:00:30 NaN
2000-01-01 00:01:00 NaN
2000-01-01 00:01:30 2.0
2000-01-01 00:02:00 2.0
2000-01-01 00:02:30 3.0
2000-01-01 00:03:00 3.0
"""
from pandas.core.resample import asfreq
return asfreq(
self,
freq,
method=method,
how=how,
normalize=normalize,
fill_value=fill_value,
)
def at_time(
self: FrameOrSeries, time, asof: bool_t = False, axis=None
) -> FrameOrSeries:
"""
Select values at particular time of day (e.g., 9:30AM).
Parameters
----------
time : datetime.time or str
axis : {0 or 'index', 1 or 'columns'}, default 0
.. versionadded:: 0.24.0
Returns
-------
Series or DataFrame
Raises
------
TypeError
If the index is not a :class:`DatetimeIndex`
See Also
--------
between_time : Select values between particular times of the day.
first : Select initial periods of time series based on a date offset.
last : Select final periods of time series based on a date offset.
DatetimeIndex.indexer_at_time : Get just the index locations for
values at particular time of the day.
Examples
--------
>>> i = pd.date_range('2018-04-09', periods=4, freq='12H')
>>> ts = pd.DataFrame({'A': [1, 2, 3, 4]}, index=i)
>>> ts
A
2018-04-09 00:00:00 1
2018-04-09 12:00:00 2
2018-04-10 00:00:00 3
2018-04-10 12:00:00 4
>>> ts.at_time('12:00')
A
2018-04-09 12:00:00 2
2018-04-10 12:00:00 4
"""
if axis is None:
axis = self._stat_axis_number
axis = self._get_axis_number(axis)
index = self._get_axis(axis)
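# A non-DatetimeIndex has no indexer_at_time, so the AttributeError is
# re-raised as the TypeError documented above.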
try:
indexer = index.indexer_at_time(time, asof=asof)
except AttributeError as err:
raise TypeError("Index must be DatetimeIndex") from err
return self._take_with_is_copy(indexer, axis=axis)
def between_time(
self: FrameOrSeries,
start_time,
end_time,
include_start: bool_t = True,
include_end: bool_t = True,
axis=None,
) -> FrameOrSeries:
"""
Select values between particular times of the day (e.g., 9:00-9:30 AM).
By setting ``start_time`` to be later than ``end_time``,
you can get the times that are *not* between the two times.
Parameters
----------
start_time : datetime.time or str
Initial time as a time filter limit.
end_time : datetime.time or str
End time as a time filter limit.
include_start : bool, default True
Whether the start time needs to be included in the result.
include_end : bool, default True
Whether the end time needs to be included in the result.
axis : {0 or 'index', 1 or 'columns'}, default 0
Determine range time on index or columns value.
.. versionadded:: 0.24.0
Returns
-------
Series or DataFrame
Data from the original object filtered to the specified dates range.
Raises
------
TypeError
If the index is not a :class:`DatetimeIndex`
See Also
--------
at_time : Select values at a particular time of the day.
first : Select initial periods of time series based on a date offset.
last : Select final periods of time series based on a date offset.
DatetimeIndex.indexer_between_time : Get just the index locations for
values between particular times of the day.
Examples
--------
>>> i = pd.date_range('2018-04-09', periods=4, freq='1D20min')
>>> ts = pd.DataFrame({'A': [1, 2, 3, 4]}, index=i)
>>> ts
A
2018-04-09 00:00:00 1
2018-04-10 00:20:00 2
2018-04-11 00:40:00 3
2018-04-12 01:00:00 4
>>> ts.between_time('0:15', '0:45')
A
2018-04-10 00:20:00 2
2018-04-11 00:40:00 3
You get the times that are *not* between two times by setting
``start_time`` later than ``end_time``:
>>> ts.between_time('0:45', '0:15')
A
2018-04-09 00:00:00 1
2018-04-12 01:00:00 4
"""
if axis is None:
axis = self._stat_axis_number
axis = self._get_axis_number(axis)
index = self._get_axis(axis)
try:
indexer = index.indexer_between_time(
start_time,
end_time,
include_start=include_start,
include_end=include_end,
)
except AttributeError as err:
raise TypeError("Index must be DatetimeIndex") from err
return self._take_with_is_copy(indexer, axis=axis)
def resample(
self,
rule,
axis=0,
closed: Optional[str] = None,
label: Optional[str] = None,
convention: str = "start",
kind: Optional[str] = None,
loffset=None,
base: int = 0,
on=None,
level=None,
) -> "Resampler":
"""
Resample time-series data.
Convenience method for frequency conversion and resampling of time
series. Object must have a datetime-like index (`DatetimeIndex`,
`PeriodIndex`, or `TimedeltaIndex`), or pass datetime-like values
to the `on` or `level` keyword.
Parameters
----------
rule : DateOffset, Timedelta or str
The offset string or object representing target conversion.
axis : {0 or 'index', 1 or 'columns'}, default 0
Which axis to use for up- or down-sampling. For `Series` this
will default to 0, i.e. along the rows. Must be
`DatetimeIndex`, `TimedeltaIndex` or `PeriodIndex`.
closed : {'right', 'left'}, default None
Which side of bin interval is closed. The default is 'left'
for all frequency offsets except for 'M', 'A', 'Q', 'BM',
'BA', 'BQ', and 'W' which all have a default of 'right'.
label : {'right', 'left'}, default None
Which bin edge label to label bucket with. The default is 'left'
for all frequency offsets except for 'M', 'A', 'Q', 'BM',
'BA', 'BQ', and 'W' which all have a default of 'right'.
convention : {'start', 'end', 's', 'e'}, default 'start'
For `PeriodIndex` only, controls whether to use the start or
end of `rule`.
kind : {'timestamp', 'period'}, optional, default None
Pass 'timestamp' to convert the resulting index to a
`DateTimeIndex` or 'period' to convert it to a `PeriodIndex`.
By default the input representation is retained.
loffset : timedelta, default None
Adjust the resampled time labels.
base : int, default 0
For frequencies that evenly subdivide 1 day, the "origin" of the
aggregated intervals. For example, for '5min' frequency, base could
range from 0 through 4. Defaults to 0.
on : str, optional
For a DataFrame, column to use instead of index for resampling.
Column must be datetime-like.
level : str or int, optional
For a MultiIndex, level (name or number) to use for
resampling. `level` must be datetime-like.
Returns
-------
Resampler object
See Also
--------
groupby : Group by mapping, function, label, or list of labels.
Series.resample : Resample a Series.
DataFrame.resample: Resample a DataFrame.
Notes
-----
See the `user guide
<https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#resampling>`_
for more.
To learn more about the offset strings, please see `this link
<https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#dateoffset-objects>`__.
Examples
--------
Start by creating a series with 9 one minute timestamps.
>>> index = pd.date_range('1/1/2000', periods=9, freq='T')
>>> series = pd.Series(range(9), index=index)
>>> series
2000-01-01 00:00:00 0
2000-01-01 00:01:00 1
2000-01-01 00:02:00 2
2000-01-01 00:03:00 3
2000-01-01 00:04:00 4
2000-01-01 00:05:00 5
2000-01-01 00:06:00 6
2000-01-01 00:07:00 7
2000-01-01 00:08:00 8
Freq: T, dtype: int64
Downsample the series into 3 minute bins and sum the values
of the timestamps falling into a bin.
>>> series.resample('3T').sum()
2000-01-01 00:00:00 3
2000-01-01 00:03:00 12
2000-01-01 00:06:00 21
Freq: 3T, dtype: int64
Downsample the series into 3 minute bins as above, but label each
bin using the right edge instead of the left. Please note that the
value in the bucket used as the label is not included in the bucket
it labels. For example, in the original series the
bucket ``2000-01-01 00:03:00`` contains the value 3, but the summed
value in the resampled bucket with the label ``2000-01-01 00:03:00``
does not include 3 (if it did, the summed value would be 6, not 3).
To include this value close the right side of the bin interval as
illustrated in the example below this one.
>>> series.resample('3T', label='right').sum()
2000-01-01 00:03:00 3
2000-01-01 00:06:00 12
2000-01-01 00:09:00 21
Freq: 3T, dtype: int64
Downsample the series into 3 minute bins as above, but close the right
side of the bin interval.
>>> series.resample('3T', label='right', closed='right').sum()
2000-01-01 00:00:00 0
2000-01-01 00:03:00 6
2000-01-01 00:06:00 15
2000-01-01 00:09:00 15
Freq: 3T, dtype: int64
Upsample the series into 30 second bins.
>>> series.resample('30S').asfreq()[0:5] # Select first 5 rows
2000-01-01 00:00:00 0.0
2000-01-01 00:00:30 NaN
2000-01-01 00:01:00 1.0
2000-01-01 00:01:30 NaN
2000-01-01 00:02:00 2.0
Freq: 30S, dtype: float64
Upsample the series into 30 second bins and fill the ``NaN``
values using the ``pad`` method.
>>> series.resample('30S').pad()[0:5]
2000-01-01 00:00:00 0
2000-01-01 00:00:30 0
2000-01-01 00:01:00 1
2000-01-01 00:01:30 1
2000-01-01 00:02:00 2
Freq: 30S, dtype: int64
Upsample the series into 30 second bins and fill the
``NaN`` values using the ``bfill`` method.
>>> series.resample('30S').bfill()[0:5]
2000-01-01 00:00:00 0
2000-01-01 00:00:30 1
2000-01-01 00:01:00 1
2000-01-01 00:01:30 2
2000-01-01 00:02:00 2
Freq: 30S, dtype: int64
Pass a custom function via ``apply``
>>> def custom_resampler(array_like):
... return np.sum(array_like) + 5
...
>>> series.resample('3T').apply(custom_resampler)
2000-01-01 00:00:00 8
2000-01-01 00:03:00 17
2000-01-01 00:06:00 26
Freq: 3T, dtype: int64
For a Series with a PeriodIndex, the keyword `convention` can be
used to control whether to use the start or end of `rule`.
Resample a year by quarter using 'start' `convention`. Values are
assigned to the first quarter of the period.
>>> s = pd.Series([1, 2], index=pd.period_range('2012-01-01',
... freq='A',
... periods=2))
>>> s
2012 1
2013 2
Freq: A-DEC, dtype: int64
>>> s.resample('Q', convention='start').asfreq()
2012Q1 1.0
2012Q2 NaN
2012Q3 NaN
2012Q4 NaN
2013Q1 2.0
2013Q2 NaN
2013Q3 NaN
2013Q4 NaN
Freq: Q-DEC, dtype: float64
Resample quarters by month using 'end' `convention`. Values are
assigned to the last month of the period.
>>> q = pd.Series([1, 2, 3, 4], index=pd.period_range('2018-01-01',
... freq='Q',
... periods=4))
>>> q
2018Q1 1
2018Q2 2
2018Q3 3
2018Q4 4
Freq: Q-DEC, dtype: int64
>>> q.resample('M', convention='end').asfreq()
2018-03 1.0
2018-04 NaN
2018-05 NaN
2018-06 2.0
2018-07 NaN
2018-08 NaN
2018-09 3.0
2018-10 NaN
2018-11 NaN
2018-12 4.0
Freq: M, dtype: float64
For DataFrame objects, the keyword `on` can be used to specify the
column instead of the index for resampling.
>>> d = dict({'price': [10, 11, 9, 13, 14, 18, 17, 19],
... 'volume': [50, 60, 40, 100, 50, 100, 40, 50]})
>>> df = pd.DataFrame(d)
>>> df['week_starting'] = pd.date_range('01/01/2018',
... periods=8,
... freq='W')
>>> df
price volume week_starting
0 10 50 2018-01-07
1 11 60 2018-01-14
2 9 40 2018-01-21
3 13 100 2018-01-28
4 14 50 2018-02-04
5 18 100 2018-02-11
6 17 40 2018-02-18
7 19 50 2018-02-25
>>> df.resample('M', on='week_starting').mean()
price volume
week_starting
2018-01-31 10.75 62.5
2018-02-28 17.00 60.0
For a DataFrame with MultiIndex, the keyword `level` can be used to
specify on which level the resampling needs to take place.
>>> days = pd.date_range('1/1/2000', periods=4, freq='D')
>>> d2 = dict({'price': [10, 11, 9, 13, 14, 18, 17, 19],
... 'volume': [50, 60, 40, 100, 50, 100, 40, 50]})
>>> df2 = pd.DataFrame(d2,
... index=pd.MultiIndex.from_product([days,
... ['morning',
... 'afternoon']]
... ))
>>> df2
price volume
2000-01-01 morning 10 50
afternoon 11 60
2000-01-02 morning 9 40
afternoon 13 100
2000-01-03 morning 14 50
afternoon 18 100
2000-01-04 morning 17 40
afternoon 19 50
>>> df2.resample('D', level=0).sum()
price volume
2000-01-01 21 110
2000-01-02 22 140
2000-01-03 32 150
2000-01-04 36 90
"""
from pandas.core.resample import get_resampler
axis = self._get_axis_number(axis)
return get_resampler(
self,
freq=rule,
label=label,
closed=closed,
axis=axis,
kind=kind,
loffset=loffset,
convention=convention,
base=base,
key=on,
level=level,
)
def first(self: FrameOrSeries, offset) -> FrameOrSeries:
"""
Method to subset initial periods of time series data based on a date offset.
Parameters
----------
offset : str, DateOffset, dateutil.relativedelta
Returns
-------
subset : same type as caller
Raises
------
TypeError
If the index is not a :class:`DatetimeIndex`
See Also
--------
last : Select final periods of time series based on a date offset.
at_time : Select values at a particular time of the day.
between_time : Select values between particular times of the day.
Examples
--------
>>> i = pd.date_range('2018-04-09', periods=4, freq='2D')
>>> ts = pd.DataFrame({'A': [1,2,3,4]}, index=i)
>>> ts
A
2018-04-09 1
2018-04-11 2
2018-04-13 3
2018-04-15 4
Get the rows for the first 3 days:
>>> ts.first('3D')
A
2018-04-09 1
2018-04-11 2
Notice that data for the first 3 calendar days was returned, not the first
3 days observed in the dataset; therefore data for 2018-04-13 was
not returned.
"""
if not isinstance(self.index, DatetimeIndex):
raise TypeError("'first' only supports a DatetimeIndex index")
if len(self.index) == 0:
return self
offset = to_offset(offset)
end_date = end = self.index[0] + offset
# Tick-like, e.g. 3 weeks
if not offset.is_anchored() and hasattr(offset, "_inc"):
if end_date in self.index:
end = self.index.searchsorted(end_date, side="left")
return self.iloc[:end]
return self.loc[:end]
def last(self: FrameOrSeries, offset) -> FrameOrSeries:
"""
Method to subset final periods of time series data based on a date offset.
Parameters
----------
offset : str, DateOffset, dateutil.relativedelta
Returns
-------
subset : same type as caller
Raises
------
TypeError
If the index is not a :class:`DatetimeIndex`
See Also
--------
first : Select initial periods of time series based on a date offset.
at_time : Select values at a particular time of the day.
between_time : Select values between particular times of the day.
Examples
--------
>>> i = pd.date_range('2018-04-09', periods=4, freq='2D')
>>> ts = pd.DataFrame({'A': [1, 2, 3, 4]}, index=i)
>>> ts
A
2018-04-09 1
2018-04-11 2
2018-04-13 3
2018-04-15 4
Get the rows for the last 3 days:
>>> ts.last('3D')
A
2018-04-13 3
2018-04-15 4
Notice that data for the last 3 calendar days was returned, not the last
3 observed days in the dataset; therefore data for 2018-04-11 was
not returned.
"""
if not isinstance(self.index, DatetimeIndex):
raise TypeError("'last' only supports a DatetimeIndex index")
if len(self.index) == 0:
return self
offset = to_offset(offset)
start_date = self.index[-1] - offset
start = self.index.searchsorted(start_date, side="right")
return self.iloc[start:]
def rank(
self: FrameOrSeries,
axis=0,
method: str = "average",
numeric_only: Optional[bool_t] = None,
na_option: str = "keep",
ascending: bool_t = True,
pct: bool_t = False,
) -> FrameOrSeries:
"""
Compute numerical data ranks (1 through n) along axis.
By default, equal values are assigned a rank that is the average of the
ranks of those values.
Parameters
----------
axis : {0 or 'index', 1 or 'columns'}, default 0
Index to direct ranking.
method : {'average', 'min', 'max', 'first', 'dense'}, default 'average'
How to rank the group of records that have the same value (i.e. ties):
* average: average rank of the group
* min: lowest rank in the group
* max: highest rank in the group
* first: ranks assigned in order they appear in the array
* dense: like 'min', but rank always increases by 1 between groups.
numeric_only : bool, optional
For DataFrame objects, rank only numeric columns if set to True.
na_option : {'keep', 'top', 'bottom'}, default 'keep'
How to rank NaN values:
* keep: assign NaN rank to NaN values
* top: assign smallest rank to NaN values if ascending
* bottom: assign highest rank to NaN values if ascending.
ascending : bool, default True
Whether or not the elements should be ranked in ascending order.
pct : bool, default False
Whether or not to display the returned rankings in percentile
form.
Returns
-------
same type as caller
Return a Series or DataFrame with data ranks as values.
See Also
--------
core.groupby.GroupBy.rank : Rank of values within each group.
Examples
--------
>>> df = pd.DataFrame(data={'Animal': ['cat', 'penguin', 'dog',
... 'spider', 'snake'],
... 'Number_legs': [4, 2, 4, 8, np.nan]})
>>> df
Animal Number_legs
0 cat 4.0
1 penguin 2.0
2 dog 4.0
3 spider 8.0
4 snake NaN
The following example shows how the method behaves with the above
parameters:
* default_rank: this is the default behaviour obtained without using
any parameter.
* max_rank: setting ``method = 'max'`` the records that have the
same values are ranked using the highest rank (e.g.: since 'cat'
and 'dog' are both in the 2nd and 3rd position, rank 3 is assigned.)
* NA_bottom: choosing ``na_option = 'bottom'``, if there are records
with NaN values they are placed at the bottom of the ranking.
* pct_rank: when setting ``pct = True``, the ranking is expressed as
percentile rank.
>>> df['default_rank'] = df['Number_legs'].rank()
>>> df['max_rank'] = df['Number_legs'].rank(method='max')
>>> df['NA_bottom'] = df['Number_legs'].rank(na_option='bottom')
>>> df['pct_rank'] = df['Number_legs'].rank(pct=True)
>>> df
Animal Number_legs default_rank max_rank NA_bottom pct_rank
0 cat 4.0 2.5 3.0 2.5 0.625
1 penguin 2.0 1.0 1.0 1.0 0.250
2 dog 4.0 2.5 3.0 2.5 0.625
3 spider 8.0 4.0 4.0 4.0 1.000
4 snake NaN NaN NaN 5.0 NaN
"""
axis = self._get_axis_number(axis)
if na_option not in {"keep", "top", "bottom"}:
msg = "na_option must be one of 'keep', 'top', or 'bottom'"
raise ValueError(msg)
def ranker(data):
ranks = algos.rank(
data.values,
axis=axis,
method=method,
ascending=ascending,
na_option=na_option,
pct=pct,
)
ranks = self._constructor(ranks, **data._construct_axes_dict())
return ranks.__finalize__(self)
# if numeric_only is None, and we can't get anything, we try with
# numeric_only=True
if numeric_only is None:
try:
return ranker(self)
except TypeError:
numeric_only = True
if numeric_only:
data = self._get_numeric_data()
else:
data = self
return ranker(data)
_shared_docs[
"align"
] = """
Align two objects on their axes with the specified join method.
Join method is specified for each axis Index.
Parameters
----------
other : DataFrame or Series
join : {'outer', 'inner', 'left', 'right'}, default 'outer'
axis : allowed axis of the other object, default None
Align on index (0), columns (1), or both (None).
level : int or level name, default None
Broadcast across a level, matching Index values on the
passed MultiIndex level.
copy : bool, default True
Always returns new objects. If copy=False and no reindexing is
required then original objects are returned.
fill_value : scalar, default np.NaN
Value to use for missing values. Defaults to NaN, but can be any
"compatible" value.
method : {'backfill', 'bfill', 'pad', 'ffill', None}, default None
Method to use for filling holes in reindexed Series:
- pad / ffill: propagate last valid observation forward to next valid.
- backfill / bfill: use NEXT valid observation to fill gap.
limit : int, default None
If method is specified, this is the maximum number of consecutive
NaN values to forward/backward fill. In other words, if there is
a gap with more than this number of consecutive NaNs, it will only
be partially filled. If method is not specified, this is the
maximum number of entries along the entire axis where NaNs will be
filled. Must be greater than 0 if not None.
fill_axis : %(axes_single_arg)s, default 0
Filling axis, method and limit.
broadcast_axis : %(axes_single_arg)s, default None
Broadcast values along this axis, if aligning two objects of
different dimensions.
Returns
-------
(left, right) : (%(klass)s, type of other)
Aligned objects.
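Examples
--------
A minimal illustrative sketch (hypothetical Series; the same mechanics
apply to DataFrame). Labels missing from one side are filled with NaN:
>>> left = pd.Series([1, 2], index=['a', 'b'])
>>> right = pd.Series([3, 4], index=['b', 'c'])
>>> l, r = left.align(right, join='outer')
>>> l
a    1.0
b    2.0
c    NaN
dtype: float64
>>> r
a    NaN
b    3.0
c    4.0
dtype: float64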
"""
@Appender(_shared_docs["align"] % _shared_doc_kwargs)
def align(
self,
other,
join="outer",
axis=None,
level=None,
copy=True,
fill_value=None,
method=None,
limit=None,
fill_axis=0,
broadcast_axis=None,
):
method = missing.clean_fill_method(method)
if broadcast_axis == 1 and self.ndim != other.ndim:
if isinstance(self, ABCSeries):
# this means other is a DataFrame, and we need to broadcast
# self
cons = self._constructor_expanddim
df = cons(
{c: self for c in other.columns}, **other._construct_axes_dict()
)
return df._align_frame(
other,
join=join,
axis=axis,
level=level,
copy=copy,
fill_value=fill_value,
method=method,
limit=limit,
fill_axis=fill_axis,
)
elif isinstance(other, ABCSeries):
# this means self is a DataFrame, and we need to broadcast
# other
cons = other._constructor_expanddim
df = cons(
{c: other for c in self.columns}, **self._construct_axes_dict()
)
return self._align_frame(
df,
join=join,
axis=axis,
level=level,
copy=copy,
fill_value=fill_value,
method=method,
limit=limit,
fill_axis=fill_axis,
)
if axis is not None:
axis = self._get_axis_number(axis)
if isinstance(other, ABCDataFrame):
return self._align_frame(
other,
join=join,
axis=axis,
level=level,
copy=copy,
fill_value=fill_value,
method=method,
limit=limit,
fill_axis=fill_axis,
)
elif isinstance(other, ABCSeries):
return self._align_series(
other,
join=join,
axis=axis,
level=level,
copy=copy,
fill_value=fill_value,
method=method,
limit=limit,
fill_axis=fill_axis,
)
else: # pragma: no cover
raise TypeError(f"unsupported type: {type(other)}")
def _align_frame(
self,
other,
join="outer",
axis=None,
level=None,
copy: bool_t = True,
fill_value=None,
method=None,
limit=None,
fill_axis=0,
):
# defaults
join_index, join_columns = None, None
ilidx, iridx = None, None
clidx, cridx = None, None
is_series = isinstance(self, ABCSeries)
if axis is None or axis == 0:
if not self.index.equals(other.index):
join_index, ilidx, iridx = self.index.join(
other.index, how=join, level=level, return_indexers=True
)
if axis is None or axis == 1:
if not is_series and not self.columns.equals(other.columns):
join_columns, clidx, cridx = self.columns.join(
other.columns, how=join, level=level, return_indexers=True
)
if is_series:
reindexers = {0: [join_index, ilidx]}
else:
reindexers = {0: [join_index, ilidx], 1: [join_columns, clidx]}
left = self._reindex_with_indexers(
reindexers, copy=copy, fill_value=fill_value, allow_dups=True
)
# other must be always DataFrame
right = other._reindex_with_indexers(
{0: [join_index, iridx], 1: [join_columns, cridx]},
copy=copy,
fill_value=fill_value,
allow_dups=True,
)
if method is not None:
left = self._ensure_type(
left.fillna(method=method, axis=fill_axis, limit=limit)
)
right = right.fillna(method=method, axis=fill_axis, limit=limit)
# if DatetimeIndex have different tz, convert to UTC
if is_datetime64tz_dtype(left.index):
if left.index.tz != right.index.tz:
if join_index is not None:
left.index = join_index
right.index = join_index
return left.__finalize__(self), right.__finalize__(other)
def _align_series(
self,
other,
join="outer",
axis=None,
level=None,
copy: bool_t = True,
fill_value=None,
method=None,
limit=None,
fill_axis=0,
):
is_series = isinstance(self, ABCSeries)
# series/series compat, other must always be a Series
if is_series:
if axis:
raise ValueError("cannot align series to a series other than axis 0")
# equal
if self.index.equals(other.index):
join_index, lidx, ridx = None, None, None
else:
join_index, lidx, ridx = self.index.join(
other.index, how=join, level=level, return_indexers=True
)
left = self._reindex_indexer(join_index, lidx, copy)
right = other._reindex_indexer(join_index, ridx, copy)
else:
# one has > 1 ndim
fdata = self._data
if axis == 0:
join_index = self.index
lidx, ridx = None, None
if not self.index.equals(other.index):
join_index, lidx, ridx = self.index.join(
other.index, how=join, level=level, return_indexers=True
)
if lidx is not None:
fdata = fdata.reindex_indexer(join_index, lidx, axis=1)
elif axis == 1:
join_index = self.columns
lidx, ridx = None, None
if not self.columns.equals(other.index):
join_index, lidx, ridx = self.columns.join(
other.index, how=join, level=level, return_indexers=True
)
if lidx is not None:
fdata = fdata.reindex_indexer(join_index, lidx, axis=0)
else:
raise ValueError("Must specify axis=0 or 1")
if copy and fdata is self._data:
fdata = fdata.copy()
left = self._constructor(fdata)
if ridx is None:
right = other
else:
right = other.reindex(join_index, level=level)
# fill
fill_na = notna(fill_value) or (method is not None)
if fill_na:
left = left.fillna(fill_value, method=method, limit=limit, axis=fill_axis)
right = right.fillna(fill_value, method=method, limit=limit)
# if DatetimeIndex have different tz, convert to UTC
if is_series or (not is_series and axis == 0):
if is_datetime64tz_dtype(left.index):
if left.index.tz != right.index.tz:
if join_index is not None:
left.index = join_index
right.index = join_index
return left.__finalize__(self), right.__finalize__(other)
def _where(
self,
cond,
other=np.nan,
inplace=False,
axis=None,
level=None,
errors="raise",
try_cast=False,
):
"""
Equivalent to public method `where`, except that `other` is not
applied as a function even if callable. Used in __setitem__.
"""
inplace = validate_bool_kwarg(inplace, "inplace")
# align the cond to same shape as myself
cond = com.apply_if_callable(cond, self)
if isinstance(cond, NDFrame):
cond, _ = cond.align(self, join="right", broadcast_axis=1)
else:
if not hasattr(cond, "shape"):
cond = np.asanyarray(cond)
if cond.shape != self.shape:
raise ValueError("Array conditional must be same shape as self")
cond = self._constructor(cond, **self._construct_axes_dict())
# make sure we are boolean
fill_value = bool(inplace)
cond = cond.fillna(fill_value)
msg = "Boolean array expected for the condition, not {dtype}"
if not isinstance(cond, ABCDataFrame):
# This is a single-dimensional object.
if not is_bool_dtype(cond):
raise ValueError(msg.format(dtype=cond.dtype))
elif not cond.empty:
for dt in cond.dtypes:
if not is_bool_dtype(dt):
raise ValueError(msg.format(dtype=dt))
cond = -cond if inplace else cond
# try to align with other
try_quick = True
if hasattr(other, "align"):
# align with me
if other.ndim <= self.ndim:
_, other = self.align(
other, join="left", axis=axis, level=level, fill_value=np.nan
)
# if we are NOT aligned, raise as we cannot where index
if axis is None and not all(
other._get_axis(i).equals(ax) for i, ax in enumerate(self.axes)
):
raise InvalidIndexError
# slice me out of the other
else:
raise NotImplementedError(
"cannot align with a higher dimensional NDFrame"
)
if isinstance(other, np.ndarray):
if other.shape != self.shape:
if self.ndim == 1:
icond = cond.values
# GH 2745 / GH 4192
# treat like a scalar
if len(other) == 1:
other = np.array(other[0])
# GH 3235
# match True cond to other
elif len(cond[icond]) == len(other):
# try to not change dtype at first (if try_quick)
if try_quick:
new_other = np.asarray(self)
new_other = new_other.copy()
new_other[icond] = other
other = new_other
else:
raise ValueError(
"Length of replacements must equal series length"
)
else:
raise ValueError(
"other must be the same shape as self when an ndarray"
)
# we are the same shape, so create an actual object for alignment
else:
other = self._constructor(other, **self._construct_axes_dict())
if axis is None:
axis = 0
if self.ndim == getattr(other, "ndim", 0):
align = True
else:
align = self._get_axis_number(axis) == 1
block_axis = self._get_block_manager_axis(axis)
if inplace:
# we may have different type blocks come out of putmask, so
# reconstruct the block manager
self._check_inplace_setting(other)
new_data = self._data.putmask(
mask=cond,
new=other,
align=align,
inplace=True,
axis=block_axis,
transpose=self._AXIS_REVERSED,
)
self._update_inplace(new_data)
else:
new_data = self._data.where(
other=other,
cond=cond,
align=align,
errors=errors,
try_cast=try_cast,
axis=block_axis,
)
return self._constructor(new_data).__finalize__(self)
_shared_docs[
"where"
] = """
Replace values where the condition is %(cond_rev)s.
Parameters
----------
cond : bool %(klass)s, array-like, or callable
Where `cond` is %(cond)s, keep the original value. Where
%(cond_rev)s, replace with corresponding value from `other`.
If `cond` is callable, it is computed on the %(klass)s and
should return boolean %(klass)s or array. The callable must
not change input %(klass)s (though pandas doesn't check it).
other : scalar, %(klass)s, or callable
Entries where `cond` is %(cond_rev)s are replaced with
corresponding value from `other`.
If other is callable, it is computed on the %(klass)s and
should return scalar or %(klass)s. The callable must not
change input %(klass)s (though pandas doesn't check it).
inplace : bool, default False
Whether to perform the operation in place on the data.
axis : int, default None
Alignment axis if needed.
level : int, default None
Alignment level if needed.
errors : str, {'raise', 'ignore'}, default 'raise'
Note that currently this parameter won't affect
the results and will always coerce to a suitable dtype.
- 'raise' : allow exceptions to be raised.
- 'ignore' : suppress exceptions. On error return original object.
try_cast : bool, default False
Try to cast the result back to the input type (if possible).
Returns
-------
Same type as caller
See Also
--------
:func:`DataFrame.%(name_other)s` : Return an object of same shape as
self.
Notes
-----
The %(name)s method is an application of the if-then idiom. For each
element in the calling DataFrame, if ``cond`` is ``%(cond)s`` the
element is used; otherwise the corresponding element from the DataFrame
``other`` is used.
The signature for :func:`DataFrame.where` differs from
:func:`numpy.where`. Roughly ``df1.where(m, df2)`` is equivalent to
``np.where(m, df1, df2)``.
For further details and examples see the ``%(name)s`` documentation in
:ref:`indexing <indexing.where_mask>`.
Examples
--------
>>> s = pd.Series(range(5))
>>> s.where(s > 0)
0 NaN
1 1.0
2 2.0
3 3.0
4 4.0
dtype: float64
>>> s.mask(s > 0)
0 0.0
1 NaN
2 NaN
3 NaN
4 NaN
dtype: float64
>>> s.where(s > 1, 10)
0 10
1 10
2 2
3 3
4 4
dtype: int64
>>> df = pd.DataFrame(np.arange(10).reshape(-1, 2), columns=['A', 'B'])
>>> df
A B
0 0 1
1 2 3
2 4 5
3 6 7
4 8 9
>>> m = df %% 3 == 0
>>> df.where(m, -df)
A B
0 0 -1
1 -2 3
2 -4 -5
3 6 -7
4 -8 9
>>> df.where(m, -df) == np.where(m, df, -df)
A B
0 True True
1 True True
2 True True
3 True True
4 True True
>>> df.where(m, -df) == df.mask(~m, -df)
A B
0 True True
1 True True
2 True True
3 True True
4 True True
"""
@Appender(
_shared_docs["where"]
% dict(
_shared_doc_kwargs,
cond="True",
cond_rev="False",
name="where",
name_other="mask",
)
)
def where(
self,
cond,
other=np.nan,
inplace=False,
axis=None,
level=None,
errors="raise",
try_cast=False,
):
other = com.apply_if_callable(other, self)
return self._where(
cond, other, inplace, axis, level, errors=errors, try_cast=try_cast
)
@Appender(
_shared_docs["where"]
% dict(
_shared_doc_kwargs,
cond="False",
cond_rev="True",
name="mask",
name_other="where",
)
)
def mask(
self,
cond,
other=np.nan,
inplace=False,
axis=None,
level=None,
errors="raise",
try_cast=False,
):
inplace = validate_bool_kwarg(inplace, "inplace")
cond = com.apply_if_callable(cond, self)
# see gh-21891
if not hasattr(cond, "__invert__"):
cond = np.array(cond)
return self.where(
~cond,
other=other,
inplace=inplace,
axis=axis,
level=level,
try_cast=try_cast,
errors=errors,
)
_shared_docs[
"shift"
] = """
Shift index by desired number of periods with an optional time `freq`.
When `freq` is not passed, shift the index without realigning the data.
If `freq` is passed (in this case, the index must be date or datetime,
or it will raise a `NotImplementedError`), the index will be
increased using the periods and the `freq`.
Parameters
----------
periods : int
Number of periods to shift. Can be positive or negative.
freq : DateOffset, tseries.offsets, timedelta, or str, optional
Offset to use from the tseries module or time rule (e.g. 'EOM').
If `freq` is specified then the index values are shifted but the
data is not realigned. That is, use `freq` if you would like to
extend the index when shifting and preserve the original data.
axis : {0 or 'index', 1 or 'columns', None}, default None
Shift direction.
fill_value : object, optional
The scalar value to use for newly introduced missing values.
The default depends on the dtype of `self`.
For numeric data, ``np.nan`` is used.
For datetime, timedelta, or period data, etc. :attr:`NaT` is used.
For extension dtypes, ``self.dtype.na_value`` is used.
.. versionchanged:: 0.24.0
Returns
-------
%(klass)s
Copy of input object, shifted.
See Also
--------
Index.shift : Shift values of Index.
DatetimeIndex.shift : Shift values of DatetimeIndex.
PeriodIndex.shift : Shift values of PeriodIndex.
tshift : Shift the time index, using the index's frequency if
available.
Examples
--------
>>> df = pd.DataFrame({'Col1': [10, 20, 15, 30, 45],
... 'Col2': [13, 23, 18, 33, 48],
... 'Col3': [17, 27, 22, 37, 52]})
>>> df.shift(periods=3)
Col1 Col2 Col3
0 NaN NaN NaN
1 NaN NaN NaN
2 NaN NaN NaN
3 10.0 13.0 17.0
4 20.0 23.0 27.0
>>> df.shift(periods=1, axis='columns')
Col1 Col2 Col3
0 NaN 10.0 13.0
1 NaN 20.0 23.0
2 NaN 15.0 18.0
3 NaN 30.0 33.0
4 NaN 45.0 48.0
>>> df.shift(periods=3, fill_value=0)
Col1 Col2 Col3
0 0 0 0
1 0 0 0
2 0 0 0
3 10 13 17
4 20 23 27
"""
@Appender(_shared_docs["shift"] % _shared_doc_kwargs)
def shift(
self: FrameOrSeries, periods=1, freq=None, axis=0, fill_value=None
) -> FrameOrSeries:
if periods == 0:
return self.copy()
block_axis = self._get_block_manager_axis(axis)
if freq is None:
new_data = self._data.shift(
periods=periods, axis=block_axis, fill_value=fill_value
)
else:
return self.tshift(periods, freq)
return self._constructor(new_data).__finalize__(self)
def slice_shift(self: FrameOrSeries, periods: int = 1, axis=0) -> FrameOrSeries:
"""
Equivalent to `shift` without copying data.
The shifted data will not include the dropped periods and the
shifted axis will be smaller than the original.
Parameters
----------
periods : int
Number of periods to move, can be positive or negative.
Returns
-------
shifted : same type as caller
Notes
-----
While the `slice_shift` is faster than `shift`, you may pay for it
later during alignment.
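Examples
--------
A minimal illustrative sketch (hypothetical Series): unlike ``shift``,
the rows that would have become NaN are dropped instead of kept.
>>> s = pd.Series([10, 20, 30, 40])
>>> s.slice_shift(1)
1    10
2    20
3    30
dtype: int64
>>> s.shift(1)
0     NaN
1    10.0
2    20.0
3    30.0
dtype: float64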
"""
if periods == 0:
return self
if periods > 0:
vslicer = slice(None, -periods)
islicer = slice(periods, None)
else:
vslicer = slice(-periods, None)
islicer = slice(None, periods)
new_obj = self._slice(vslicer, axis=axis)
shifted_axis = self._get_axis(axis)[islicer]
new_obj.set_axis(shifted_axis, axis=axis, inplace=True)
return new_obj.__finalize__(self)
def tshift(
self: FrameOrSeries, periods: int = 1, freq=None, axis=0
) -> FrameOrSeries:
"""
Shift the time index, using the index's frequency if available.
Parameters
----------
periods : int
Number of periods to move, can be positive or negative.
freq : DateOffset, timedelta, or str, default None
Increment to use from the tseries module
or time rule expressed as a string (e.g. 'EOM').
axis : {0 or 'index', 1 or 'columns', None}, default 0
Corresponds to the axis that contains the Index.
Returns
-------
shifted : Series/DataFrame
Notes
-----
If `freq` is not specified, the method tries to use the `freq` or
`inferred_freq` attributes of the index. If neither of those attributes
exists, a ValueError is raised.
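Examples
--------
A minimal illustrative sketch (hypothetical daily index): the index is
moved forward by one frequency step while the data stays attached to
its original values.
>>> idx = pd.date_range('2020-01-01', periods=3, freq='D')
>>> pd.Series([1, 2, 3], index=idx).tshift(1)
2020-01-02    1
2020-01-03    2
2020-01-04    3
Freq: D, dtype: int64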
"""
index = self._get_axis(axis)
if freq is None:
freq = getattr(index, "freq", None)
if freq is None:
freq = getattr(index, "inferred_freq", None)
if freq is None:
msg = "Freq was not given and was not set in the index"
raise ValueError(msg)
if periods == 0:
return self
if isinstance(freq, str):
freq = to_offset(freq)
block_axis = self._get_block_manager_axis(axis)
if isinstance(index, PeriodIndex):
orig_freq = to_offset(index.freq)
if freq == orig_freq:
new_data = self._data.copy()
new_data.axes[block_axis] = index.shift(periods)
elif orig_freq is not None:
raise ValueError(
f"Given freq {freq.rule_code} does not match "
f"PeriodIndex freq {orig_freq.rule_code}"
)
else:
new_data = self._data.copy()
new_data.axes[block_axis] = index.shift(periods, freq)
return self._constructor(new_data).__finalize__(self)
def truncate(
self: FrameOrSeries, before=None, after=None, axis=None, copy: bool_t = True
) -> FrameOrSeries:
"""
Truncate a Series or DataFrame before and after some index value.
This is a useful shorthand for boolean indexing based on index
values above or below certain thresholds.
Parameters
----------
before : date, str, int
Truncate all rows before this index value.
after : date, str, int
Truncate all rows after this index value.
axis : {0 or 'index', 1 or 'columns'}, optional
Axis to truncate. Truncates the index (rows) by default.
copy : bool, default True
Return a copy of the truncated section.
Returns
-------
type of caller
The truncated Series or DataFrame.
See Also
--------
DataFrame.loc : Select a subset of a DataFrame by label.
DataFrame.iloc : Select a subset of a DataFrame by position.
Notes
-----
If the index being truncated contains only datetime values,
`before` and `after` may be specified as strings instead of
Timestamps.
Examples
--------
>>> df = pd.DataFrame({'A': ['a', 'b', 'c', 'd', 'e'],
... 'B': ['f', 'g', 'h', 'i', 'j'],
... 'C': ['k', 'l', 'm', 'n', 'o']},
... index=[1, 2, 3, 4, 5])
>>> df
A B C
1 a f k
2 b g l
3 c h m
4 d i n
5 e j o
>>> df.truncate(before=2, after=4)
A B C
2 b g l
3 c h m
4 d i n
The columns of a DataFrame can be truncated.
>>> df.truncate(before="A", after="B", axis="columns")
A B
1 a f
2 b g
3 c h
4 d i
5 e j
For Series, only rows can be truncated.
>>> df['A'].truncate(before=2, after=4)
2 b
3 c
4 d
Name: A, dtype: object
The index values in ``truncate`` can be datetimes or string
dates.
>>> dates = pd.date_range('2016-01-01', '2016-02-01', freq='s')
>>> df = pd.DataFrame(index=dates, data={'A': 1})
>>> df.tail()
A
2016-01-31 23:59:56 1
2016-01-31 23:59:57 1
2016-01-31 23:59:58 1
2016-01-31 23:59:59 1
2016-02-01 00:00:00 1
>>> df.truncate(before=pd.Timestamp('2016-01-05'),
... after=pd.Timestamp('2016-01-10')).tail()
A
2016-01-09 23:59:56 1
2016-01-09 23:59:57 1
2016-01-09 23:59:58 1
2016-01-09 23:59:59 1
2016-01-10 00:00:00 1
Because the index is a DatetimeIndex containing only dates, we can
specify `before` and `after` as strings. They will be coerced to
Timestamps before truncation.
>>> df.truncate('2016-01-05', '2016-01-10').tail()
A
2016-01-09 23:59:56 1
2016-01-09 23:59:57 1
2016-01-09 23:59:58 1
2016-01-09 23:59:59 1
2016-01-10 00:00:00 1
Note that ``truncate`` assumes a 0 value for any unspecified time
component (midnight). This differs from partial string slicing, which
returns any partially matching dates.
>>> df.loc['2016-01-05':'2016-01-10', :].tail()
A
2016-01-10 23:59:55 1
2016-01-10 23:59:56 1
2016-01-10 23:59:57 1
2016-01-10 23:59:58 1
2016-01-10 23:59:59 1
"""
if axis is None:
axis = self._stat_axis_number
axis = self._get_axis_number(axis)
ax = self._get_axis(axis)
# GH 17935
# Check that index is sorted
if not ax.is_monotonic_increasing and not ax.is_monotonic_decreasing:
raise ValueError("truncate requires a sorted index")
# if we have a date index, convert to dates, otherwise
# treat like a slice
if ax.is_all_dates:
from pandas.core.tools.datetimes import to_datetime
before = to_datetime(before)
after = to_datetime(after)
if before is not None and after is not None:
if before > after:
raise ValueError(f"Truncate: {after} must be after {before}")
slicer = [slice(None, None)] * self._AXIS_LEN
slicer[axis] = slice(before, after)
result = self.loc[tuple(slicer)]
if isinstance(ax, MultiIndex):
setattr(result, self._get_axis_name(axis), ax.truncate(before, after))
if copy:
result = result.copy()
return result
def tz_convert(
self: FrameOrSeries, tz, axis=0, level=None, copy: bool_t = True
) -> FrameOrSeries:
"""
Convert tz-aware axis to target time zone.
Parameters
----------
tz : str or tzinfo object
axis : the axis to convert
level : int, str, default None
If axis is a MultiIndex, convert a specific level. Otherwise
must be None.
copy : bool, default True
Also make a copy of the underlying data.
Returns
-------
%(klass)s
Object with time zone converted axis.
Raises
------
TypeError
If the axis is tz-naive.
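Examples
--------
A minimal illustrative sketch (hypothetical UTC timestamps converted to
US/Eastern, which observes UTC-05:00 on this date):
>>> idx = pd.date_range('2020-01-01 12:00', periods=2, freq='H', tz='UTC')
>>> pd.Series([1, 2], index=idx).tz_convert('US/Eastern')
2020-01-01 07:00:00-05:00    1
2020-01-01 08:00:00-05:00    2
Freq: H, dtype: int64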
"""
axis = self._get_axis_number(axis)
ax = self._get_axis(axis)
def _tz_convert(ax, tz):
if not hasattr(ax, "tz_convert"):
if len(ax) > 0:
ax_name = self._get_axis_name(axis)
raise TypeError(
f"{ax_name} is not a valid DatetimeIndex or PeriodIndex"
)
else:
ax = DatetimeIndex([], tz=tz)
else:
ax = ax.tz_convert(tz)
return ax
# if a level is given it must be a MultiIndex level or
# equivalent to the axis name
if isinstance(ax, MultiIndex):
level = ax._get_level_number(level)
new_level = _tz_convert(ax.levels[level], tz)
ax = ax.set_levels(new_level, level=level)
else:
if level not in (None, 0, ax.name):
raise ValueError(f"The level {level} is not valid")
ax = _tz_convert(ax, tz)
result = self._constructor(self._data, copy=copy)
result = result.set_axis(ax, axis=axis, inplace=False)
return result.__finalize__(self)
def tz_localize(
self: FrameOrSeries,
tz,
axis=0,
level=None,
copy: bool_t = True,
ambiguous="raise",
nonexistent: str = "raise",
) -> FrameOrSeries:
"""
Localize tz-naive index of a Series or DataFrame to target time zone.
This operation localizes the Index. To localize the values in a
timezone-naive Series, use :meth:`Series.dt.tz_localize`.
Parameters
----------
tz : str or tzinfo
axis : the axis to localize
level : int, str, default None
If axis is a MultiIndex, localize a specific level. Otherwise
must be None.
copy : bool, default True
Also make a copy of the underlying data.
ambiguous : 'infer', bool-ndarray, 'NaT', default 'raise'
When clocks moved backward due to DST, ambiguous times may arise.
For example in Central European Time (UTC+01), when going from
03:00 DST to 02:00 non-DST, 02:30:00 local time occurs both at
00:30:00 UTC and at 01:30:00 UTC. In such a situation, the
`ambiguous` parameter dictates how ambiguous times should be
handled.
- 'infer' will attempt to infer fall dst-transition hours based on
order
- bool-ndarray where True signifies a DST time, False designates
a non-DST time (note that this flag is only applicable for
ambiguous times)
- 'NaT' will return NaT where there are ambiguous times
- 'raise' will raise an AmbiguousTimeError if there are ambiguous
times.
nonexistent : str, default 'raise'
A nonexistent time does not exist in a particular timezone
where clocks moved forward due to DST. Valid values are:
- 'shift_forward' will shift the nonexistent time forward to the
closest existing time
- 'shift_backward' will shift the nonexistent time backward to the
closest existing time
- 'NaT' will return NaT where there are nonexistent times
- timedelta objects will shift nonexistent times by the timedelta
- 'raise' will raise an NonExistentTimeError if there are
nonexistent times.
.. versionadded:: 0.24.0
Returns
-------
Series or DataFrame
Same type as the input.
Raises
------
TypeError
If the TimeSeries is tz-aware and tz is not None.
Examples
--------
Localize local times:
>>> s = pd.Series([1],
... index=pd.DatetimeIndex(['2018-09-15 01:30:00']))
>>> s.tz_localize('CET')
2018-09-15 01:30:00+02:00 1
dtype: int64
Be careful with DST changes. When there is sequential data, pandas
can infer the DST time:
>>> s = pd.Series(range(7),
... index=pd.DatetimeIndex(['2018-10-28 01:30:00',
... '2018-10-28 02:00:00',
... '2018-10-28 02:30:00',
... '2018-10-28 02:00:00',
... '2018-10-28 02:30:00',
... '2018-10-28 03:00:00',
... '2018-10-28 03:30:00']))
>>> s.tz_localize('CET', ambiguous='infer')
2018-10-28 01:30:00+02:00 0
2018-10-28 02:00:00+02:00 1
2018-10-28 02:30:00+02:00 2
2018-10-28 02:00:00+01:00 3
2018-10-28 02:30:00+01:00 4
2018-10-28 03:00:00+01:00 5
2018-10-28 03:30:00+01:00 6
dtype: int64
In some cases, inferring the DST is impossible. In such cases, you can
pass an ndarray to the ambiguous parameter to set the DST explicitly
>>> s = pd.Series(range(3),
... index=pd.DatetimeIndex(['2018-10-28 01:20:00',
... '2018-10-28 02:36:00',
... '2018-10-28 03:46:00']))
>>> s.tz_localize('CET', ambiguous=np.array([True, True, False]))
2018-10-28 01:20:00+02:00 0
2018-10-28 02:36:00+02:00 1
2018-10-28 03:46:00+01:00 2
dtype: int64
If the DST transition causes nonexistent times, you can shift these
dates forward or backward with a timedelta object or `'shift_forward'`
or `'shift_backward'`.
>>> s = pd.Series(range(2),
... index=pd.DatetimeIndex(['2015-03-29 02:30:00',
... '2015-03-29 03:30:00']))
>>> s.tz_localize('Europe/Warsaw', nonexistent='shift_forward')
2015-03-29 03:00:00+02:00 0
2015-03-29 03:30:00+02:00 1
dtype: int64
>>> s.tz_localize('Europe/Warsaw', nonexistent='shift_backward')
2015-03-29 01:59:59.999999999+01:00 0
2015-03-29 03:30:00+02:00 1
dtype: int64
>>> s.tz_localize('Europe/Warsaw', nonexistent=pd.Timedelta('1H'))
2015-03-29 03:30:00+02:00 0
2015-03-29 03:30:00+02:00 1
dtype: int64
"""
nonexistent_options = ("raise", "NaT", "shift_forward", "shift_backward")
if nonexistent not in nonexistent_options and not isinstance(
nonexistent, timedelta
):
raise ValueError(
"The nonexistent argument must be one of 'raise', "
"'NaT', 'shift_forward', 'shift_backward' or "
"a timedelta object"
)
axis = self._get_axis_number(axis)
ax = self._get_axis(axis)
def _tz_localize(ax, tz, ambiguous, nonexistent):
if not hasattr(ax, "tz_localize"):
if len(ax) > 0:
ax_name = self._get_axis_name(axis)
raise TypeError(
f"{ax_name} is not a valid DatetimeIndex or PeriodIndex"
)
else:
ax = DatetimeIndex([], tz=tz)
else:
ax = ax.tz_localize(tz, ambiguous=ambiguous, nonexistent=nonexistent)
return ax
# if a level is given it must be a MultiIndex level or
# equivalent to the axis name
if isinstance(ax, MultiIndex):
level = ax._get_level_number(level)
new_level = _tz_localize(ax.levels[level], tz, ambiguous, nonexistent)
ax = ax.set_levels(new_level, level=level)
else:
if level not in (None, 0, ax.name):
raise ValueError(f"The level {level} is not valid")
ax = _tz_localize(ax, tz, ambiguous, nonexistent)
result = self._constructor(self._data, copy=copy)
result = result.set_axis(ax, axis=axis, inplace=False)
return result.__finalize__(self)
# ----------------------------------------------------------------------
# Numeric Methods
def abs(self: FrameOrSeries) -> FrameOrSeries:
"""
Return a Series/DataFrame with absolute numeric value of each element.
This function only applies to elements that are all numeric.
Returns
-------
abs
Series/DataFrame containing the absolute value of each element.
See Also
--------
numpy.absolute : Calculate the absolute value element-wise.
Notes
-----
For ``complex`` inputs (e.g. ``1.2 + 1j``), the absolute value is
:math:`\\sqrt{ a^2 + b^2 }`.
Examples
--------
Absolute numeric values in a Series.
>>> s = pd.Series([-1.10, 2, -3.33, 4])
>>> s.abs()
0 1.10
1 2.00
2 3.33
3 4.00
dtype: float64
Absolute numeric values in a Series with complex numbers.
>>> s = pd.Series([1.2 + 1j])
>>> s.abs()
0 1.56205
dtype: float64
Absolute numeric values in a Series with a Timedelta element.
>>> s = pd.Series([pd.Timedelta('1 days')])
>>> s.abs()
0 1 days
dtype: timedelta64[ns]
Select rows with data closest to certain value using argsort (from
`StackOverflow <https://stackoverflow.com/a/17758115>`__).
>>> df = pd.DataFrame({
... 'a': [4, 5, 6, 7],
... 'b': [10, 20, 30, 40],
... 'c': [100, 50, -30, -50]
... })
>>> df
a b c
0 4 10 100
1 5 20 50
2 6 30 -30
3 7 40 -50
>>> df.loc[(df.c - 43).abs().argsort()]
a b c
1 5 20 50
0 4 10 100
2 6 30 -30
3 7 40 -50
"""
return np.abs(self)
def describe(
self: FrameOrSeries, percentiles=None, include=None, exclude=None
) -> FrameOrSeries:
"""
Generate descriptive statistics.
Descriptive statistics include those that summarize the central
tendency, dispersion and shape of a
dataset's distribution, excluding ``NaN`` values.
Analyzes both numeric and object series, as well
as ``DataFrame`` column sets of mixed data types. The output
will vary depending on what is provided. Refer to the notes
below for more detail.
Parameters
----------
percentiles : list-like of numbers, optional
The percentiles to include in the output. All should
fall between 0 and 1. The default is
``[.25, .5, .75]``, which returns the 25th, 50th, and
75th percentiles.
include : 'all', list-like of dtypes or None (default), optional
A white list of data types to include in the result. Ignored
for ``Series``. Here are the options:
- 'all' : All columns of the input will be included in the output.
- A list-like of dtypes : Limits the results to the
provided data types.
To limit the result to numeric types submit
``numpy.number``. To limit it instead to object columns submit
the ``numpy.object`` data type. Strings
can also be used in the style of
``select_dtypes`` (e.g. ``df.describe(include=['O'])``). To
select pandas categorical columns, use ``'category'``
- None (default) : The result will include all numeric columns.
exclude : list-like of dtypes or None (default), optional
A black list of data types to omit from the result. Ignored
for ``Series``. Here are the options:
- A list-like of dtypes : Excludes the provided data types
from the result. To exclude numeric types submit
``numpy.number``. To exclude object columns submit the data
type ``numpy.object``. Strings can also be used in the style of
``select_dtypes`` (e.g. ``df.describe(include=['O'])``). To
exclude pandas categorical columns, use ``'category'``
- None (default) : The result will exclude nothing.
Returns
-------
Series or DataFrame
Summary statistics of the Series or Dataframe provided.
See Also
--------
DataFrame.count: Count number of non-NA/null observations.
DataFrame.max: Maximum of the values in the object.
DataFrame.min: Minimum of the values in the object.
DataFrame.mean: Mean of the values.
DataFrame.std: Standard deviation of the observations.
DataFrame.select_dtypes: Subset of a DataFrame including/excluding
columns based on their dtype.
Notes
-----
For numeric data, the result's index will include ``count``,
``mean``, ``std``, ``min``, ``max`` as well as lower, ``50`` and
upper percentiles. By default the lower percentile is ``25`` and the
upper percentile is ``75``. The ``50`` percentile is the
same as the median.
For object data (e.g. strings or timestamps), the result's index
will include ``count``, ``unique``, ``top``, and ``freq``. The ``top``
is the most common value. The ``freq`` is the most common value's
frequency. Timestamps also include the ``first`` and ``last`` items.
If multiple object values have the highest count, then the
``top`` result will be arbitrarily chosen from
among those with the highest count.
For mixed data types provided via a ``DataFrame``, the default is to
return only an analysis of numeric columns. If the dataframe consists
only of object and categorical data without any numeric columns, the
default is to return an analysis of both the object and categorical
columns. If ``include='all'`` is provided as an option, the result
will include a union of attributes of each type.
The `include` and `exclude` parameters can be used to limit
which columns in a ``DataFrame`` are analyzed for the output.
The parameters are ignored when analyzing a ``Series``.
Examples
--------
Describing a numeric ``Series``.
>>> s = pd.Series([1, 2, 3])
>>> s.describe()
count 3.0
mean 2.0
std 1.0
min 1.0
25% 1.5
50% 2.0
75% 2.5
max 3.0
dtype: float64
Describing a categorical ``Series``.
>>> s = pd.Series(['a', 'a', 'b', 'c'])
>>> s.describe()
count 4
unique 3
top a
freq 2
dtype: object
Describing a timestamp ``Series``.
>>> s = pd.Series([
... np.datetime64("2000-01-01"),
... np.datetime64("2010-01-01"),
... np.datetime64("2010-01-01")
... ])
>>> s.describe()
count 3
unique 2
top 2010-01-01 00:00:00
freq 2
first 2000-01-01 00:00:00
last 2010-01-01 00:00:00
dtype: object
Describing a ``DataFrame``. By default only numeric fields
are returned.
>>> df = pd.DataFrame({'categorical': pd.Categorical(['d','e','f']),
... 'numeric': [1, 2, 3],
... 'object': ['a', 'b', 'c']
... })
>>> df.describe()
numeric
count 3.0
mean 2.0
std 1.0
min 1.0
25% 1.5
50% 2.0
75% 2.5
max 3.0
Describing all columns of a ``DataFrame`` regardless of data type.
>>> df.describe(include='all')
categorical numeric object
count 3 3.0 3
unique 3 NaN 3
top f NaN c
freq 1 NaN 1
mean NaN 2.0 NaN
std NaN 1.0 NaN
min NaN 1.0 NaN
25% NaN 1.5 NaN
50% NaN 2.0 NaN
75% NaN 2.5 NaN
max NaN 3.0 NaN
Describing a column from a ``DataFrame`` by accessing it as
an attribute.
>>> df.numeric.describe()
count 3.0
mean 2.0
std 1.0
min 1.0
25% 1.5
50% 2.0
75% 2.5
max 3.0
Name: numeric, dtype: float64
Including only numeric columns in a ``DataFrame`` description.
>>> df.describe(include=[np.number])
numeric
count 3.0
mean 2.0
std 1.0
min 1.0
25% 1.5
50% 2.0
75% 2.5
max 3.0
Including only string columns in a ``DataFrame`` description.
>>> df.describe(include=[np.object])
object
count 3
unique 3
top c
freq 1
Including only categorical columns from a ``DataFrame`` description.
>>> df.describe(include=['category'])
categorical
count 3
unique 3
top f
freq 1
Excluding numeric columns from a ``DataFrame`` description.
>>> df.describe(exclude=[np.number])
categorical object
count 3 3
unique 3 3
top f c
freq 1 1
Excluding object columns from a ``DataFrame`` description.
>>> df.describe(exclude=[np.object])
categorical numeric
count 3 3.0
unique 3 NaN
top f NaN
freq 1 NaN
mean NaN 2.0
std NaN 1.0
min NaN 1.0
25% NaN 1.5
50% NaN 2.0
75% NaN 2.5
max NaN 3.0
"""
if self.ndim == 2 and self.columns.size == 0:
raise ValueError("Cannot describe a DataFrame without columns")
if percentiles is not None:
# explicit conversion of `percentiles` to list
percentiles = list(percentiles)
# get them all to be in [0, 1]
validate_percentile(percentiles)
# median should always be included
if 0.5 not in percentiles:
percentiles.append(0.5)
percentiles = np.asarray(percentiles)
else:
percentiles = np.array([0.25, 0.5, 0.75])
# sort and check for duplicates
unique_pcts = np.unique(percentiles)
if len(unique_pcts) < len(percentiles):
raise ValueError("percentiles cannot contain duplicates")
percentiles = unique_pcts
formatted_percentiles = format_percentiles(percentiles)
def describe_numeric_1d(series):
stat_index = (
["count", "mean", "std", "min"] + formatted_percentiles + ["max"]
)
d = (
[series.count(), series.mean(), series.std(), series.min()]
+ series.quantile(percentiles).tolist()
+ [series.max()]
)
return pd.Series(d, index=stat_index, name=series.name)
def describe_categorical_1d(data):
names = ["count", "unique"]
objcounts = data.value_counts()
count_unique = len(objcounts[objcounts != 0])
result = [data.count(), count_unique]
dtype = None
if result[1] > 0:
top, freq = objcounts.index[0], objcounts.iloc[0]
names += ["top", "freq"]
result += [top, freq]
# If there are no non-null values, set 'top' and 'freq' to NaN
# to maintain output shape consistency
else:
names += ["top", "freq"]
result += [np.nan, np.nan]
dtype = "object"
return pd.Series(result, index=names, name=data.name, dtype=dtype)
def describe_timestamp_1d(data):
# GH-30164
stat_index = ["count", "mean", "min"] + formatted_percentiles + ["max"]
d = (
[data.count(), data.mean(), data.min()]
+ data.quantile(percentiles).tolist()
+ [data.max()]
)
return pd.Series(d, index=stat_index, name=data.name)
def describe_1d(data):
if is_bool_dtype(data):
return describe_categorical_1d(data)
elif is_numeric_dtype(data):
return describe_numeric_1d(data)
elif is_datetime64_any_dtype(data):
return describe_timestamp_1d(data)
elif is_timedelta64_dtype(data):
return describe_numeric_1d(data)
else:
return describe_categorical_1d(data)
if self.ndim == 1:
return describe_1d(self)
elif (include is None) and (exclude is None):
# when some numerics are found, keep only numerics
data = self.select_dtypes(include=[np.number])
if len(data.columns) == 0:
data = self
elif include == "all":
if exclude is not None:
msg = "exclude must be None when include is 'all'"
raise ValueError(msg)
data = self
else:
data = self.select_dtypes(include=include, exclude=exclude)
ldesc = [describe_1d(s) for _, s in data.items()]
# set a convenient order for rows
names: List[Label] = []
ldesc_indexes = sorted((x.index for x in ldesc), key=len)
for idxnames in ldesc_indexes:
for name in idxnames:
if name not in names:
names.append(name)
d = pd.concat([x.reindex(names, copy=False) for x in ldesc], axis=1, sort=False)
d.columns = data.columns.copy()
return d
_shared_docs[
"pct_change"
] = """
Percentage change between the current and a prior element.
Computes the percentage change from the immediately previous row by
default. This is useful in comparing the percentage of change in a time
series of elements.
Parameters
----------
periods : int, default 1
Periods to shift for forming percent change.
fill_method : str, default 'pad'
How to handle NAs before computing percent changes.
limit : int, default None
The number of consecutive NAs to fill before stopping.
freq : DateOffset, timedelta, or str, optional
Increment to use from time series API (e.g. 'M' or BDay()).
**kwargs
Additional keyword arguments are passed into
`DataFrame.shift` or `Series.shift`.
Returns
-------
chg : Series or DataFrame
The same type as the calling object.
See Also
--------
Series.diff : Compute the difference of two elements in a Series.
DataFrame.diff : Compute the difference of two elements in a DataFrame.
Series.shift : Shift the index by some number of periods.
DataFrame.shift : Shift the index by some number of periods.
Examples
--------
**Series**
>>> s = pd.Series([90, 91, 85])
>>> s
0 90
1 91
2 85
dtype: int64
>>> s.pct_change()
0 NaN
1 0.011111
2 -0.065934
dtype: float64
>>> s.pct_change(periods=2)
0 NaN
1 NaN
2 -0.055556
dtype: float64
See the percentage change in a Series where filling NAs with last
valid observation forward to next valid.
>>> s = pd.Series([90, 91, None, 85])
>>> s
0 90.0
1 91.0
2 NaN
3 85.0
dtype: float64
>>> s.pct_change(fill_method='ffill')
0 NaN
1 0.011111
2 0.000000
3 -0.065934
dtype: float64
**DataFrame**
Percentage change in French franc, Deutsche Mark, and Italian lira from
1980-01-01 to 1980-03-01.
>>> df = pd.DataFrame({
... 'FR': [4.0405, 4.0963, 4.3149],
... 'GR': [1.7246, 1.7482, 1.8519],
... 'IT': [804.74, 810.01, 860.13]},
... index=['1980-01-01', '1980-02-01', '1980-03-01'])
>>> df
FR GR IT
1980-01-01 4.0405 1.7246 804.74
1980-02-01 4.0963 1.7482 810.01
1980-03-01 4.3149 1.8519 860.13
>>> df.pct_change()
FR GR IT
1980-01-01 NaN NaN NaN
1980-02-01 0.013810 0.013684 0.006549
1980-03-01 0.053365 0.059318 0.061876
Percentage of change in GOOG and APPL stock volume. Shows computing
the percentage change between columns.
>>> df = pd.DataFrame({
... '2016': [1769950, 30586265],
... '2015': [1500923, 40912316],
... '2014': [1371819, 41403351]},
... index=['GOOG', 'APPL'])
>>> df
2016 2015 2014
GOOG 1769950 1500923 1371819
APPL 30586265 40912316 41403351
>>> df.pct_change(axis='columns')
2016 2015 2014
GOOG NaN -0.151997 -0.086016
APPL NaN 0.337604 0.012002
"""
@Appender(_shared_docs["pct_change"] % _shared_doc_kwargs)
def pct_change(
self: FrameOrSeries,
periods=1,
fill_method="pad",
limit=None,
freq=None,
**kwargs,
) -> FrameOrSeries:
# TODO: Not sure if above is correct - need someone to confirm.
axis = self._get_axis_number(kwargs.pop("axis", self._stat_axis_name))
if fill_method is None:
data = self
else:
data = self._ensure_type(
self.fillna(method=fill_method, axis=axis, limit=limit)
)
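# Illustrative sketch (hypothetical values): for pd.Series([100, 110, 121]),
# data.div(data.shift(1)) - 1 yields [NaN, 0.10, 0.10], i.e. a 10 percent
# change at each step.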
rs = data.div(data.shift(periods=periods, freq=freq, axis=axis, **kwargs)) - 1
if freq is not None:
# Shift method is implemented differently when freq is not None
# We want to restore the original index
rs = rs.loc[~rs.index.duplicated()]
rs = rs.reindex_like(data)
return rs
def _agg_by_level(self, name, axis=0, level=0, skipna=True, **kwargs):
if axis is None:
raise ValueError("Must specify 'axis' when aggregating by level.")
grouped = self.groupby(level=level, axis=axis, sort=False)
if hasattr(grouped, name) and skipna:
return getattr(grouped, name)(**kwargs)
axis = self._get_axis_number(axis)
method = getattr(type(self), name)
applyf = lambda x: method(x, axis=axis, skipna=skipna, **kwargs)
return grouped.aggregate(applyf)
@classmethod
def _add_numeric_operations(cls):
"""
Add the operations to the cls; evaluate the doc strings again
"""
axis_descr, name1, name2 = _doc_parms(cls)
cls.any = _make_logical_function(
cls,
"any",
name1=name1,
name2=name2,
axis_descr=axis_descr,
desc=_any_desc,
func=nanops.nanany,
see_also=_any_see_also,
examples=_any_examples,
empty_value=False,
)
cls.all = _make_logical_function(
cls,
"all",
name1=name1,
name2=name2,
axis_descr=axis_descr,
desc=_all_desc,
func=nanops.nanall,
see_also=_all_see_also,
examples=_all_examples,
empty_value=True,
)
@Substitution(
desc="Return the mean absolute deviation of the values "
"for the requested axis.",
name1=name1,
name2=name2,
axis_descr=axis_descr,
min_count="",
see_also="",
examples="",
)
@Appender(_num_doc_mad)
def mad(self, axis=None, skipna=None, level=None):
if skipna is None:
skipna = True
if axis is None:
axis = self._stat_axis_number
if level is not None:
return self._agg_by_level("mad", axis=axis, level=level, skipna=skipna)
data = self._get_numeric_data()
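# Illustrative sketch (hypothetical values): for pd.Series([1, 2, 3, 4]) the
# mean is 2.5, the absolute deviations are [1.5, 0.5, 0.5, 1.5], and the mean
# absolute deviation computed below is therefore 1.0.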
if axis == 0:
demeaned = data - data.mean(axis=0)
else:
demeaned = data.sub(data.mean(axis=1), axis=0)
return np.abs(demeaned).mean(axis=axis, skipna=skipna)
cls.mad = mad
cls.sem = _make_stat_function_ddof(
cls,
"sem",
name1=name1,
name2=name2,
axis_descr=axis_descr,
desc="Return unbiased standard error of the mean over requested "
"axis.\n\nNormalized by N-1 by default. This can be changed "
"using the ddof argument",
func=nanops.nansem,
)
cls.var = _make_stat_function_ddof(
cls,
"var",
name1=name1,
name2=name2,
axis_descr=axis_descr,
desc="Return unbiased variance over requested axis.\n\nNormalized by "
"N-1 by default. This can be changed using the ddof argument",
func=nanops.nanvar,
)
cls.std = _make_stat_function_ddof(
cls,
"std",
name1=name1,
name2=name2,
axis_descr=axis_descr,
desc="Return sample standard deviation over requested axis."
"\n\nNormalized by N-1 by default. This can be changed using the "
"ddof argument",
func=nanops.nanstd,
)
cls.cummin = _make_cum_function(
cls,
"cummin",
name1=name1,
name2=name2,
axis_descr=axis_descr,
desc="minimum",
accum_func=np.minimum.accumulate,
accum_func_name="min",
mask_a=np.inf,
mask_b=np.nan,
examples=_cummin_examples,
)
cls.cumsum = _make_cum_function(
cls,
"cumsum",
name1=name1,
name2=name2,
axis_descr=axis_descr,
desc="sum",
accum_func=np.cumsum,
accum_func_name="sum",
mask_a=0.0,
mask_b=np.nan,
examples=_cumsum_examples,
)
cls.cumprod = _make_cum_function(
cls,
"cumprod",
name1=name1,
name2=name2,
axis_descr=axis_descr,
desc="product",
accum_func=np.cumprod,
accum_func_name="prod",
mask_a=1.0,
mask_b=np.nan,
examples=_cumprod_examples,
)
cls.cummax = _make_cum_function(
cls,
"cummax",
name1=name1,
name2=name2,
axis_descr=axis_descr,
desc="maximum",
accum_func=np.maximum.accumulate,
accum_func_name="max",
mask_a=-np.inf,
mask_b=np.nan,
examples=_cummax_examples,
)
cls.sum = _make_min_count_stat_function(
cls,
"sum",
name1=name1,
name2=name2,
axis_descr=axis_descr,
desc="Return the sum of the values for the requested axis.\n\n"
"This is equivalent to the method ``numpy.sum``.",
func=nanops.nansum,
see_also=_stat_func_see_also,
examples=_sum_examples,
)
cls.mean = _make_stat_function(
cls,
"mean",
name1=name1,
name2=name2,
axis_descr=axis_descr,
desc="Return the mean of the values for the requested axis.",
func=nanops.nanmean,
)
cls.skew = _make_stat_function(
cls,
"skew",
name1=name1,
name2=name2,
axis_descr=axis_descr,
desc="Return unbiased skew over requested axis.\n\nNormalized by N-1.",
func=nanops.nanskew,
)
cls.kurt = _make_stat_function(
cls,
"kurt",
name1=name1,
name2=name2,
axis_descr=axis_descr,
desc="Return unbiased kurtosis over requested axis.\n\n"
"Kurtosis obtained using Fisher's definition of\n"
"kurtosis (kurtosis of normal == 0.0). Normalized "
"by N-1.",
func=nanops.nankurt,
)
cls.kurtosis = cls.kurt
cls.prod = _make_min_count_stat_function(
cls,
"prod",
name1=name1,
name2=name2,
axis_descr=axis_descr,
desc="Return the product of the values for the requested axis.",
func=nanops.nanprod,
examples=_prod_examples,
)
cls.product = cls.prod
cls.median = _make_stat_function(
cls,
"median",
name1=name1,
name2=name2,
axis_descr=axis_descr,
desc="Return the median of the values for the requested axis.",
func=nanops.nanmedian,
)
cls.max = _make_stat_function(
cls,
"max",
name1=name1,
name2=name2,
axis_descr=axis_descr,
desc="Return the maximum of the values for the requested axis.\n\n"
"If you want the *index* of the maximum, use ``idxmax``. This is"
"the equivalent of the ``numpy.ndarray`` method ``argmax``.",
func=nanops.nanmax,
see_also=_stat_func_see_also,
examples=_max_examples,
)
cls.min = _make_stat_function(
cls,
"min",
name1=name1,
name2=name2,
axis_descr=axis_descr,
desc="Return the minimum of the values for the requested axis.\n\n"
"If you want the *index* of the minimum, use ``idxmin``. This is"
"the equivalent of the ``numpy.ndarray`` method ``argmin``.",
func=nanops.nanmin,
see_also=_stat_func_see_also,
examples=_min_examples,
)
@classmethod
def _add_series_or_dataframe_operations(cls):
"""
Add the series or dataframe only operations to the cls; evaluate
the doc strings again.
"""
from pandas.core.window import EWM, Expanding, Rolling, Window
@Appender(Rolling.__doc__)
def rolling(
self,
window,
min_periods=None,
center=False,
win_type=None,
on=None,
axis=0,
closed=None,
):
axis = self._get_axis_number(axis)
if win_type is not None:
return Window(
self,
window=window,
min_periods=min_periods,
center=center,
win_type=win_type,
on=on,
axis=axis,
closed=closed,
)
return Rolling(
self,
window=window,
min_periods=min_periods,
center=center,
win_type=win_type,
on=on,
axis=axis,
closed=closed,
)
cls.rolling = rolling
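# Illustrative sketch (hypothetical values): pd.Series([1, 2, 3, 4]).rolling(2).sum()
# yields [NaN, 3.0, 5.0, 7.0]; passing a win_type (e.g. 'triang') returns a Window
# object whose aggregations apply the corresponding weighting instead.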
@Appender(Expanding.__doc__)
def expanding(self, min_periods=1, center=False, axis=0):
axis = self._get_axis_number(axis)
return Expanding(self, min_periods=min_periods, center=center, axis=axis)
cls.expanding = expanding
@Appender(EWM.__doc__)
def ewm(
self,
com=None,
span=None,
halflife=None,
alpha=None,
min_periods=0,
adjust=True,
ignore_na=False,
axis=0,
):
axis = self._get_axis_number(axis)
return EWM(
self,
com=com,
span=span,
halflife=halflife,
alpha=alpha,
min_periods=min_periods,
adjust=adjust,
ignore_na=ignore_na,
axis=axis,
)
cls.ewm = ewm
@Appender(_shared_docs["transform"] % dict(axis="", **_shared_doc_kwargs))
def transform(self, func, *args, **kwargs):
result = self.agg(func, *args, **kwargs)
if is_scalar(result) or len(result) != len(self):
raise ValueError("transforms cannot produce aggregated results")
return result
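# Illustrative sketch (hypothetical): df.transform(lambda x: x + 1) preserves the
# original shape, whereas df.transform('sum') typically aggregates to a different
# length and therefore raises the ValueError above.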
# ----------------------------------------------------------------------
# Misc methods
_shared_docs[
"valid_index"
] = """
Return index for %(position)s non-NA/null value.
Returns
-------
scalar : type of index
Notes
-----
If all elements are non-NA/null, returns None.
Also returns None for empty %(klass)s.
"""
def _find_valid_index(self, how: str):
"""
Retrieves the index of the first or last valid value, depending on ``how``.
Parameters
----------
how : {'first', 'last'}
Use this parameter to change between the first or last valid index.
Returns
-------
idx_first_valid : type of index
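Notes
-----
A minimal illustrative sketch (hypothetical values): for
pd.Series([np.nan, 3, np.nan]), both ``how="first"`` and ``how="last"``
return the label 1, since position 1 holds the only non-NA value.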
"""
idxpos = find_valid_index(self._values, how)
if idxpos is None:
return None
return self.index[idxpos]
@Appender(
_shared_docs["valid_index"] % {"position": "first", "klass": "Series/DataFrame"}
)
def first_valid_index(self):
return self._find_valid_index("first")
@Appender(
_shared_docs["valid_index"] % {"position": "last", "klass": "Series/DataFrame"}
)
def last_valid_index(self):
return self._find_valid_index("last")
def _doc_parms(cls):
"""Return a tuple of the doc parms."""
axis_descr = (
f"{{{', '.join(f'{a} ({i})' for i, a in enumerate(cls._AXIS_ORDERS))}}}"
)
name = cls._constructor_sliced.__name__ if cls._AXIS_LEN > 1 else "scalar"
name2 = cls.__name__
return axis_descr, name, name2
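# Illustrative sketch: for DataFrame this yields
# ("{index (0), columns (1)}", "Series", "DataFrame"), while for Series it
# yields ("{index (0)}", "scalar", "Series").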
_num_doc = """
%(desc)s
Parameters
----------
axis : %(axis_descr)s
Axis for the function to be applied on.
skipna : bool, default True
Exclude NA/null values when computing the result.
level : int or level name, default None
If the axis is a MultiIndex (hierarchical), count along a
particular level, collapsing into a %(name1)s.
numeric_only : bool, default None
Include only float, int, boolean columns. If None, will attempt to use
everything, then use only numeric data. Not implemented for Series.
%(min_count)s\
**kwargs
Additional keyword arguments to be passed to the function.
Returns
-------
%(name1)s or %(name2)s (if level specified)\
%(see_also)s\
%(examples)s
"""
_num_doc_mad = """
%(desc)s
Parameters
----------
axis : %(axis_descr)s
Axis for the function to be applied on.
skipna : bool, default None
Exclude NA/null values when computing the result.
level : int or level name, default None
If the axis is a MultiIndex (hierarchical), count along a
particular level, collapsing into a %(name1)s.
Returns
-------
%(name1)s or %(name2)s (if level specified)\
%(see_also)s\
%(examples)s
"""
_num_ddof_doc = """
%(desc)s
Parameters
----------
axis : %(axis_descr)s
skipna : bool, default True
Exclude NA/null values. If an entire row/column is NA, the result
will be NA.
level : int or level name, default None
If the axis is a MultiIndex (hierarchical), count along a
particular level, collapsing into a %(name1)s.
ddof : int, default 1
Delta Degrees of Freedom. The divisor used in calculations is N - ddof,
where N represents the number of elements.
numeric_only : bool, default None
Include only float, int, boolean columns. If None, will attempt to use
everything, then use only numeric data. Not implemented for Series.
Returns
-------
%(name1)s or %(name2)s (if level specified)\n"""
_bool_doc = """
%(desc)s
Parameters
----------
axis : {0 or 'index', 1 or 'columns', None}, default 0
Indicate which axis or axes should be reduced.
* 0 / 'index' : reduce the index, return a Series whose index is the
original column labels.
* 1 / 'columns' : reduce the columns, return a Series whose index is the
original index.
* None : reduce all axes, return a scalar.
bool_only : bool, default None
Include only boolean columns. If None, will attempt to use everything,
then use only boolean data. Not implemented for Series.
skipna : bool, default True
Exclude NA/null values. If the entire row/column is NA and skipna is
True, then the result will be %(empty_value)s, as for an empty row/column.
If skipna is False, then NA are treated as True, because these are not
equal to zero.
level : int or level name, default None
If the axis is a MultiIndex (hierarchical), count along a
particular level, collapsing into a %(name1)s.
**kwargs : any, default None
Additional keywords have no effect but might be accepted for
compatibility with NumPy.
Returns
-------
%(name1)s or %(name2)s
If level is specified, then %(name2)s is returned; otherwise, %(name1)s
is returned.
%(see_also)s
%(examples)s"""
_all_desc = """\
Return whether all elements are True, potentially over an axis.
Returns True unless there is at least one element within a series or
along a DataFrame axis that is False or equivalent (e.g. zero or
empty)."""
_all_examples = """\
Examples
--------
**Series**
>>> pd.Series([True, True]).all()
True
>>> pd.Series([True, False]).all()
False
>>> pd.Series([]).all()
True
>>> pd.Series([np.nan]).all()
True
>>> pd.Series([np.nan]).all(skipna=False)
True
**DataFrames**
Create a dataframe from a dictionary.
>>> df = pd.DataFrame({'col1': [True, True], 'col2': [True, False]})
>>> df
col1 col2
0 True True
1 True False
Default behaviour checks if column-wise values all return True.
>>> df.all()
col1 True
col2 False
dtype: bool
Specify ``axis='columns'`` to check if row-wise values all return True.
>>> df.all(axis='columns')
0 True
1 False
dtype: bool
Or ``axis=None`` for whether every value is True.
>>> df.all(axis=None)
False
"""
_all_see_also = """\
See Also
--------
Series.all : Return True if all elements are True.
DataFrame.any : Return True if one (or more) elements are True.
"""
_cnum_doc = """
Return cumulative %(desc)s over a DataFrame or Series axis.
Returns a DataFrame or Series of the same size containing the cumulative
%(desc)s.
Parameters
----------
axis : {0 or 'index', 1 or 'columns'}, default 0
The index or the name of the axis. 0 is equivalent to None or 'index'.
skipna : bool, default True
Exclude NA/null values. If an entire row/column is NA, the result
will be NA.
*args, **kwargs :
Additional keywords have no effect but might be accepted for
compatibility with NumPy.
Returns
-------
%(name1)s or %(name2)s
See Also
--------
core.window.Expanding.%(accum_func_name)s : Similar functionality
but ignores ``NaN`` values.
%(name2)s.%(accum_func_name)s : Return the %(desc)s over
%(name2)s axis.
%(name2)s.cummax : Return cumulative maximum over %(name2)s axis.
%(name2)s.cummin : Return cumulative minimum over %(name2)s axis.
%(name2)s.cumsum : Return cumulative sum over %(name2)s axis.
%(name2)s.cumprod : Return cumulative product over %(name2)s axis.
%(examples)s"""
_cummin_examples = """\
Examples
--------
**Series**
>>> s = pd.Series([2, np.nan, 5, -1, 0])
>>> s
0 2.0
1 NaN
2 5.0
3 -1.0
4 0.0
dtype: float64
By default, NA values are ignored.
>>> s.cummin()
0 2.0
1 NaN
2 2.0
3 -1.0
4 -1.0
dtype: float64
To include NA values in the operation, use ``skipna=False``
>>> s.cummin(skipna=False)
0 2.0
1 NaN
2 NaN
3 NaN
4 NaN
dtype: float64
**DataFrame**
>>> df = pd.DataFrame([[2.0, 1.0],
... [3.0, np.nan],
... [1.0, 0.0]],
... columns=list('AB'))
>>> df
A B
0 2.0 1.0
1 3.0 NaN
2 1.0 0.0
By default, iterates over rows and finds the minimum
in each column. This is equivalent to ``axis=None`` or ``axis='index'``.
>>> df.cummin()
A B
0 2.0 1.0
1 2.0 NaN
2 1.0 0.0
To iterate over columns and find the minimum in each row,
use ``axis=1``
>>> df.cummin(axis=1)
A B
0 2.0 1.0
1 3.0 NaN
2 1.0 0.0
"""
_cumsum_examples = """\
Examples
--------
**Series**
>>> s = pd.Series([2, np.nan, 5, -1, 0])
>>> s
0 2.0
1 NaN
2 5.0
3 -1.0
4 0.0
dtype: float64
By default, NA values are ignored.
>>> s.cumsum()
0 2.0
1 NaN
2 7.0
3 6.0
4 6.0
dtype: float64
To include NA values in the operation, use ``skipna=False``
>>> s.cumsum(skipna=False)
0 2.0
1 NaN
2 NaN
3 NaN
4 NaN
dtype: float64
**DataFrame**
>>> df = pd.DataFrame([[2.0, 1.0],
... [3.0, np.nan],
... [1.0, 0.0]],
... columns=list('AB'))
>>> df
A B
0 2.0 1.0
1 3.0 NaN
2 1.0 0.0
By default, iterates over rows and finds the sum
in each column. This is equivalent to ``axis=None`` or ``axis='index'``.
>>> df.cumsum()
A B
0 2.0 1.0
1 5.0 NaN
2 6.0 1.0
To iterate over columns and find the sum in each row,
use ``axis=1``
>>> df.cumsum(axis=1)
A B
0 2.0 3.0
1 3.0 NaN
2 1.0 1.0
"""
_cumprod_examples = """\
Examples
--------
**Series**
>>> s = pd.Series([2, np.nan, 5, -1, 0])
>>> s
0 2.0
1 NaN
2 5.0
3 -1.0
4 0.0
dtype: float64
By default, NA values are ignored.
>>> s.cumprod()
0 2.0
1 NaN
2 10.0
3 -10.0
4 -0.0
dtype: float64
To include NA values in the operation, use ``skipna=False``
>>> s.cumprod(skipna=False)
0 2.0
1 NaN
2 NaN
3 NaN
4 NaN
dtype: float64
**DataFrame**
>>> df = pd.DataFrame([[2.0, 1.0],
... [3.0, np.nan],
... [1.0, 0.0]],
... columns=list('AB'))
>>> df
A B
0 2.0 1.0
1 3.0 NaN
2 1.0 0.0
By default, iterates over rows and finds the product
in each column. This is equivalent to ``axis=None`` or ``axis='index'``.
>>> df.cumprod()
A B
0 2.0 1.0
1 6.0 NaN
2 6.0 0.0
To iterate over columns and find the product in each row,
use ``axis=1``
>>> df.cumprod(axis=1)
A B
0 2.0 2.0
1 3.0 NaN
2 1.0 0.0
"""
_cummax_examples = """\
Examples
--------
**Series**
>>> s = pd.Series([2, np.nan, 5, -1, 0])
>>> s
0 2.0
1 NaN
2 5.0
3 -1.0
4 0.0
dtype: float64
By default, NA values are ignored.
>>> s.cummax()
0 2.0
1 NaN
2 5.0
3 5.0
4 5.0
dtype: float64
To include NA values in the operation, use ``skipna=False``
>>> s.cummax(skipna=False)
0 2.0
1 NaN
2 NaN
3 NaN
4 NaN
dtype: float64
**DataFrame**
>>> df = pd.DataFrame([[2.0, 1.0],
... [3.0, np.nan],
... [1.0, 0.0]],
... columns=list('AB'))
>>> df
A B
0 2.0 1.0
1 3.0 NaN
2 1.0 0.0
By default, iterates over rows and finds the maximum
in each column. This is equivalent to ``axis=None`` or ``axis='index'``.
>>> df.cummax()
A B
0 2.0 1.0
1 3.0 NaN
2 3.0 1.0
To iterate over columns and find the maximum in each row,
use ``axis=1``
>>> df.cummax(axis=1)
A B
0 2.0 2.0
1 3.0 NaN
2 1.0 1.0
"""
_any_see_also = """\
See Also
--------
numpy.any : Numpy version of this method.
Series.any : Return whether any element is True.
Series.all : Return whether all elements are True.
DataFrame.any : Return whether any element is True over requested axis.
DataFrame.all : Return whether all elements are True over requested axis.
"""
_any_desc = """\
Return whether any element is True, potentially over an axis.
Returns False unless there is at least one element within a series or
along a DataFrame axis that is True or equivalent (e.g. non-zero or
non-empty)."""
_any_examples = """\
Examples
--------
**Series**
For Series input, the output is a scalar indicating whether any element
is True.
>>> pd.Series([False, False]).any()
False
>>> pd.Series([True, False]).any()
True
>>> pd.Series([]).any()
False
>>> pd.Series([np.nan]).any()
False
>>> pd.Series([np.nan]).any(skipna=False)
True
**DataFrame**
Whether each column contains at least one True element (the default).
>>> df = pd.DataFrame({"A": [1, 2], "B": [0, 2], "C": [0, 0]})
>>> df
A B C
0 1 0 0
1 2 2 0
>>> df.any()
A True
B True
C False
dtype: bool
Aggregating over the columns.
>>> df = pd.DataFrame({"A": [True, False], "B": [1, 2]})
>>> df
A B
0 True 1
1 False 2
>>> df.any(axis='columns')
0 True
1 True
dtype: bool
>>> df = pd.DataFrame({"A": [True, False], "B": [1, 0]})
>>> df
A B
0 True 1
1 False 0
>>> df.any(axis='columns')
0 True
1 False
dtype: bool
Aggregating over the entire DataFrame with ``axis=None``.
>>> df.any(axis=None)
True
`any` for an empty DataFrame is an empty Series.
>>> pd.DataFrame([]).any()
Series([], dtype: bool)
"""
_shared_docs[
"stat_func_example"
] = """
Examples
--------
>>> idx = pd.MultiIndex.from_arrays([
... ['warm', 'warm', 'cold', 'cold'],
... ['dog', 'falcon', 'fish', 'spider']],
... names=['blooded', 'animal'])
>>> s = pd.Series([4, 2, 0, 8], name='legs', index=idx)
>>> s
blooded animal
warm dog 4
falcon 2
cold fish 0
spider 8
Name: legs, dtype: int64
>>> s.{stat_func}()
{default_output}
{verb} using level names, as well as indices.
>>> s.{stat_func}(level='blooded')
blooded
warm {level_output_0}
cold {level_output_1}
Name: legs, dtype: int64
>>> s.{stat_func}(level=0)
blooded
warm {level_output_0}
cold {level_output_1}
Name: legs, dtype: int64"""
_sum_examples = _shared_docs["stat_func_example"].format(
stat_func="sum", verb="Sum", default_output=14, level_output_0=6, level_output_1=8
)
_sum_examples += """
By default, the sum of an empty or all-NA Series is ``0``.
>>> pd.Series([]).sum() # min_count=0 is the default
0.0
This can be controlled with the ``min_count`` parameter. For example, if
you'd like the sum of an empty series to be NaN, pass ``min_count=1``.
>>> pd.Series([]).sum(min_count=1)
nan
Thanks to the ``skipna`` parameter, ``min_count`` handles all-NA and
empty series identically.
>>> pd.Series([np.nan]).sum()
0.0
>>> pd.Series([np.nan]).sum(min_count=1)
nan"""
_max_examples = _shared_docs["stat_func_example"].format(
stat_func="max", verb="Max", default_output=8, level_output_0=4, level_output_1=8
)
_min_examples = _shared_docs["stat_func_example"].format(
stat_func="min", verb="Min", default_output=0, level_output_0=2, level_output_1=0
)
_stat_func_see_also = """
See Also
--------
Series.sum : Return the sum.
Series.min : Return the minimum.
Series.max : Return the maximum.
Series.idxmin : Return the index of the minimum.
Series.idxmax : Return the index of the maximum.
DataFrame.sum : Return the sum over the requested axis.
DataFrame.min : Return the minimum over the requested axis.
DataFrame.max : Return the maximum over the requested axis.
DataFrame.idxmin : Return the index of the minimum over the requested axis.
DataFrame.idxmax : Return the index of the maximum over the requested axis."""
_prod_examples = """
Examples
--------
By default, the product of an empty or all-NA Series is ``1``
>>> pd.Series([]).prod()
1.0
This can be controlled with the ``min_count`` parameter
>>> pd.Series([]).prod(min_count=1)
nan
Thanks to the ``skipna`` parameter, ``min_count`` handles all-NA and
empty series identically.
>>> pd.Series([np.nan]).prod()
1.0
>>> pd.Series([np.nan]).prod(min_count=1)
nan"""
_min_count_stub = """\
min_count : int, default 0
The required number of valid values to perform the operation. If fewer than
``min_count`` non-NA values are present the result will be NA.
.. versionadded:: 0.22.0
Added with the default being 0. This means the sum of an all-NA
or empty Series is 0, and the product of an all-NA or empty
Series is 1.
"""
def _make_min_count_stat_function(
cls,
name: str,
name1: str,
name2: str,
axis_descr: str,
desc: str,
func: Callable,
see_also: str = "",
examples: str = "",
) -> Callable:
@Substitution(
desc=desc,
name1=name1,
name2=name2,
axis_descr=axis_descr,
min_count=_min_count_stub,
see_also=see_also,
examples=examples,
)
@Appender(_num_doc)
def stat_func(
self,
axis=None,
skipna=None,
level=None,
numeric_only=None,
min_count=0,
**kwargs,
):
if name == "sum":
nv.validate_sum(tuple(), kwargs)
elif name == "prod":
nv.validate_prod(tuple(), kwargs)
else:
nv.validate_stat_func(tuple(), kwargs, fname=name)
if skipna is None:
skipna = True
if axis is None:
axis = self._stat_axis_number
if level is not None:
return self._agg_by_level(
name, axis=axis, level=level, skipna=skipna, min_count=min_count
)
return self._reduce(
func,
name=name,
axis=axis,
skipna=skipna,
numeric_only=numeric_only,
min_count=min_count,
)
return set_function_name(stat_func, name, cls)
def _make_stat_function(
cls,
name: str,
name1: str,
name2: str,
axis_descr: str,
desc: str,
func: Callable,
see_also: str = "",
examples: str = "",
) -> Callable:
@Substitution(
desc=desc,
name1=name1,
name2=name2,
axis_descr=axis_descr,
min_count="",
see_also=see_also,
examples=examples,
)
@Appender(_num_doc)
def stat_func(
self, axis=None, skipna=None, level=None, numeric_only=None, **kwargs
):
if name == "median":
nv.validate_median(tuple(), kwargs)
else:
nv.validate_stat_func(tuple(), kwargs, fname=name)
if skipna is None:
skipna = True
if axis is None:
axis = self._stat_axis_number
if level is not None:
return self._agg_by_level(name, axis=axis, level=level, skipna=skipna)
return self._reduce(
func, name=name, axis=axis, skipna=skipna, numeric_only=numeric_only
)
return set_function_name(stat_func, name, cls)
def _make_stat_function_ddof(
cls, name: str, name1: str, name2: str, axis_descr: str, desc: str, func: Callable
) -> Callable:
@Substitution(desc=desc, name1=name1, name2=name2, axis_descr=axis_descr)
@Appender(_num_ddof_doc)
def stat_func(
self, axis=None, skipna=None, level=None, ddof=1, numeric_only=None, **kwargs
):
nv.validate_stat_ddof_func(tuple(), kwargs, fname=name)
if skipna is None:
skipna = True
if axis is None:
axis = self._stat_axis_number
if level is not None:
return self._agg_by_level(
name, axis=axis, level=level, skipna=skipna, ddof=ddof
)
return self._reduce(
func, name, axis=axis, numeric_only=numeric_only, skipna=skipna, ddof=ddof
)
return set_function_name(stat_func, name, cls)
def _make_cum_function(
cls,
name: str,
name1: str,
name2: str,
axis_descr: str,
desc: str,
accum_func: Callable,
accum_func_name: str,
mask_a: float,
mask_b: float,
examples: str,
) -> Callable:
@Substitution(
desc=desc,
name1=name1,
name2=name2,
axis_descr=axis_descr,
accum_func_name=accum_func_name,
examples=examples,
)
@Appender(_cnum_doc)
def cum_func(self, axis=None, skipna=True, *args, **kwargs):
skipna = nv.validate_cum_func_with_skipna(skipna, args, kwargs, name)
if axis is None:
axis = self._stat_axis_number
else:
axis = self._get_axis_number(axis)
if axis == 1:
return cum_func(self.T, axis=0, skipna=skipna, *args, **kwargs).T
def na_accum_func(blk_values):
# We will be applying this function to block values
if blk_values.dtype.kind in ["m", "M"]:
# GH#30460, GH#29058
# numpy 1.18 started sorting NaTs at the end instead of the beginning,
# so we need to work around this to keep the ordering backwards-compatible.
orig_dtype = blk_values.dtype
# We need to define mask before masking NaTs
mask = isna(blk_values)
if accum_func == np.minimum.accumulate:
# Note: the accum_func comparison fails as an "is" comparison
y = blk_values.view("i8")
y[mask] = np.iinfo(np.int64).max
changed = True
else:
y = blk_values
changed = False
result = accum_func(y.view("i8"), axis)
if skipna:
np.putmask(result, mask, iNaT)
elif accum_func == np.minimum.accumulate:
# Restore NaTs that we masked previously
nz = (~np.asarray(mask)).nonzero()[0]
if len(nz):
# everything up to the first non-na entry stays NaT
result[: nz[0]] = iNaT
if changed:
# restore NaT elements
y[mask] = iNaT # TODO: could try/finally for this?
if isinstance(blk_values, np.ndarray):
result = result.view(orig_dtype)
else:
# DatetimeArray
result = type(blk_values)._from_sequence(result, dtype=orig_dtype)
elif skipna and not issubclass(
blk_values.dtype.type, (np.integer, np.bool_)
):
vals = blk_values.copy().T
mask = isna(vals)
np.putmask(vals, mask, mask_a)
result = accum_func(vals, axis)
np.putmask(result, mask, mask_b)
else:
result = accum_func(blk_values.T, axis)
# transpose back for ndarray, not for EA
return result.T if hasattr(result, "T") else result
result = self._data.apply(na_accum_func)
d = self._construct_axes_dict()
d["copy"] = False
return self._constructor(result, **d).__finalize__(self)
return set_function_name(cum_func, name, cls)
def _make_logical_function(
cls,
name: str,
name1: str,
name2: str,
axis_descr: str,
desc: str,
func: Callable,
see_also: str,
examples: str,
empty_value: bool,
) -> Callable:
@Substitution(
desc=desc,
name1=name1,
name2=name2,
axis_descr=axis_descr,
see_also=see_also,
examples=examples,
empty_value=empty_value,
)
@Appender(_bool_doc)
def logical_func(self, axis=0, bool_only=None, skipna=True, level=None, **kwargs):
nv.validate_logical_func(tuple(), kwargs, fname=name)
if level is not None:
if bool_only is not None:
raise NotImplementedError(
"Option bool_only is not implemented with option level."
)
return self._agg_by_level(name, axis=axis, level=level, skipna=skipna)
return self._reduce(
func,
name=name,
axis=axis,
skipna=skipna,
numeric_only=bool_only,
filter_type="bool",
)
return set_function_name(logical_func, name, cls)
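# Sketch of how the factories above are typically consumed; the actual
# attachment happens elsewhere in the module, with ``name1``, ``name2`` and
# ``axis_descr`` taken from ``_doc_parms(cls)`` and ``nanops.nanany`` standing
# in for the reducer (mirroring ``nanops.nanmin`` at the top of this section):
#
#   cls.any = _make_logical_function(
#       cls, "any", name1, name2, axis_descr,
#       _any_desc, nanops.nanany, _any_see_also, _any_examples,
#       empty_value=False,
#   )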
|
convert_integer: bool_t = True,
convert_boolean: bool_t = True,
|
machineset.go
|
/*
Copyright (c) 2021 SAP SE or an SAP affiliate company. All rights reserved. This file is licensed under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by lister-gen. DO NOT EDIT.
package v1alpha1
import (
v1alpha1 "github.com/gardener/machine-controller-manager/pkg/apis/machine/v1alpha1"
"k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/client-go/tools/cache"
)
// MachineSetLister helps list MachineSets.
type MachineSetLister interface {
// List lists all MachineSets in the indexer.
List(selector labels.Selector) (ret []*v1alpha1.MachineSet, err error)
// MachineSets returns an object that can list and get MachineSets.
MachineSets(namespace string) MachineSetNamespaceLister
MachineSetListerExpansion
}
// machineSetLister implements the MachineSetLister interface.
type machineSetLister struct {
indexer cache.Indexer
}
// NewMachineSetLister returns a new MachineSetLister.
func
|
(indexer cache.Indexer) MachineSetLister {
return &machineSetLister{indexer: indexer}
}
// List lists all MachineSets in the indexer.
func (s *machineSetLister) List(selector labels.Selector) (ret []*v1alpha1.MachineSet, err error) {
err = cache.ListAll(s.indexer, selector, func(m interface{}) {
ret = append(ret, m.(*v1alpha1.MachineSet))
})
return ret, err
}
// MachineSets returns an object that can list and get MachineSets.
func (s *machineSetLister) MachineSets(namespace string) MachineSetNamespaceLister {
return machineSetNamespaceLister{indexer: s.indexer, namespace: namespace}
}
// MachineSetNamespaceLister helps list and get MachineSets.
type MachineSetNamespaceLister interface {
// List lists all MachineSets in the indexer for a given namespace.
List(selector labels.Selector) (ret []*v1alpha1.MachineSet, err error)
// Get retrieves the MachineSet from the indexer for a given namespace and name.
Get(name string) (*v1alpha1.MachineSet, error)
MachineSetNamespaceListerExpansion
}
// machineSetNamespaceLister implements the MachineSetNamespaceLister
// interface.
type machineSetNamespaceLister struct {
indexer cache.Indexer
namespace string
}
// List lists all MachineSets in the indexer for a given namespace.
func (s machineSetNamespaceLister) List(selector labels.Selector) (ret []*v1alpha1.MachineSet, err error) {
err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) {
ret = append(ret, m.(*v1alpha1.MachineSet))
})
return ret, err
}
// Get retrieves the MachineSet from the indexer for a given namespace and name.
func (s machineSetNamespaceLister) Get(name string) (*v1alpha1.MachineSet, error) {
obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name)
if err != nil {
return nil, err
}
if !exists {
return nil, errors.NewNotFound(v1alpha1.Resource("machineset"), name)
}
return obj.(*v1alpha1.MachineSet), nil
}
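// Usage sketch (the informer below is an assumed SharedIndexInformer; the
// namespace is illustrative):
//
//   lister := NewMachineSetLister(informer.GetIndexer())
//   sets, err := lister.MachineSets("default").List(labels.Everything())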
|
NewMachineSetLister
|
barcode_ccs.py
|
#! python
"""
Modified version of barcode report for use on CCS inputs
"""
from pprint import pformat
import functools
import logging
import json
import os.path as op
import sys
from pbcommand.models import DataStore, FileTypes
from pbcommand.models.report import PlotGroup
from pbcommand.cli import pbparser_runner
from pbcommand.utils import setup_log
from pbcore.io import ConsensusReadSet
from pbreports.report import barcode as barcode_report
from pbreports.report.barcode import (read_inputs, get_barcode_info_parallel,
save_demuxed_dataset_reports)
from pbreports.io.barcode import get_unbarcoded_reads_info
from pbreports.io.specs import load_spec
from pbreports.plot.helper import to_plotgroup
from pbreports.plot.tools import plot_read_lengths_with_cdf
log = logging.getLogger(__name__)
__version__ = "0.2.1"
class Constants(barcode_report.Constants):
TOOL_ID = "pbreports.tasks.barcode_ccs"
TOOL_NAME = "barcode_ccs"
DRIVER_EXE = "python -m pbreports.report.barcode_ccs --resolved-tool-contract"
VERSION = __version__
DOC = __doc__
FILE_TYPE_READS_IN = FileTypes.DS_CCS
SHOW_COLUMNS = [
barcode_report.Constants.C_BIOSAMPLE,
barcode_report.Constants.C_IDX,
barcode_report.Constants.C_BARCODE,
barcode_report.Constants.C_NREADS,
barcode_report.Constants.C_NBASES,
|
barcode_report.Constants.C_RANK
]
SHOW_ATTRIBUTES = [
barcode_report.Constants.A_NBARCODES,
barcode_report.Constants.A_NREADS_BARCODED,
barcode_report.Constants.A_NREADS_UNBARCODED,
barcode_report.Constants.A_MEAN_READS,
barcode_report.Constants.A_MAX_READS,
barcode_report.Constants.A_MIN_READS,
barcode_report.Constants.A_MEAN_RL
]
make_report = functools.partial(barcode_report._make_report_impl,
Constants.SHOW_ATTRIBUTES,
Constants.SHOW_COLUMNS,
Constants.LABEL_NONE)
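# Note: functools.partial pins the three leading arguments, so a call such as
# make_report(biosamples=..., read_info=..., ...) below is equivalent to
# barcode_report._make_report_impl(Constants.SHOW_ATTRIBUTES,
# Constants.SHOW_COLUMNS, Constants.LABEL_NONE, biosamples=..., ...).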
def run_to_report(ds_bc_file,
barcodes_file,
reads_in_file,
base_dir=None,
datastore_json=None,
nproc=1,
test_mode=False,
min_bq_filter=Constants.MIN_BQ_FILTER):
spec = load_spec("barcode")
inputs = read_inputs(ds_bc_file, barcodes_file, reads_in_file)
read_info, barcoded_zmws, biosamples, subrpt_files, bc_dataset_uuids = get_barcode_info_parallel(
inputs.ds_files,
barcodes_file,
nproc=nproc,
subrpt_output_dir=op.join(base_dir, "sub_reports"),
isoseq_mode=False)
read_info.extend(list(get_unbarcoded_reads_info(
inputs.reads_in, barcoded_zmws)))
if datastore_json is not None:
save_demuxed_dataset_reports(
subrpt_files, base_dir, datastore_json)
else:
barcode_report.write_empty_datastore(datastore_json)
rpt = make_report(biosamples=biosamples,
read_info=read_info,
bc_dataset_uuids=bc_dataset_uuids,
dataset_uuids=inputs.dataset_uuids,
base_dir=base_dir,
use_spec=spec,
test_mode=test_mode,
min_bq_filter=min_bq_filter)
return spec.apply_view(rpt)
def args_runner(args):
log.info("Starting {f} version {v} report generation".format(
f=__file__, v=__version__))
report = run_to_report(args.ds_bc, args.barcodes, args.reads_in,
base_dir=op.dirname(args.report_json),
datastore_json=args.dataset_reports,
nproc=args.nproc,
test_mode=args.test_mode,
min_bq_filter=args.min_bq_filter)
log.info(pformat(report.to_dict()))
report.write_json(args.report_json)
report.tables[0].to_csv(args.report_csv)
return 0
def resolved_tool_contract_runner(rtc):
log.info("Starting {f} version {v} report generation".format(
f=__file__, v=__version__))
report = run_to_report(
ds_bc_file=rtc.task.input_files[0],
barcodes_file=rtc.task.input_files[2],
reads_in_file=rtc.task.input_files[1],
base_dir=op.dirname(rtc.task.output_files[0]),
datastore_json=rtc.task.output_files[2],
nproc=rtc.task.nproc)
log.debug(pformat(report.to_dict()))
report.write_json(rtc.task.output_files[0])
report.tables[0].to_csv(rtc.task.output_files[1])
return 0
def _get_parser():
return barcode_report.get_parser(Constants)
def main(argv=sys.argv):
return pbparser_runner(
argv=argv[1:],
parser=_get_parser(),
args_runner_func=args_runner,
contract_runner_func=resolved_tool_contract_runner,
alog=log,
setup_log_func=setup_log)
if __name__ == "__main__":
sys.exit(main(sys.argv))
|
barcode_report.Constants.C_BCQUAL,
|
setup.py
|
from setuptools import find_packages, setup
def main():
extras = {
'bots': ['python-telegram-bot'],
'hpo': ['scikit-optimize==0.5.2', 'scipy'],
'monitoring': ['scikit-optimize==0.5.2', 'sacred==0.7.5', 'scikit-learn==0.21.3',
'scikit-plot==0.3.7', 'seaborn==0.8.1', 'aif360==0.2.1'],
'versioning': ['boto3', 'numpy'],
'viz': ['altair==2.3.0'],
}
all_deps = []
for group_name in extras:
all_deps += extras[group_name]
extras['all'] = all_deps
base_libs = ['attrdict==2.0.0', 'neptune-client', 'joblib==0.13', 'pandas', 'matplotlib', 'Pillow==5.4.1']
setup(
name='neptune-contrib',
version='0.13.7',
|
description='Neptune Python library contributions',
author='neptune.ml',
author_email='[email protected]',
url="https://github.com/neptune-ml/neptune-contrib",
long_description='Neptune Python library contributions',
license='MIT License',
install_requires=base_libs,
extras_require=extras,
packages=find_packages(include=['neptunecontrib*']),
)
if __name__ == "__main__":
main()
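# Install sketch: the extras defined above map to pip "extras", e.g.
#   pip install neptune-contrib          # base_libs only
#   pip install neptune-contrib[hpo]     # adds scikit-optimize and scipy
#   pip install neptune-contrib[all]     # every optional dependency group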
| |
main.rs
|
#[derive(Debug)]
struct StackNode<T> {
data: T,
next: Option<Box<StackNode<T>>>,
}
#[derive(Debug)]
struct
|
<T> {
top: Option<Box<StackNode<T>>>,
}
impl<T> Stack<T> {
fn new() -> Stack<T> {
Stack { top: None }
}
fn push(&mut self, data: T) {
let mut node = StackNode { data, next: None };
let next = self.top.take();
node.next = next;
self.top = Some(Box::new(node));
}
fn pop(&mut self) -> Option<T> {
let node = self.top.take();
match node {
None => None,
Some(mut x) => {
self.top = x.next.take();
Some(x.data)
}
}
}
}
fn main() {
let mut l = Stack::new();
for i in 1..100 {
l.push(i);
}
println!("{}", l.pop().unwrap());
}
|
Stack
|
ping_targets.py
|
class
|
:
def __init__(self, id: str, name: str, color: str, studon: str):
self.id = id
self.name = name
self.color = color
self.studon = studon
class Role:
def __init__(self, id: str, name: str, color: str):
self.id = id
self.name = name
self.color = color
|
User
|
log.js
|
'use strict';
const assert = require('assert');
const isStream = require('is-stream');
const logsLevels = require('../settings/logs-levels');
function
|
(stdout, message, logLevel = logsLevels.LOG) {
assert(isStream.writable(stdout) || stdout === undefined, `stdout must be a writable stream or eventually undefined`);
if (stdout) {
assert(typeof message === 'string', `message must be a string`);
assert(typeof logLevel === 'symbol' && logLevel in logsLevels,
`logLevel must be one of the following levels: ${Object.keys(logsLevels).filter(l => typeof l === 'string').join(', ')}`
);
stdout.write(
`${logsLevels[logLevel]}: ${message}\n`
);
}
}
log.notice = (stdout, message) => log(stdout, message, logsLevels.NOTICE);
log.warn = (stdout, message) => log(stdout, message, logsLevels.WARN);
log.success = (stdout, message) => log(stdout, message, logsLevels.SUCCESS);
module.exports = log;
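// Usage sketch (assumes ../settings/logs-levels maps level symbols to label
// strings, as the lookups above imply):
//   const log = require('./log');
//   log(process.stdout, 'server started');            // defaults to LOG
//   log.warn(process.stdout, 'disk usage above 90%');
//   log.success(undefined, 'ignored');                // no stream, no output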
|
log
|
action-app_template.py
|
#!/usr/bin/env python3
from snipsTools import SnipsConfigParser
from hermes_python.hermes import Hermes
# imported to get type check and IDE completion
from hermes_python.ontology.dialogue.intent import IntentMessage
CONFIG_INI = "config.ini"
# If this skill is supposed to run on the satellite,
# please get this mqtt connection info from <config.ini>
# Hint: MQTT server is always running on the master device
MQTT_IP_ADDR: str = "localhost"
MQTT_PORT: int = 1883
MQTT_ADDR: str = "{}:{}".format(MQTT_IP_ADDR, str(MQTT_PORT))
class
|
(object):
"""Class used to wrap action code with mqtt connection
Please change the name refering to your application
"""
def __init__(self):
# get the configuration if needed
try:
self.config = SnipsConfigParser.read_configuration_file(CONFIG_INI)
except Exception:
self.config = None
# start listening to MQTT
self.start_blocking()
def intent_1_callback(self,
hermes: Hermes,
intent_message: IntentMessage):
# terminate the session first if not continue
hermes.publish_end_session(intent_message.session_id, "")
# action code goes here...
print('[Received] intent: {}'.format(
intent_message.intent.intent_name))
# if need to speak the execution result by tts
hermes.publish_start_session_notification(
intent_message.site_id,
"Action 1", "")
def intent_2_callback(self,
hermes: Hermes,
intent_message: IntentMessage):
# terminate the session first if not continue
hermes.publish_end_session(intent_message.session_id, "")
# action code goes here...
print('[Received] intent: {}'.format(
intent_message.intent.intent_name))
# if need to speak the execution result by tts
hermes.publish_start_session_notification(
intent_message.site_id,
"Action 2", "")
def master_intent_callback(self,
hermes: Hermes,
intent_message: IntentMessage,):
coming_intent = intent_message.intent.intent_name
if coming_intent == 'intent_1':
self.intent_1_callback(hermes, intent_message)
if coming_intent == 'intent_2':
self.intent_2_callback(hermes, intent_message)
# more callback and if condition goes here...
# --> Register callback function and start MQTT
def start_blocking(self):
with Hermes(MQTT_ADDR) as h:
h.subscribe_intents(self.master_intent_callback).start()
if __name__ == "__main__":
Template()
|
Template
|
tracing.py
|
import logging
import opentracing
from jaeger_client import Config
def init_tracer(service):
logging.getLogger('').handlers = []
logging.basicConfig(format='%(message)s', level=logging.DEBUG)
config = Config(
config={
'sampler': {
'type': 'const',
'param': 1,
},
'logging': True,
'reporter_batch_size': 1,
},
service_name=service,
)
# this call sets global variable opentracing.tracer
config.initialize_tracer()
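# Usage sketch (service name is illustrative): after init_tracer() the tracer
# is reachable through the global opentracing.tracer, e.g.
#   init_tracer("quizz-service")
#   with opentracing.tracer.start_active_span("work") as scope:
#       scope.span.set_tag("example", True)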
def flask_to_scope(flask_tracer, request):
return opentracing.tracer.scope_manager.activate(
flask_tracer.get_span(request),
False,
)
def parse_baggage(headers, scope):
baggage = headers.get("jaeger-baggage")
print(f"found baggage: {baggage}")
|
if not baggage:
return
fields_as_dict = dict([f.split("=") for f in (baggage.split(","))])
if "session" in fields_as_dict.keys():
sessionId = fields_as_dict.get("session")
scope.span.set_tag("garden-session", sessionId)
#print(f"set session {sessionId}")
if "request" in fields_as_dict.keys():
requestId = fields_as_dict.get("request")
scope.span.set_tag("quizz-request", requestId)
#print(f"set request {requestId}")
| |
test_missing_impute.py
|
from eazieda.missing_impute import missing_impute
import pandas as pd
import numpy as np
from pytest import raises, fixture
@fixture
def df_miss():
df = pd.DataFrame(
[[1.0, "x"], [np.nan, "y"], [2.0, np.nan], [3.0, "y"]],
columns=["a", "b"],
)
return df
@fixture
def
|
():
df = pd.DataFrame(
[[1.0, "x"], [np.nan, "y"], [2.0, np.nan], [3.0, "y"], [4.0, "y"]],
columns=["a", "b"],
)
return df
def test_missing_impute(df_miss, df_miss_2):
# Test with default arguments
expected_output_default = pd.DataFrame(
data={"a": [1.0, 2.0, 2.0, 3.0], "b": ["x", "y", "y", "y"]}
).reset_index(drop=True)
missing_output_default = missing_impute(df_miss)
assert pd.DataFrame.equals(missing_output_default, expected_output_default)
# Test with two drop arguments selected at the same time
expected_output_two_drop = pd.DataFrame(
data={"a": [1.0, 3.0], "b": ["x", "y"]}
).reset_index(drop=True)
missing_output_two_drop = missing_impute(
df_miss, method_num="drop", method_non_num="drop"
)
assert pd.DataFrame.equals(
missing_output_two_drop, expected_output_two_drop
)
# Test with method_num="mean", method_non_num="drop"
expected_output_one_drop = pd.DataFrame(
data={"a": [1.0, 2.0, 3.0], "b": ["x", "y", "y"]}
).reset_index(drop=True)
missing_output_one_drop = missing_impute(df_miss, method_non_num="drop")
assert pd.DataFrame.equals(
expected_output_one_drop, missing_output_one_drop
)
# Test with method_num="median", method_non_num="most_frequent"
expected_output_median = pd.DataFrame(
data={"a": [1.0, 2.0, 2.0, 3.0], "b": ["x", "y", "y", "y"]}
).reset_index(drop=True)
missing_output_median = missing_impute(df_miss, method_num="median")
assert pd.DataFrame.equals(missing_output_median, expected_output_median)
# Test with method_num="median", method_non_num="drop"
expected_output_median_drop = pd.DataFrame(
data={"a": [1.0, 2.0, 3.0], "b": ["x", "y", "y"]}
).reset_index(drop=True)
missing_output_median_drop = missing_impute(
df_miss, method_num="median", method_non_num="drop"
)
assert pd.DataFrame.equals(
missing_output_median_drop, expected_output_median_drop
)
# Test with method_num="drop", method_non_num="most_frequent"
expected_output_drop_freq = pd.DataFrame(
[[1.0, "x"], [2.0, "y"], [3.0, "y"], [4.0, "y"]], columns=["a", "b"],
).reset_index(drop=True)
missing_output_drop_freq = missing_impute(
df_miss_2, method_num="drop", method_non_num="most_frequent"
)
assert pd.DataFrame.equals(
missing_output_drop_freq, expected_output_drop_freq
)
# Test whether a non-dataframe input raises TypeError
with raises(TypeError):
missing_impute(5)
# Test whether invalid input of method_num raises ValueError
with raises(ValueError):
missing_impute(df_miss, method_num="mea")
# Test whether invalid input of method_non_num raises ValueError
with raises(ValueError):
missing_impute(df_miss, method_num="mean", method_non_num="most_freq")
|
df_miss_2
|
mod.rs
|
use std::env;
use std::net::SocketAddr;
use std::str::FromStr;
use lazy_static::lazy_static;
// Gets an environment variable by a key and parses as type `T` or returns
// the provided default value.
fn
|
<T>(key: &str, default: T) -> T
where
T: FromStr,
{
if let Ok(v) = env::var(key) {
if let Ok(v) = v.parse::<T>() {
return v;
}
}
default
}
fn get_env_var_or_else<T, F>(key: &str, f: F) -> T
where
T: FromStr,
F: FnOnce() -> T,
{
if let Ok(v) = env::var(key) {
if let Ok(v) = v.parse::<T>() {
return v;
}
}
f()
}
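// Usage sketch (keys and defaults below are illustrative only): values are
// read from the process environment and fall back to the default when the
// variable is unset or does not parse as `T`.
//
//   let workers: usize = get_env_var_or("WORKER_THREADS", 4);
//   let listen: String = get_env_var_or_else("LISTEN", || "127.0.0.1:80".to_string());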
#[cfg(target_os = "ios")]
lazy_static! {
/// Maximum number of proxy outbound TCP connections allowed at the same time.
pub static ref ENDPOINT_TCP_CONCURRENCY: usize = {
get_env_var_or("ENDPOINT_TCP_CONCURRENCY", 45)
};
/// Maximum number of direct outbound TCP connections allowed at the same time.
pub static ref DIRECT_TCP_CONCURRENCY: usize = {
get_env_var_or("DIRECT_TCP_CONCURRENCY", 64)
};
/// DNS cache size in the built-in DNS client.
pub static ref DNS_CACHE_SIZE: usize = {
get_env_var_or("DNS_CACHE_SIZE", 64)
};
}
#[cfg(not(target_os = "ios"))]
lazy_static! {
/// Maximum number of proxy outbound TCP connections allowed at the same time.
pub static ref ENDPOINT_TCP_CONCURRENCY: usize = {
get_env_var_or("ENDPOINT_TCP_CONCURRENCY", 1024)
};
/// Maximum number of direct outbound TCP connections allowed at the same time.
pub static ref DIRECT_TCP_CONCURRENCY: usize = {
get_env_var_or("DIRECT_TCP_CONCURRENCY", 1024)
};
/// DNS cache size in the built-in DNS client.
pub static ref DNS_CACHE_SIZE: usize = {
get_env_var_or("DNS_CACHE_SIZE", 512)
};
}
#[cfg(feature = "stat")]
lazy_static! {
pub static ref ENABLE_STATS: bool = get_env_var_or("ENABLE_STATS", false);
}
lazy_static! {
pub static ref HTTP_USER_AGENT: String = {
get_env_var_or_else(
"HTTP_USER_AGENT",
|| get_env_var_or("USER_AGENT", "".to_string()), // legacy support
)
};
// The purpose is not to propagate the header, but to extract the forwarded
// source IP. The value is expected to be a comma-separated IP list, and only
// the first IP is taken as the forwarded source. Making this configurable is
// useful when you don't trust the X-Forwarded-For header but do trust another
// header, for example the CF-Connecting-IP header provided by Cloudflare.
pub static ref HTTP_FORWARDED_HEADER: String = {
get_env_var_or("HTTP_FORWARDED_HEADER", "X-Forwarded-For".to_string())
};
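// Example: with the default header, a request carrying
// `X-Forwarded-For: 203.0.113.7, 198.51.100.2` resolves to 203.0.113.7
// (the first entry) as the forwarded source.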
pub static ref LOG_CONSOLE_OUT: bool = {
get_env_var_or("LOG_CONSOLE_OUT", false)
};
pub static ref LOG_NO_COLOR: bool = {
get_env_var_or("LOG_NO_COLOR", false)
};
/// Uplink timeout after downlink EOF.
pub static ref TCP_UPLINK_TIMEOUT: u64 = {
get_env_var_or("TCP_UPLINK_TIMEOUT", 10)
};
/// Downlink timeout after uplink EOF.
pub static ref TCP_DOWNLINK_TIMEOUT: u64 = {
get_env_var_or("TCP_DOWNLINK_TIMEOUT", 10)
};
/// Buffer size for uplink and downlink connections, in KB.
pub static ref LINK_BUFFER_SIZE: usize = {
get_env_var_or("LINK_BUFFER_SIZE", 2)
};
/// Buffer size for UDP datagrams receiving/sending, in KB.
pub static ref DATAGRAM_BUFFER_SIZE: usize = {
get_env_var_or("DATAGRAM_BUFFER_SIZE", 2)
};
pub static ref OUTBOUND_DIAL_TIMEOUT: u64 = {
get_env_var_or("OUTBOUND_DIAL_TIMEOUT", 4)
};
pub static ref OUTBOUND_DIAL_ORDER: crate::proxy::DialOrder = {
match get_env_var_or("OUTBOUND_DIAL_ORDER", "ordered".to_string()).as_str() {
"random" => crate::proxy::DialOrder::Random,
"partial-random" => crate::proxy::DialOrder::PartialRandom,
_ => crate::proxy::DialOrder::Ordered,
}
};
/// Maximum outbound dial concurrency.
pub static ref OUTBOUND_DIAL_CONCURRENCY: usize = {
get_env_var_or("OUTBOUND_DIAL_CONCURRENCY", 1)
};
pub static ref ASSET_LOCATION: String = {
get_env_var_or_else("ASSET_LOCATION", || {
let mut file = std::env::current_exe().unwrap();
file.pop();
file.to_str().unwrap().to_string()
})
};
pub static ref CACHE_LOCATION: String = {
get_env_var_or("CACHE_LOCATION", "".to_string())
};
pub static ref API_LISTEN: String = {
get_env_var_or("API_LISTEN", "".to_string())
};
pub static ref ENABLE_IPV6: bool = {
get_env_var_or("ENABLE_IPV6", false)
};
pub static ref PREFER_IPV6: bool = {
get_env_var_or("PREFER_IPV6", false)
};
pub static ref UNSPECIFIED_BIND_ADDR: SocketAddr = {
get_env_var_or_else("UNSPECIFIED_BIND_ADDR", || {
if *ENABLE_IPV6 {
"[::]:0".to_string().parse().unwrap()
} else {
"0.0.0.0:0".to_string().parse().unwrap()
}
})
};
pub static ref OUTBOUND_BINDS: Vec<crate::proxy::OutboundBind> = {
let binds = get_env_var_or("OUTBOUND_INTERFACE", "0.0.0.0,::".to_string());
let mut outbound_binds = Vec::new();
for item in binds.split(',').map(str::trim) {
if let Ok(addr) = crate::common::net::parse_bind_addr(item) {
outbound_binds.push(crate::proxy::OutboundBind::Ip(addr));
} else {
outbound_binds.push(crate::proxy::OutboundBind::Interface(item.to_owned()));
}
}
outbound_binds
};
/// Sets the RPC service endpoint for protecting outbound sockets on Android to
/// avoid an infinite loop. The `path` is treated as a Unix domain socket endpoint.
/// The RPC service simply listens for incoming connections, reads an int32 on
/// each connection, treats it as the file descriptor to protect, and writes
/// back 0 on success.
pub static ref SOCKET_PROTECT_PATH: String = {
get_env_var_or("SOCKET_PROTECT_PATH", "".to_string())
};
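// Client-side sketch of the protocol described above (assumptions: blocking
// std I/O and native endianness for the i32 values):
//
//   use std::io::{Read, Write};
//   use std::os::unix::net::UnixStream;
//   let mut s = UnixStream::connect(&*SOCKET_PROTECT_PATH)?;
//   s.write_all(&fd.to_ne_bytes())?;        // fd: i32 of the socket to protect
//   let mut ack = [0u8; 4];
//   s.read_exact(&mut ack)?;
//   let ok = i32::from_ne_bytes(ack) == 0;  // 0 => protected successfully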
pub static ref SOCKET_PROTECT_SERVER: Option<SocketAddr> = {
get_env_var_or("SOCKET_PROTECT_SERVER", "".to_string()).parse().ok()
};
pub static ref GATEWAY_MODE: bool = {
get_env_var_or("GATEWAY_MODE", false)
};
/// UDP session timeout. A UDP session shall be terminated if there is no
/// activity within this period. Timeouts are only observed when a check
/// happens.
pub static ref UDP_SESSION_TIMEOUT: u64 = {
get_env_var_or("UDP_SESSION_TIMEOUT", 30)
};
/// UDP session timeout check interval. The interval to check for UDP session
/// timeouts.
pub static ref UDP_SESSION_TIMEOUT_CHECK_INTERVAL: u64 = {
get_env_var_or("UDP_SESSION_TIMEOUT_CHECK_INTERVAL", 10)
};
/// Maximum retries for a specific DNS query for the built-in DNS client.
pub static ref MAX_DNS_RETRIES: usize = {
get_env_var_or("MAX_DNS_RETRIES", 4)
};
/// Timeout for a DNS query for the built-in DNS client.
pub static ref DNS_TIMEOUT: u64 = {
get_env_var_or("DNS_TIMEOUT", 4)
};
pub static ref DEFAULT_TUN_NAME: String = {
get_env_var_or("DEFAULT_TUN_NAME", "utun233".to_string())
};
pub static ref DEFAULT_TUN_IPV4_ADDR: String = {
get_env_var_or("DEFAULT_TUN_IPV4_ADDR", "240.255.0.2".to_string())
};
pub static ref DEFAULT_TUN_IPV4_GW: String = {
get_env_var_or("DEFAULT_TUN_IPV4_GW", "240.255.0.1".to_string())
};
pub static ref DEFAULT_TUN_IPV4_MASK: String = {
get_env_var_or("DEFAULT_TUN_IPV4_MASK", "255.255.255.0".to_string())
};
pub static ref DEFAULT_TUN_IPV6_ADDR: String = {
get_env_var_or("DEFAULT_TUN_IPV6_ADDR", "2001:2::2".to_string())
};
pub static ref DEFAULT_TUN_IPV6_GW: String = {
get_env_var_or("DEFAULT_TUN_IPV6_GW", "2001:2::1".to_string())
};
pub static ref DEFAULT_TUN_IPV6_PREFIXLEN: i32 = {
get_env_var_or("DEFAULT_TUN_IPV6_PREFIXLEN", 64)
};
}
|
get_env_var_or
|
kv_rwset.pb.go
|
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: ledger/rwset/kvrwset/kv_rwset.proto
/*
Package kvrwset is a generated protocol buffer package.
It is generated from these files:
ledger/rwset/kvrwset/kv_rwset.proto
It has these top-level messages:
KVRWSet
HashedRWSet
KVRead
KVWrite
KVReadHash
KVWriteHash
Version
RangeQueryInfo
QueryReads
QueryReadsMerkleSummary
*/
package kvrwset
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
// KVRWSet encapsulates the read-write set for a chaincode that operates upon a KV or Document data model
// This structure is used for both the public data and the private data
type KVRWSet struct {
Reads []*KVRead `protobuf:"bytes,1,rep,name=reads" json:"reads,omitempty"`
RangeQueriesInfo []*RangeQueryInfo `protobuf:"bytes,2,rep,name=range_queries_info,json=rangeQueriesInfo" json:"range_queries_info,omitempty"`
Writes []*KVWrite `protobuf:"bytes,3,rep,name=writes" json:"writes,omitempty"`
}
func (m *KVRWSet) Reset() { *m = KVRWSet{} }
func (m *KVRWSet) String() string { return proto.CompactTextString(m) }
func (*KVRWSet) ProtoMessage() {}
func (*KVRWSet) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
func (m *KVRWSet) GetReads() []*KVRead {
if m != nil {
return m.Reads
}
return nil
}
func (m *KVRWSet) GetRangeQueriesInfo() []*RangeQueryInfo {
if m != nil {
return m.RangeQueriesInfo
}
return nil
}
func (m *KVRWSet) GetWrites() []*KVWrite {
if m != nil {
return m.Writes
}
return nil
}
// HashedRWSet encapsulates hashed representation of a private read-write set for KV or Document data model
type HashedRWSet struct {
HashedReads []*KVReadHash `protobuf:"bytes,1,rep,name=hashed_reads,json=hashedReads" json:"hashed_reads,omitempty"`
HashedWrites []*KVWriteHash `protobuf:"bytes,2,rep,name=hashed_writes,json=hashedWrites" json:"hashed_writes,omitempty"`
}
func (m *HashedRWSet) Reset() { *m = HashedRWSet{} }
func (m *HashedRWSet) String() string { return proto.CompactTextString(m) }
func (*HashedRWSet) ProtoMessage() {}
func (*HashedRWSet) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
func (m *HashedRWSet) GetHashedReads() []*KVReadHash {
if m != nil {
return m.HashedReads
}
return nil
}
func (m *HashedRWSet) GetHashedWrites() []*KVWriteHash {
if m != nil {
return m.HashedWrites
}
return nil
}
// KVRead captures a read operation performed during transaction simulation
// A 'nil' version indicates a non-existing key read by the transaction
type KVRead struct {
Key string `protobuf:"bytes,1,opt,name=key" json:"key,omitempty"`
Version *Version `protobuf:"bytes,2,opt,name=version" json:"version,omitempty"`
}
func (m *KVRead) Reset() { *m = KVRead{} }
func (m *KVRead) String() string { return proto.CompactTextString(m) }
func (*KVRead) ProtoMessage() {}
func (*KVRead) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
func (m *KVRead) GetKey() string {
if m != nil {
return m.Key
}
return ""
}
func (m *KVRead) GetVersion() *Version {
if m != nil {
return m.Version
}
return nil
}
// KVWrite captures a write (update/delete) operation performed during transaction simulation
type KVWrite struct {
Key string `protobuf:"bytes,1,opt,name=key" json:"key,omitempty"`
IsDelete bool `protobuf:"varint,2,opt,name=is_delete,json=isDelete" json:"is_delete,omitempty"`
Value []byte `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"`
}
func (m *KVWrite) Reset() { *m = KVWrite{} }
func (m *KVWrite) String() string { return proto.CompactTextString(m) }
func (*KVWrite) ProtoMessage() {}
func (*KVWrite) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} }
func (m *KVWrite) GetKey() string {
if m != nil {
return m.Key
}
return ""
}
func (m *KVWrite) GetIsDelete() bool {
if m != nil {
return m.IsDelete
}
return false
}
func (m *KVWrite) GetValue() []byte {
if m != nil {
return m.Value
}
return nil
}
// KVReadHash is similar to the KVRead in spirit. However, it captures the hash of the key instead of the key itself
// version is kept as is for now. However, if the version also needs to be privacy-protected, it would need to be the
// hash of the version and hence of 'bytes' type
type KVReadHash struct {
KeyHash []byte `protobuf:"bytes,1,opt,name=key_hash,json=keyHash,proto3" json:"key_hash,omitempty"`
Version *Version `protobuf:"bytes,2,opt,name=version" json:"version,omitempty"`
}
func (m *KVReadHash) Reset() { *m = KVReadHash{} }
func (m *KVReadHash) String() string { return proto.CompactTextString(m) }
func (*KVReadHash) ProtoMessage() {}
func (*KVReadHash) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} }
func (m *KVReadHash) GetKeyHash() []byte {
if m != nil {
return m.KeyHash
}
return nil
}
func (m *KVReadHash) GetVersion() *Version {
if m != nil {
return m.Version
}
return nil
}
// KVWriteHash is similar to the KVWrite in spirit. It captures a write (update/delete) operation performed during transaction simulation, storing hashes of the key and value instead of the key and value themselves
type KVWriteHash struct {
KeyHash []byte `protobuf:"bytes,1,opt,name=key_hash,json=keyHash,proto3" json:"key_hash,omitempty"`
IsDelete bool `protobuf:"varint,2,opt,name=is_delete,json=isDelete" json:"is_delete,omitempty"`
ValueHash []byte `protobuf:"bytes,3,opt,name=value_hash,json=valueHash,proto3" json:"value_hash,omitempty"`
}
func (m *KVWriteHash) Reset() { *m = KVWriteHash{} }
func (m *KVWriteHash) String() string { return proto.CompactTextString(m) }
func (*KVWriteHash) ProtoMessage() {}
func (*KVWriteHash) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} }
func (m *KVWriteHash) GetKeyHash() []byte {
if m != nil {
return m.KeyHash
}
return nil
}
func (m *KVWriteHash) GetIsDelete() bool {
if m != nil {
return m.IsDelete
}
return false
}
func (m *KVWriteHash) GetValueHash() []byte {
if m != nil {
return m.ValueHash
}
return nil
}
// Version encapsulates the version of a Key
// A version of a committed key is maintained as the height of the transaction that committed the key.
// The height is represented as a tuple <blockNum, txNum> where txNum is the height of the transaction
// (starting with 1) within the block; for example, the third transaction in block 10 has version <10, 3>.
type Version struct {
BlockNum uint64 `protobuf:"varint,1,opt,name=block_num,json=blockNum" json:"block_num,omitempty"`
TxNum uint64 `protobuf:"varint,2,opt,name=tx_num,json=txNum" json:"tx_num,omitempty"`
}
func (m *Version) Reset() { *m = Version{} }
func (m *Version) String() string { return proto.CompactTextString(m) }
func (*Version) ProtoMessage() {}
func (*Version) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} }
func (m *Version) GetBlockNum() uint64 {
if m != nil {
return m.BlockNum
}
return 0
}
func (m *Version) GetTxNum() uint64 {
if m != nil {
return m.TxNum
}
return 0
}
// RangeQueryInfo encapsulates the details of a range query performed by a transaction during simulation.
// This helps protect transactions from phantom reads by verifying during validation whether any new items
// got committed within the given range between transaction simulation and validation
// (in addition to regular checks for updates/deletes of the existing items).
// The readsInfo field contains either the KVReads (for the items read by the range query) or a merkle-tree hash
// if the number of KVReads exceeds a pre-configured number
type RangeQueryInfo struct {
StartKey string `protobuf:"bytes,1,opt,name=start_key,json=startKey" json:"start_key,omitempty"`
EndKey string `protobuf:"bytes,2,opt,name=end_key,json=endKey" json:"end_key,omitempty"`
ItrExhausted bool `protobuf:"varint,3,opt,name=itr_exhausted,json=itrExhausted" json:"itr_exhausted,omitempty"`
// Types that are valid to be assigned to ReadsInfo:
// *RangeQueryInfo_RawReads
// *RangeQueryInfo_ReadsMerkleHashes
ReadsInfo isRangeQueryInfo_ReadsInfo `protobuf_oneof:"reads_info"`
}
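// Construction sketch (keys and flags are illustrative only): a range query
// over ["a", "g") that returned two keys without exhausting the iterator.
//
//   rqi := &RangeQueryInfo{
//       StartKey:     "a",
//       EndKey:       "g",
//       ItrExhausted: false,
//       ReadsInfo: &RangeQueryInfo_RawReads{
//           RawReads: &QueryReads{KvReads: []*KVRead{{Key: "b"}, {Key: "c"}}},
//       },
//   }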
func (m *RangeQueryInfo) Reset() { *m = RangeQueryInfo{} }
func (m *RangeQueryInfo) String() string { return proto.CompactTextString(m) }
func (*RangeQueryInfo) ProtoMessage() {}
func (*RangeQueryInfo) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} }
type isRangeQueryInfo_ReadsInfo interface {
isRangeQueryInfo_ReadsInfo()
}
type RangeQueryInfo_RawReads struct {
RawReads *QueryReads `protobuf:"bytes,4,opt,name=raw_reads,json=rawReads,oneof"`
}
type RangeQueryInfo_ReadsMerkleHashes struct {
ReadsMerkleHashes *QueryReadsMerkleSummary `protobuf:"bytes,5,opt,name=reads_merkle_hashes,json=readsMerkleHashes,oneof"`
}
func (*RangeQueryInfo_RawReads) isRangeQueryInfo_ReadsInfo() {}
func (*RangeQueryInfo_ReadsMerkleHashes) isRangeQueryInfo_ReadsInfo() {}
func (m *RangeQueryInfo) GetReadsInfo() isRangeQueryInfo_ReadsInfo {
if m != nil {
return m.ReadsInfo
}
return nil
}
func (m *RangeQueryInfo) GetStartKey() string {
if m != nil {
return m.StartKey
}
return ""
}
func (m *RangeQueryInfo) GetEndKey() string {
if m != nil {
return m.EndKey
}
return ""
}
func (m *RangeQueryInfo) GetItrExhausted() bool {
if m != nil {
return m.ItrExhausted
}
return false
}
func (m *RangeQueryInfo) GetRawReads() *QueryReads {
if x, ok := m.GetReadsInfo().(*RangeQueryInfo_RawReads); ok {
return x.RawReads
}
return nil
}
func (m *RangeQueryInfo) GetReadsMerkleHashes() *QueryReadsMerkleSummary {
if x, ok := m.GetReadsInfo().(*RangeQueryInfo_ReadsMerkleHashes); ok {
return x.ReadsMerkleHashes
}
return nil
}
// XXX_OneofFuncs is for the internal use of the proto package.
func (*RangeQueryInfo) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
return _RangeQueryInfo_OneofMarshaler, _RangeQueryInfo_OneofUnmarshaler, _RangeQueryInfo_OneofSizer, []interface{}{
(*RangeQueryInfo_RawReads)(nil),
(*RangeQueryInfo_ReadsMerkleHashes)(nil),
}
}
func _RangeQueryInfo_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
m := msg.(*RangeQueryInfo)
// reads_info
switch x := m.ReadsInfo.(type) {
case *RangeQueryInfo_RawReads:
b.EncodeVarint(4<<3 | proto.WireBytes)
if err := b.EncodeMessage(x.RawReads); err != nil {
return err
}
case *RangeQueryInfo_ReadsMerkleHashes:
b.EncodeVarint(5<<3 | proto.WireBytes)
if err := b.EncodeMessage(x.ReadsMerkleHashes); err != nil {
return err
}
case nil:
default:
return fmt.Errorf("RangeQueryInfo.ReadsInfo has unexpected type %T", x)
}
return nil
}
func _RangeQueryInfo_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
m := msg.(*RangeQueryInfo)
switch tag {
case 4: // reads_info.raw_reads
if wire != proto.WireBytes {
return true, proto.ErrInternalBadWireType
}
msg := new(QueryReads)
err := b.DecodeMessage(msg)
m.ReadsInfo = &RangeQueryInfo_RawReads{msg}
return true, err
case 5: // reads_info.reads_merkle_hashes
if wire != proto.WireBytes {
return true, proto.ErrInternalBadWireType
}
msg := new(QueryReadsMerkleSummary)
err := b.DecodeMessage(msg)
m.ReadsInfo = &RangeQueryInfo_ReadsMerkleHashes{msg}
return true, err
default:
return false, nil
}
}
func _RangeQueryInfo_OneofSizer(msg proto.Message) (n int) {
m := msg.(*RangeQueryInfo)
// reads_info
switch x := m.ReadsInfo.(type) {
case *RangeQueryInfo_RawReads:
s := proto.Size(x.RawReads)
n += proto.SizeVarint(4<<3 | proto.WireBytes)
n += proto.SizeVarint(uint64(s))
n += s
case *RangeQueryInfo_ReadsMerkleHashes:
s := proto.Size(x.ReadsMerkleHashes)
n += proto.SizeVarint(5<<3 | proto.WireBytes)
n += proto.SizeVarint(uint64(s))
n += s
case nil:
default:
panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
}
return n
}
// QueryReads encapsulates the KVReads for the items read by a transaction as a result of a query execution
type QueryReads struct {
KvReads []*KVRead `protobuf:"bytes,1,rep,name=kv_reads,json=kvReads" json:"kv_reads,omitempty"`
}
func (m *QueryReads) Reset() { *m = QueryReads{} }
func (m *QueryReads) String() string { return proto.CompactTextString(m) }
func (*QueryReads) ProtoMessage() {}
func (*QueryReads) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} }
func (m *QueryReads) GetKvReads() []*KVRead {
if m != nil {
return m.KvReads
}
return nil
}
// QueryReadsMerkleSummary encapsulates the Merkle-tree hashes for the QueryReads
// This allows the size of the RWSet to be reduced in the presence of query results
// by storing certain hashes instead of the actual results.
// maxDegree field refers to the maximum number of children in the tree at any level
// maxLevel field contains the lowest level which has fewer nodes than maxDegree (starting from the leaf level)
type QueryReadsMerkleSummary struct {
MaxDegree uint32 `protobuf:"varint,1,opt,name=max_degree,json=maxDegree" json:"max_degree,omitempty"`
MaxLevel uint32 `protobuf:"varint,2,opt,name=max_level,json=maxLevel" json:"max_level,omitempty"`
MaxLevelHashes [][]byte `protobuf:"bytes,3,rep,name=max_level_hashes,json=maxLevelHashes,proto3" json:"max_level_hashes,omitempty"`
}
func (m *QueryReadsMerkleSummary) Reset() { *m = QueryReadsMerkleSummary{} }
func (m *QueryReadsMerkleSummary) String() string { return proto.CompactTextString(m) }
func (*QueryReadsMerkleSummary) ProtoMessage() {}
func (*QueryReadsMerkleSummary) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} }
func (m *QueryReadsMerkleSummary) GetMaxDegree() uint32 {
if m != nil {
return m.MaxDegree
}
return 0
}
func (m *QueryReadsMerkleSummary) GetMaxLevel() uint32 {
if m != nil {
return m.MaxLevel
}
return 0
}
func (m *QueryReadsMerkleSummary) GetMaxLevelHashes() [][]byte {
if m != nil {
return m.MaxLevelHashes
}
return nil
}
func init() {
proto.RegisterType((*KVRWSet)(nil), "kvrwset.KVRWSet")
proto.RegisterType((*HashedRWSet)(nil), "kvrwset.HashedRWSet")
proto.RegisterType((*KVRead)(nil), "kvrwset.KVRead")
proto.RegisterType((*KVWrite)(nil), "kvrwset.KVWrite")
proto.RegisterType((*KVReadHash)(nil), "kvrwset.KVReadHash")
proto.RegisterType((*KVWriteHash)(nil), "kvrwset.KVWriteHash")
proto.RegisterType((*Version)(nil), "kvrwset.Version")
proto.RegisterType((*RangeQueryInfo)(nil), "kvrwset.RangeQueryInfo")
proto.RegisterType((*QueryReads)(nil), "kvrwset.QueryReads")
proto.RegisterType((*QueryReadsMerkleSummary)(nil), "kvrwset.QueryReadsMerkleSummary")
}
func init()
|
var fileDescriptor0 = []byte{
// 641 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x5d, 0x6b, 0xdb, 0x4a,
0x10, 0x8d, 0xec, 0xd8, 0x92, 0xc7, 0x76, 0xae, 0xef, 0x26, 0x97, 0xe8, 0x52, 0x0a, 0x46, 0xa1,
0x60, 0xf2, 0x20, 0x43, 0x0a, 0xa5, 0x79, 0xe8, 0x4b, 0x49, 0x4a, 0x4a, 0xda, 0x40, 0x37, 0x90,
0x40, 0x5f, 0xc4, 0x3a, 0x9a, 0xd8, 0x42, 0x5f, 0xe9, 0xae, 0x64, 0x5b, 0x4f, 0x6d, 0x7f, 0x4a,
0xff, 0x69, 0xd9, 0x59, 0x39, 0x76, 0x42, 0x1a, 0xe8, 0x93, 0xb5, 0xe7, 0xcc, 0x99, 0x39, 0x33,
0x9e, 0x5d, 0x38, 0x48, 0x30, 0x9c, 0xa2, 0x1c, 0xcb, 0x85, 0xc2, 0x62, 0x1c, 0xcf, 0x57, 0xbf,
0x01, 0x7d, 0xf8, 0x77, 0x32, 0x2f, 0x72, 0x66, 0xd7, 0xb8, 0xf7, 0xcb, 0x02, 0xfb, 0xfc, 0x8a,
0x5f, 0x5f, 0x62, 0xc1, 0x5e, 0x41, 0x4b, 0xa2, 0x08, 0x95, 0x6b, 0x0d, 0x9b, 0xa3, 0xee, 0xd1,
0x3f, 0x7e, 0x1d, 0xe4, 0x9f, 0x5f, 0x71, 0x14, 0x21, 0x37, 0x2c, 0x3b, 0x05, 0x26, 0x45, 0x36,
0xc5, 0xe0, 0x5b, 0x89, 0x32, 0x42, 0x15, 0x44, 0xd9, 0x6d, 0xee, 0x36, 0x48, 0xb3, 0x7f, 0xaf,
0xe1, 0x3a, 0xe4, 0x4b, 0x89, 0xb2, 0xfa, 0x98, 0xdd, 0xe6, 0x7c, 0x20, 0x57, 0xe7, 0x08, 0x95,
0x46, 0xd8, 0x08, 0xda, 0x0b, 0x19, 0x15, 0xa8, 0xdc, 0x26, 0x49, 0x07, 0x1b, 0xe5, 0xae, 0x35,
0xc1, 0x6b, 0xde, 0xfb, 0x61, 0x41, 0xf7, 0x4c, 0xa8, 0x19, 0x86, 0xc6, 0xe7, 0x1b, 0xe8, 0xcd,
0xe8, 0x18, 0x6c, 0xda, 0xdd, 0x7d, 0x64, 0x57, 0x2b, 0x78, 0xd7, 0x04, 0x72, 0x32, 0x7e, 0x0c,
0xfd, 0x5a, 0x57, 0x17, 0x36, 0x9e, 0xf7, 0x1e, 0x17, 0x26, 0x65, 0x5d, 0xe2, 0xda, 0x58, 0xf8,
0x00, 0x6d, 0x93, 0x95, 0x0d, 0xa0, 0x19, 0x63, 0xe5, 0x5a, 0x43, 0x6b, 0xd4, 0xe1, 0xfa, 0x93,
0x1d, 0x82, 0x3d, 0x47, 0xa9, 0xa2, 0x3c, 0x73, 0x1b, 0x43, 0xeb, 0x41, 0x27, 0x57, 0x06, 0xe7,
0xab, 0x00, 0xef, 0x42, 0x4f, 0x9b, 0x72, 0x3e, 0x91, 0xe8, 0x05, 0x74, 0x22, 0x15, 0x84, 0x98,
0x60, 0x81, 0x94, 0xca, 0xe1, 0x4e, 0xa4, 0x4e, 0xe8, 0xcc, 0xf6, 0xa0, 0x35, 0x17, 0x49, 0x89,
0x6e, 0x73, 0x68, 0x8d, 0x7a, 0xdc, 0x1c, 0xbc, 0x4b, 0x80, 0x75, 0xb7, 0xec, 0x7f, 0x70, 0x62,
0xac, 0x02, 0xed, 0x9c, 0xf2, 0xf6, 0xb8, 0x1d, 0x63, 0x45, 0xd4, 0xdf, 0x98, 0x0c, 0xa1, 0xbb,
0x31, 0x89, 0xe7, 0xb2, 0x3e, 0xeb, 0xf8, 0x25, 0x00, 0x99, 0x34, 0x4a, 0x63, 0xbb, 0x43, 0x88,
0xd6, 0x7a, 0xef, 0xc0, 0xae, 0x2b, 0xeb, 0x34, 0x93, 0x24, 0xbf, 0x89, 0x83, 0xac, 0x4c, 0xa9,
0xc4, 0x36, 0x77, 0x08, 0xb8, 0x28, 0x53, 0xf6, 0x1f, 0xb4, 0x8b, 0x25, 0x31, 0x0d, 0x62, 0x5a,
0xc5, 0xf2, 0xa2, 0x4c, 0xbd, 0x9f, 0x0d, 0xd8, 0x79, 0xb8, 0x63, 0x3a, 0x8d, 0x2a, 0x84, 0x2c,
0x82, 0xf5, 0x5c, 0x1d, 0x02, 0xce, 0xb1, 0x62, 0xfb, 0x60, 0x63, 0x16, 0x12, 0xd5, 0x20, 0xaa,
0x8d, 0x59, 0xa8, 0x89, 0x03, 0xe8, 0x47, 0x85, 0x0c, 0x70, 0x39, 0x13, 0xa5, 0x2a, 0x30, 0x24,
0xa7, 0x0e, 0xef, 0x45, 0x85, 0x3c, 0x5d, 0x61, 0xec, 0x08, 0x3a, 0x52, 0x2c, 0xea, 0x7d, 0xdb,
0xa6, 0x01, 0xae, 0xf7, 0x8d, 0x1c, 0xd0, 0x8a, 0x9d, 0x6d, 0x71, 0x47, 0x8a, 0x85, 0x59, 0x37,
0x0e, 0xbb, 0x14, 0x1f, 0xa4, 0x28, 0xe3, 0xc4, 0x8c, 0x01, 0x95, 0xdb, 0x22, 0xf5, 0xf0, 0x09,
0xf5, 0x67, 0x8a, 0xbb, 0x2c, 0xd3, 0x54, 0xc8, 0xea, 0x6c, 0x8b, 0xff, 0x2b, 0xd7, 0x28, 0xed,
0xbf, 0x7a, 0xdf, 0x03, 0x30, 0x39, 0xf5, 0x9d, 0xf3, 0xde, 0x02, 0xac, 0xd5, 0xec, 0x10, 0x1c,
0x7d, 0xcb, 0x9f, 0xbb, 0xc1, 0x76, 0x3c, 0xa7, 0x58, 0xef, 0x3b, 0xec, 0xff, 0xa1, 0xae, 0xfe,
0xdb, 0x52, 0xb1, 0x0c, 0x42, 0x9c, 0x4a, 0x44, 0x1a, 0x63, 0x9f, 0x77, 0x52, 0xb1, 0x3c, 0x21,
0x40, 0x0f, 0x59, 0xd3, 0x09, 0xce, 0x31, 0xa1, 0x49, 0xf6, 0xb9, 0x93, 0x8a, 0xe5, 0x27, 0x7d,
0x66, 0x23, 0x18, 0xdc, 0x93, 0xab, 0x7e, 0xf5, 0xed, 0xee, 0xf1, 0x9d, 0x55, 0x4c, 0xdd, 0x48,
0x0e, 0x47, 0xb9, 0x9c, 0xfa, 0xb3, 0xea, 0x0e, 0xa5, 0x79, 0xb0, 0xfc, 0x5b, 0x31, 0x91, 0xd1,
0x8d, 0x79, 0xa0, 0x94, 0x5f, 0x83, 0xc6, 0x7e, 0xdd, 0xc6, 0xd7, 0xe3, 0x69, 0x54, 0xcc, 0xca,
0x89, 0x7f, 0x93, 0xa7, 0xe3, 0x0d, 0xe9, 0xd8, 0x48, 0xc7, 0x46, 0x3a, 0x7e, 0xea, 0x01, 0x9c,
0xb4, 0x89, 0x7c, 0xfd, 0x3b, 0x00, 0x00, 0xff, 0xff, 0x29, 0xbc, 0x0b, 0xfc, 0x1f, 0x05, 0x00,
0x00,
}
|
{ proto.RegisterFile("ledger/rwset/kvrwset/kv_rwset.proto", fileDescriptor0) }
|
entry.rs
|
use serde_derive::{Deserialize, Serialize};
use std::cmp::Ordering;
#[derive(Debug, Serialize, Deserialize)]
pub struct Entry<T, U> {
pub key: T,
pub value: U,
}
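// Note: the ordering and equality impls below compare `key` only, so two
// entries with equal keys but different values are considered equal.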
impl<T, U> Ord for Entry<T, U>
where
|
{
fn cmp(&self, other: &Entry<T, U>) -> Ordering {
self.key.cmp(&other.key)
}
}
impl<T, U> PartialOrd for Entry<T, U>
where
T: Ord,
{
fn partial_cmp(&self, other: &Entry<T, U>) -> Option<Ordering> {
Some(self.key.cmp(&other.key))
}
}
impl<T, U> PartialEq for Entry<T, U>
where
T: Ord,
{
fn eq(&self, other: &Entry<T, U>) -> bool {
self.key == other.key
}
}
impl<T, U> Eq for Entry<T, U> where T: Ord {}
|
T: Ord,
|
502_test.go
|
package p502
import (
"github.com/stretchr/testify/assert"
"testing"
)
func Test0(t *testing.T)
|
func Test1(t *testing.T) {
p := []int{1, 2, 3}
c := []int{0, 1, 2}
assert.Equal(t, 6, findMaximizedCapital(10, 0, p, c))
}
|
{
p := []int{1, 2, 3}
c := []int{0, 1, 1}
assert.Equal(t, 4, findMaximizedCapital(2, 0, p, c))
}
|
86ab07cb326c61f39a292a403f2894894a6f0605.js
|
mycallback( {"CONTRIBUTOR OCCUPATION": "SENIOR VICE PRESIDENT", "CONTRIBUTION AMOUNT (F3L Bundled)": "57.69", "ELECTION CODE": "", "MEMO CODE": "", "CONTRIBUTOR EMPLOYER": "MASSACHUSETTS MUTUAL LIFE INS.", "DONOR CANDIDATE STATE": "", "CONTRIBUTOR STREET 1": "347 N STEELE RD", "CONTRIBUTOR MIDDLE NAME": "", "DONOR CANDIDATE FEC ID": "", "DONOR CANDIDATE MIDDLE NAME": "", "CONTRIBUTOR STATE": "CT", "DONOR CANDIDATE FIRST NAME": "", "CONTRIBUTOR FIRST NAME": "DOUGLAS", "BACK REFERENCE SCHED NAME": "", "DONOR CANDIDATE DISTRICT": "", "CONTRIBUTION DATE": "20091231", "DONOR COMMITTEE NAME": "", "MEMO TEXT/DESCRIPTION": "P/R Deduction ($87.45 Bi-Weekly)", "Reference to SI or SL system code that identifies the Account": "", "FILER COMMITTEE ID NUMBER": "C00118943", "DONOR CANDIDATE LAST NAME": "", "CONTRIBUTOR LAST NAME": "RUSSELL", "_record_type": "fec.version.v7_0.SA", "CONDUIT STREET2": "", "CONDUIT STREET1": "", "DONOR COMMITTEE FEC ID": "", "CONTRIBUTION PURPOSE DESCRIP": "", "CONTRIBUTOR ZIP": "061172232", "CONTRIBUTOR STREET 2": "", "CONDUIT CITY": "", "ENTITY TYPE": "IND", "CONTRIBUTOR CITY": "WEST HARTFORD", "CONTRIBUTOR SUFFIX": "", "TRANSACTION ID": "PR1500908518659", "DONOR CANDIDATE SUFFIX": "", "DONOR CANDIDATE OFFICE": "", "CONTRIBUTION PURPOSE CODE": "15", "ELECTION OTHER DESCRIPTION": "", "_src_file": "2011/20110411/721455.fec_1.yml", "CONDUIT STATE": "", "CONTRIBUTOR ORGANIZATION NAME": "", "BACK REFERENCE TRAN ID NUMBER": "", "DONOR CANDIDATE PREFIX": "", "CONTRIBUTOR PREFIX": "MR.", "CONDUIT ZIP": "", "CONDUIT NAME": "", "CONTRIBUTION AGGREGATE F3L Semi-annual Bundled": "526.20", "FORM TYPE": "SA11ai"});
|
||
manager.go
|
package viewer
import (
"github.com/gofrs/uuid"
"github.com/leandro-lugaresi/hub"
"github.com/traPtitech/traQ/event"
"sync"
"time"
)
// Manager is the channel viewer manager
type Manager struct {
hub *hub.Hub
channels map[uuid.UUID]map[*viewer]struct{}
viewers map[interface{}]*viewer
mu sync.RWMutex
}
type viewer struct {
key interface{}
userID uuid.UUID
channelID uuid.UUID
state StateWithTime
}
// NewManager creates a new channel viewer manager
func NewManager(hub *hub.Hub) *Manager {
vm := &Manager{
hub: hub,
channels: map[uuid.UUID]map[*viewer]struct{}{},
viewers: map[interface{}]*viewer{},
}
go func() {
for range time.NewTicker(5 * time.Minute).C {
vm.mu.Lock()
vm.gc()
vm.mu.Unlock()
}
}()
return vm
}
// GetChannelViewers returns the viewer states of the specified channel
func (vm *Manager) GetChannelViewers(channelID uuid.UUID) map[uuid.UUID]StateWithTime {
vm.mu.RLock()
defer vm.mu.RUnlock()
return calculateChannelViewers(vm.channels[channelID])
}
// SetViewer sets the channel viewer state for the specified key
func (vm *Manager) SetViewer(key interface{}, userID uuid.UUID, channelID uuid.UUID, state State) {
vm.mu.Lock()
defer vm.mu.Unlock()
cv, ok := vm.channels[channelID]
if !ok {
cv = map[*viewer]struct{}{}
vm.channels[channelID] = cv
}
v, ok := vm.viewers[key]
if ok {
if v.channelID == channelID {
if v.state.State == state {
// nothing has changed
return
}
// only the state changed
v.state.State = state
} else {
// both the channel and the state changed
oldC := v.channelID
old := vm.channels[oldC]
delete(old, v)
v.channelID = channelID
v.state = StateWithTime{
State: state,
Time: time.Now(),
}
vm.hub.Publish(hub.Message{
Name: event.ChannelViewersChanged,
Fields: hub.Fields{
"channel_id": oldC,
"viewers": calculateChannelViewers(old),
},
})
}
} else {
v = &viewer{
key: key,
userID: userID,
channelID: channelID,
state: StateWithTime{
State: state,
Time: time.Now(),
},
}
vm.viewers[key] = v
}
cv[v] = struct{}{}
vm.hub.Publish(hub.Message{
Name: event.ChannelViewersChanged,
Fields: hub.Fields{
"channel_id": channelID,
"viewers": calculateChannelViewers(cv),
},
})
}
// RemoveViewer removes the channel viewer state for the specified key
func (vm *Manager) RemoveViewer(key interface{}) {
vm.mu.Lock()
defer vm.mu.Unlock()
v, ok := vm.viewers[key]
if !ok {
return
}
cv := vm.channels[v.channelID]
delete(vm.viewers, key)
delete(cv, v)
vm.hub.Publish(hub.Message{
Name: event.ChannelViewersChanged,
Fields: hub.Fields{
"channel_id": v.channelID,
"viewers": calculateChannelViewers(cv),
},
})
}
// Called once every 5 minutes to clean up the channel map
func (vm *Manager) gc() {
for cid, cv := range vm.channels {
if len(cv) == 0 {
delete(vm.channels, cid)
}
}
}
func calculateChannelViewers(vs map[*viewer]struct{}) map[uuid.UUID]StateWithTime {
result := make(map[uuid.UUID]StateWithTime, len(vs))
for v := range vs {
if s, ok := result[v.userID]; ok && s.State > v.state.State {
continue
}
result[v.userID] = v.state
}
return result
}
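// Usage sketch (illustrative only; connKey, userID, channelID and someState
// are hypothetical placeholders, not values defined in this package):
//
//	m := NewManager(hub.New())
//	m.SetViewer(connKey, userID, channelID, someState) // publishes ChannelViewersChanged
//	viewers := m.GetChannelViewers(channelID)          // map[uuid.UUID]StateWithTime
//	m.RemoveViewer(connKey)                            // publishes ChannelViewersChanged again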
| ||
runner_controller.go
|
/*
Copyright 2020 The actions-runner-controller authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package controllers
import (
"context"
"fmt"
"reflect"
"strings"
"github.com/go-logr/logr"
"k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/client-go/tools/record"
ctrl "sigs.k8s.io/controller-runtime"
"sigs.k8s.io/controller-runtime/pkg/client"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"github.com/summerwind/actions-runner-controller/api/v1alpha1"
"github.com/summerwind/actions-runner-controller/github"
)
const (
containerName = "runner"
finalizerName = "runner.actions.summerwind.dev"
)
// RunnerReconciler reconciles a Runner object
type RunnerReconciler struct {
client.Client
Log logr.Logger
Recorder record.EventRecorder
Scheme *runtime.Scheme
GitHubClient *github.Client
RunnerImage string
DockerImage string
}
// +kubebuilder:rbac:groups=actions.summerwind.dev,resources=runners,verbs=get;list;watch;create;update;patch;delete
// +kubebuilder:rbac:groups=actions.summerwind.dev,resources=runners/finalizers,verbs=get;list;watch;create;update;patch;delete
// +kubebuilder:rbac:groups=actions.summerwind.dev,resources=runners/status,verbs=get;update;patch
// +kubebuilder:rbac:groups=core,resources=pods,verbs=get;list;watch;create;update;patch;delete
// +kubebuilder:rbac:groups=core,resources=pods/finalizers,verbs=get;list;watch;create;update;patch;delete
// +kubebuilder:rbac:groups=core,resources=events,verbs=create;patch
func (r *RunnerReconciler) Reconcile(req ctrl.Request) (ctrl.Result, error) {
ctx := context.Background()
log := r.Log.WithValues("runner", req.NamespacedName)
var runner v1alpha1.Runner
if err := r.Get(ctx, req.NamespacedName, &runner); err != nil {
return ctrl.Result{}, client.IgnoreNotFound(err)
}
err := runner.Validate()
if err != nil {
log.Info("Failed to validate runner spec", "error", err.Error())
return ctrl.Result{}, nil
}
if runner.ObjectMeta.DeletionTimestamp.IsZero() {
finalizers, added := addFinalizer(runner.ObjectMeta.Finalizers)
if added {
newRunner := runner.DeepCopy()
newRunner.ObjectMeta.Finalizers = finalizers
if err := r.Update(ctx, newRunner); err != nil {
log.Error(err, "Failed to update runner")
return ctrl.Result{}, err
}
return ctrl.Result{}, nil
}
} else {
finalizers, removed := removeFinalizer(runner.ObjectMeta.Finalizers)
if removed {
if len(runner.Status.Registration.Token) > 0 {
ok, err := r.unregisterRunner(ctx, runner.Spec.Organization, runner.Spec.Repository, runner.Name)
if err != nil {
log.Error(err, "Failed to unregister runner")
return ctrl.Result{}, err
}
if !ok {
log.V(1).Info("Runner no longer exists on GitHub")
}
} else {
log.V(1).Info("Runner was never registered on GitHub")
}
newRunner := runner.DeepCopy()
newRunner.ObjectMeta.Finalizers = finalizers
if err := r.Update(ctx, newRunner); err != nil {
log.Error(err, "Failed to update runner")
return ctrl.Result{}, err
}
log.Info("Removed runner from GitHub", "repository", runner.Spec.Repository, "organization", runner.Spec.Organization)
}
return ctrl.Result{}, nil
}
var pod corev1.Pod
if err := r.Get(ctx, req.NamespacedName, &pod); err != nil {
if !errors.IsNotFound(err) {
return ctrl.Result{}, err
}
if updated, err := r.updateRegistrationToken(ctx, runner); err != nil {
return ctrl.Result{}, err
} else if updated {
return ctrl.Result{Requeue: true}, nil
}
newPod, err := r.newPod(runner)
if err != nil {
log.Error(err, "Could not create pod")
return ctrl.Result{}, err
}
if err := r.Create(ctx, &newPod); err != nil {
log.Error(err, "Failed to create pod resource")
return ctrl.Result{}, err
}
r.Recorder.Event(&runner, corev1.EventTypeNormal, "PodCreated", fmt.Sprintf("Created pod '%s'", newPod.Name))
log.Info("Created runner pod", "repository", runner.Spec.Repository)
} else {
// If the pod has ended up succeeded we need to restart it
// Happens e.g. when dind is in the runner and the run completes
restart := pod.Status.Phase == corev1.PodSucceeded
if !restart && runner.Status.Phase != string(pod.Status.Phase) {
updated := runner.DeepCopy()
updated.Status.Phase = string(pod.Status.Phase)
updated.Status.Reason = pod.Status.Reason
updated.Status.Message = pod.Status.Message
if err := r.Status().Update(ctx, updated); err != nil {
log.Error(err, "Failed to update runner status")
return ctrl.Result{}, err
}
return ctrl.Result{}, nil
}
if !pod.ObjectMeta.DeletionTimestamp.IsZero() {
return ctrl.Result{}, err
}
if pod.Status.Phase == corev1.PodRunning {
for _, status := range pod.Status.ContainerStatuses {
if status.Name != containerName {
continue
}
if status.State.Terminated != nil && status.State.Terminated.ExitCode == 0 {
restart = true
}
}
}
if updated, err := r.updateRegistrationToken(ctx, runner); err != nil {
return ctrl.Result{}, err
} else if updated {
return ctrl.Result{Requeue: true}, nil
}
newPod, err := r.newPod(runner)
if err != nil {
log.Error(err, "Could not create pod")
return ctrl.Result{}, err
}
runnerBusy, err := r.isRunnerBusy(ctx, runner.Spec.Organization, runner.Spec.Repository, runner.Name)
if err != nil {
log.Error(err, "Failed to check if runner is busy")
return ctrl.Result{}, nil
}
if !runnerBusy && (!reflect.DeepEqual(pod.Spec.Containers[0].Env, newPod.Spec.Containers[0].Env) || pod.Spec.Containers[0].Image != newPod.Spec.Containers[0].Image) {
restart = true
}
if !restart {
return ctrl.Result{}, err
}
if err := r.Delete(ctx, &pod); err != nil {
log.Error(err, "Failed to delete pod resource")
return ctrl.Result{}, err
}
r.Recorder.Event(&runner, corev1.EventTypeNormal, "PodDeleted", fmt.Sprintf("Deleted pod '%s'", newPod.Name))
log.Info("Deleted runner pod", "repository", runner.Spec.Repository)
}
return ctrl.Result{}, nil
}
func (r *RunnerReconciler) isRunnerBusy(ctx context.Context, org, repo, name string) (bool, error) {
runners, err := r.GitHubClient.ListRunners(ctx, org, repo)
if err != nil {
return false, err
}
for _, runner := range runners {
if runner.GetName() == name {
return runner.GetBusy(), nil
}
}
return false, fmt.Errorf("runner not found")
}
func (r *RunnerReconciler) unregisterRunner(ctx context.Context, org, repo, name string) (bool, error) {
runners, err := r.GitHubClient.ListRunners(ctx, org, repo)
if err != nil {
return false, err
}
id := int64(0)
for _, runner := range runners {
if runner.GetName() == name {
if runner.GetBusy() {
return false, fmt.Errorf("runner is busy")
}
id = runner.GetID()
break
}
}
if id == int64(0) {
return false, nil
}
if err := r.GitHubClient.RemoveRunner(ctx, org, repo, id); err != nil {
return false, err
}
return true, nil
}
func (r *RunnerReconciler) updateRegistrationToken(ctx context.Context, runner v1alpha1.Runner) (bool, error) {
if runner.IsRegisterable() {
return false, nil
}
log := r.Log.WithValues("runner", runner.Name)
rt, err := r.GitHubClient.GetRegistrationToken(ctx, runner.Spec.Organization, runner.Spec.Repository, runner.Name)
if err != nil {
r.Recorder.Event(&runner, corev1.EventTypeWarning, "FailedUpdateRegistrationToken", "Updating registration token failed")
log.Error(err, "Failed to get new registration token")
return false, err
}
updated := runner.DeepCopy()
updated.Status.Registration = v1alpha1.RunnerStatusRegistration{
Organization: runner.Spec.Organization,
Repository: runner.Spec.Repository,
Labels: runner.Spec.Labels,
Token: rt.GetToken(),
ExpiresAt: metav1.NewTime(rt.GetExpiresAt().Time),
}
if err := r.Status().Update(ctx, updated); err != nil {
log.Error(err, "Failed to update runner status")
return false, err
}
r.Recorder.Event(&runner, corev1.EventTypeNormal, "RegistrationTokenUpdated", "Successfully updated registration token")
log.Info("Updated registration token", "repository", runner.Spec.Repository)
return true, nil
}
func (r *RunnerReconciler) newPod(runner v1alpha1.Runner) (corev1.Pod, error) {
var (
privileged bool = true
dockerdInRunner bool = runner.Spec.DockerdWithinRunnerContainer != nil && *runner.Spec.DockerdWithinRunnerContainer
dockerEnabled bool = runner.Spec.DockerEnabled == nil || *runner.Spec.DockerEnabled
)
runnerImage := runner.Spec.Image
if runnerImage == "" {
runnerImage = r.RunnerImage
}
workDir := runner.Spec.WorkDir
if workDir == "" {
workDir = "/runner/_work"
}
runnerImagePullPolicy := runner.Spec.ImagePullPolicy
if runnerImagePullPolicy == "" {
runnerImagePullPolicy = corev1.PullAlways
}
env := []corev1.EnvVar{
{
Name: "RUNNER_NAME",
Value: runner.Name,
},
{
Name: "RUNNER_ORG",
Value: runner.Spec.Organization,
},
{
Name: "RUNNER_REPO",
Value: runner.Spec.Repository,
},
{
Name: "RUNNER_LABELS",
Value: strings.Join(runner.Spec.Labels, ","),
},
{
Name: "RUNNER_GROUP",
Value: runner.Spec.Group,
},
{
Name: "RUNNER_TOKEN",
Value: runner.Status.Registration.Token,
},
{
Name: "DOCKERD_IN_RUNNER",
Value: fmt.Sprintf("%v", dockerdInRunner),
},
{
Name: "GITHUB_URL",
Value: r.GitHubClient.GithubBaseURL,
},
{
Name: "RUNNER_WORKDIR",
Value: workDir,
},
}
env = append(env, runner.Spec.Env...)
pod := corev1.Pod{
ObjectMeta: metav1.ObjectMeta{
Name: runner.Name,
Namespace: runner.Namespace,
Labels: runner.Labels,
Annotations: runner.Annotations,
},
Spec: corev1.PodSpec{
RestartPolicy: "OnFailure",
Containers: []corev1.Container{
{
Name: containerName,
Image: runnerImage,
ImagePullPolicy: runnerImagePullPolicy,
Env: env,
EnvFrom: runner.Spec.EnvFrom,
SecurityContext: &corev1.SecurityContext{
// Runner needs to run privileged if it contains DinD
Privileged: runner.Spec.DockerdWithinRunnerContainer,
},
Resources: runner.Spec.Resources,
},
},
},
}
if !dockerdInRunner && dockerEnabled {
pod.Spec.Volumes = []corev1.Volume{
{
Name: "work",
VolumeSource: corev1.VolumeSource{
EmptyDir: &corev1.EmptyDirVolumeSource{},
},
},
{
Name: "externals",
VolumeSource: corev1.VolumeSource{
EmptyDir: &corev1.EmptyDirVolumeSource{},
},
},
}
pod.Spec.Containers[0].VolumeMounts = []corev1.VolumeMount{
{
Name: "work",
MountPath: workDir,
},
{
Name: "externals",
MountPath: "/runner/externals",
},
}
pod.Spec.Containers[0].Env = append(pod.Spec.Containers[0].Env, corev1.EnvVar{
Name: "DOCKER_HOST",
Value: "tcp://localhost:2375",
})
pod.Spec.Containers = append(pod.Spec.Containers, corev1.Container{
Name: "docker",
Image: r.DockerImage,
VolumeMounts: []corev1.VolumeMount{
{
Name: "work",
MountPath: workDir,
},
{
Name: "externals",
MountPath: "/runner/externals",
},
},
Env: []corev1.EnvVar{
{
Name: "DOCKER_TLS_CERTDIR",
Value: "",
},
},
SecurityContext: &corev1.SecurityContext{
Privileged: &privileged,
},
})
}
if len(runner.Spec.Containers) != 0 {
pod.Spec.Containers = runner.Spec.Containers
for i := 0; i < len(pod.Spec.Containers); i++ {
if pod.Spec.Containers[i].Name == containerName {
pod.Spec.Containers[i].Env = append(pod.Spec.Containers[i].Env, env...)
}
}
}
if len(runner.Spec.VolumeMounts) != 0 {
pod.Spec.Containers[0].VolumeMounts = append(pod.Spec.Containers[0].VolumeMounts, runner.Spec.VolumeMounts...)
}
if len(runner.Spec.Volumes) != 0 {
pod.Spec.Volumes = append(pod.Spec.Volumes, runner.Spec.Volumes...)
}
if len(runner.Spec.InitContainers) != 0 {
pod.Spec.InitContainers = append(pod.Spec.InitContainers, runner.Spec.InitContainers...)
}
if runner.Spec.NodeSelector != nil {
pod.Spec.NodeSelector = runner.Spec.NodeSelector
}
if runner.Spec.ServiceAccountName != "" {
pod.Spec.ServiceAccountName = runner.Spec.ServiceAccountName
}
if runner.Spec.AutomountServiceAccountToken != nil {
pod.Spec.AutomountServiceAccountToken = runner.Spec.AutomountServiceAccountToken
}
if len(runner.Spec.SidecarContainers) != 0 {
pod.Spec.Containers = append(pod.Spec.Containers, runner.Spec.SidecarContainers...)
}
if runner.Spec.SecurityContext != nil {
pod.Spec.SecurityContext = runner.Spec.SecurityContext
}
if len(runner.Spec.ImagePullSecrets) != 0 {
pod.Spec.ImagePullSecrets = runner.Spec.ImagePullSecrets
}
if runner.Spec.Affinity != nil {
pod.Spec.Affinity = runner.Spec.Affinity
}
if len(runner.Spec.Tolerations) != 0 {
pod.Spec.Tolerations = runner.Spec.Tolerations
}
if len(runner.Spec.EphemeralContainers) != 0 {
pod.Spec.EphemeralContainers = runner.Spec.EphemeralContainers
}
if runner.Spec.TerminationGracePeriodSeconds != nil {
pod.Spec.TerminationGracePeriodSeconds = runner.Spec.TerminationGracePeriodSeconds
}
if err := ctrl.SetControllerReference(&runner, &pod, r.Scheme); err != nil {
return pod, err
}
return pod, nil
}
func (r *RunnerReconciler) SetupWithManager(mgr ctrl.Manager) error {
r.Recorder = mgr.GetEventRecorderFor("runner-controller")
return ctrl.NewControllerManagedBy(mgr).
For(&v1alpha1.Runner{}).
Owns(&corev1.Pod{}).
Complete(r)
}
func
|
(finalizers []string) ([]string, bool) {
exists := false
for _, name := range finalizers {
if name == finalizerName {
exists = true
}
}
if exists {
return finalizers, false
}
return append(finalizers, finalizerName), true
}
func removeFinalizer(finalizers []string) ([]string, bool) {
removed := false
result := []string{}
for _, name := range finalizers {
if name == finalizerName {
removed = true
continue
}
result = append(result, name)
}
return result, removed
}
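// Behaviour sketch for the finalizer helpers above (illustrative only):
//
//	addFinalizer(nil)                                 // -> ([finalizerName], true)
//	addFinalizer([]string{finalizerName})             // -> same list, false
//	removeFinalizer([]string{finalizerName, "other"}) // -> (["other"], true)
//	removeFinalizer([]string{"other"})                // -> (["other"], false)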
|
addFinalizer
|
syntax.rs
|
// Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use codespan::{ByteIndex, Span};
use move_ir_types::location::*;
use std::str::FromStr;
use crate::{
errors::*,
parser::{ast::*, lexer::*},
shared::*,
};
// In the informal grammar comments in this file, Comma<T> is shorthand for:
// (<T> ",")* <T>?
// Note that this allows an optional trailing comma.
//**************************************************************************************************
// Error Handling
//**************************************************************************************************
fn unexpected_token_error<'input>(tokens: &Lexer<'input>, expected: &str) -> Error {
let loc = current_token_loc(tokens);
let unexpected = if tokens.peek() == Tok::EOF {
"end-of-file".to_string()
} else {
format!("'{}'", tokens.content())
};
vec![
(loc, format!("Unexpected {}", unexpected)),
(loc, format!("Expected {}", expected)),
]
}
//**************************************************************************************************
// Miscellaneous Utilities
//**************************************************************************************************
pub fn make_loc(file: &'static str, start: usize, end: usize) -> Loc {
Loc::new(
file,
Span::new(ByteIndex(start as u32), ByteIndex(end as u32)),
)
}
fn current_token_loc<'input>(tokens: &Lexer<'input>) -> Loc {
let start_loc = tokens.start_loc();
make_loc(
tokens.file_name(),
start_loc,
start_loc + tokens.content().len(),
)
}
fn spanned<T>(file: &'static str, start: usize, end: usize, value: T) -> Spanned<T> {
Spanned {
loc: make_loc(file, start, end),
value,
}
}
// Check for the specified token and consume it if it matches.
// Returns true if the token matches.
fn match_token<'input>(tokens: &mut Lexer<'input>, tok: Tok) -> Result<bool, Error> {
if tokens.peek() == tok {
tokens.advance()?;
Ok(true)
} else {
Ok(false)
}
}
// Check for the specified token and return an error if it does not match.
fn consume_token<'input>(tokens: &mut Lexer<'input>, tok: Tok) -> Result<(), Error> {
if tokens.peek() != tok {
let expected = format!("'{}'", &tok.to_string());
return Err(unexpected_token_error(tokens, &expected));
}
tokens.advance()?;
Ok(())
}
// Check for the name token with specified value and return an error if it does not match.
fn consume_name_value<'input>(tokens: &mut Lexer<'input>, value: &str) -> Result<(), Error> {
if tokens.peek() == Tok::NameValue && tokens.content() == value {
tokens.advance()
} else {
let expected = format!("name '{}'", value);
Err(unexpected_token_error(tokens, &expected))
}
}
// If the next token is the specified kind, consume it and return
// its source location.
fn consume_optional_token_with_loc<'input>(
tokens: &mut Lexer<'input>,
tok: Tok,
) -> Result<Option<Loc>, Error> {
if tokens.peek() == tok {
let start_loc = tokens.start_loc();
tokens.advance()?;
let end_loc = tokens.previous_end_loc();
Ok(Some(make_loc(tokens.file_name(), start_loc, end_loc)))
} else {
Ok(None)
}
}
// While parsing a list and expecting a ">" token to mark the end, replace
// a ">>" token with the expected ">". This handles the situation where there
// are nested type parameters that result in two adjacent ">" tokens, e.g.,
// "A<B<C>>".
fn adjust_token<'input>(tokens: &mut Lexer<'input>, end_token: Tok) {
if tokens.peek() == Tok::GreaterGreater && end_token == Tok::Greater {
tokens.replace_token(Tok::Greater, 1);
}
}
// Parse a comma-separated list of items, including the specified starting and
// ending tokens.
fn parse_comma_list<'input, F, R>(
tokens: &mut Lexer<'input>,
start_token: Tok,
end_token: Tok,
parse_list_item: F,
item_description: &str,
) -> Result<Vec<R>, Error>
where
F: Fn(&mut Lexer<'input>) -> Result<R, Error>,
{
let start_loc = tokens.start_loc();
consume_token(tokens, start_token)?;
parse_comma_list_after_start(
tokens,
start_loc,
start_token,
end_token,
parse_list_item,
item_description,
)
}
// Parse a comma-separated list of items, including the specified ending token, but
// assuming that the starting token has already been consumed.
fn parse_comma_list_after_start<'input, F, R>(
tokens: &mut Lexer<'input>,
start_loc: usize,
start_token: Tok,
end_token: Tok,
parse_list_item: F,
item_description: &str,
) -> Result<Vec<R>, Error>
where
F: Fn(&mut Lexer<'input>) -> Result<R, Error>,
{
adjust_token(tokens, end_token);
if match_token(tokens, end_token)? {
return Ok(vec![]);
}
let mut v = vec![];
loop {
if tokens.peek() == Tok::Comma {
let current_loc = tokens.start_loc();
let loc = make_loc(tokens.file_name(), current_loc, current_loc);
return Err(vec![(loc, format!("Expected {}", item_description))]);
}
v.push(parse_list_item(tokens)?);
adjust_token(tokens, end_token);
if match_token(tokens, end_token)? {
break Ok(v);
}
if !match_token(tokens, Tok::Comma)? {
let current_loc = tokens.start_loc();
let loc = make_loc(tokens.file_name(), current_loc, current_loc);
let loc2 = make_loc(tokens.file_name(), start_loc, start_loc);
return Err(vec![
(loc, format!("Expected '{}'", end_token)),
(loc2, format!("To match this '{}'", start_token)),
]);
}
adjust_token(tokens, end_token);
if match_token(tokens, end_token)? {
break Ok(v);
}
}
}
// Parse a list of items, without specified start and end tokens, with the separator determined by
// the passed function `parse_list_continue`.
fn parse_list<'input, C, F, R>(
tokens: &mut Lexer<'input>,
mut parse_list_continue: C,
parse_list_item: F,
) -> Result<Vec<R>, Error>
where
C: FnMut(&mut Lexer<'input>) -> Result<bool, Error>,
F: Fn(&mut Lexer<'input>) -> Result<R, Error>,
{
let mut v = vec![];
loop {
v.push(parse_list_item(tokens)?);
if !parse_list_continue(tokens)? {
break Ok(v);
}
}
}
//**************************************************************************************************
// Names and Addresses
//**************************************************************************************************
// Parse a name:
// Name = <NameValue>
fn parse_name<'input>(tokens: &mut Lexer<'input>) -> Result<Spanned<String>, Error> {
if tokens.peek() != Tok::NameValue {
return Err(unexpected_token_error(tokens, "a name value"));
}
let start_loc = tokens.start_loc();
let name = tokens.content().to_string();
tokens.advance()?;
let end_loc = tokens.previous_end_loc();
Ok(spanned(tokens.file_name(), start_loc, end_loc, name))
}
// Parse an account address:
// Address = <AddressValue>
fn parse_address<'input>(tokens: &mut Lexer<'input>) -> Result<Address, Error> {
if tokens.peek() != Tok::AddressValue {
return Err(unexpected_token_error(tokens, "an account address value"));
}
let addr =
Address::parse_str(&tokens.content()).map_err(|msg| vec![(current_token_loc(tokens), msg)]);
tokens.advance()?;
addr
}
// Parse a variable name:
// Var = <Name>
fn parse_var<'input>(tokens: &mut Lexer<'input>) -> Result<Var, Error> {
Ok(Var(parse_name(tokens)?))
}
// Parse a field name:
// Field = <Name>
fn parse_field<'input>(tokens: &mut Lexer<'input>) -> Result<Field, Error> {
Ok(Field(parse_name(tokens)?))
}
// Parse a module name:
// ModuleName = <Name>
fn parse_module_name<'input>(tokens: &mut Lexer<'input>) -> Result<ModuleName, Error> {
Ok(ModuleName(parse_name(tokens)?))
}
// Parse a module identifier:
// ModuleIdent = <Address> "::" <ModuleName>
fn parse_module_ident<'input>(tokens: &mut Lexer<'input>) -> Result<ModuleIdent, Error> {
let start_loc = tokens.start_loc();
let address = parse_address(tokens)?;
consume_token(tokens, Tok::ColonColon)?;
let name = parse_module_name(tokens)?;
let end_loc = tokens.previous_end_loc();
let m = ModuleIdent_ { address, name };
Ok(ModuleIdent(spanned(
tokens.file_name(),
start_loc,
end_loc,
m,
)))
}
// Parse a module access (either a struct or a function):
// ModuleAccess =
// <Name>
// | <ModuleName> "::" <Name>
// | <ModuleIdent> "::" <Name>
fn parse_module_access<'input, F: FnOnce() -> String>(
tokens: &mut Lexer<'input>,
item_description: F,
) -> Result<ModuleAccess, Error> {
let start_loc = tokens.start_loc();
let acc = match tokens.peek() {
Tok::NameValue => {
// Check if this is a ModuleName followed by "::".
let m = parse_name(tokens)?;
if match_token(tokens, Tok::ColonColon)? {
let n = parse_name(tokens)?;
ModuleAccess_::ModuleAccess(ModuleName(m), n)
} else {
ModuleAccess_::Name(m)
}
}
Tok::AddressValue => {
let m = parse_module_ident(tokens)?;
consume_token(tokens, Tok::ColonColon)?;
let n = parse_name(tokens)?;
ModuleAccess_::QualifiedModuleAccess(m, n)
}
_ => {
return Err(unexpected_token_error(tokens, &item_description()));
}
};
let end_loc = tokens.previous_end_loc();
Ok(spanned(tokens.file_name(), start_loc, end_loc, acc))
}
//**************************************************************************************************
// Fields and Bindings
//**************************************************************************************************
// Parse a field name optionally followed by a colon and an expression argument:
// ExpField = <Field> <":" <Exp>>?
fn parse_exp_field<'input>(tokens: &mut Lexer<'input>) -> Result<(Field, Exp), Error>
|
// Parse a field name optionally followed by a colon and a binding:
// BindField = <Field> <":" <Bind>>?
//
// If the binding is not specified, the default is to use a variable
// with the same name as the field.
fn parse_bind_field<'input>(tokens: &mut Lexer<'input>) -> Result<(Field, Bind), Error> {
let f = parse_field(tokens)?;
let arg = if match_token(tokens, Tok::Colon)? {
parse_bind(tokens)?
} else {
let v = Var(f.0.clone());
sp(v.loc(), Bind_::Var(v))
};
Ok((f, arg))
}
// Parse a binding:
// Bind =
// <Var>
// | <ModuleAccess> <OptionalTypeArgs> "{" Comma<BindField> "}"
fn parse_bind<'input>(tokens: &mut Lexer<'input>) -> Result<Bind, Error> {
let start_loc = tokens.start_loc();
if tokens.peek() == Tok::NameValue {
let next_tok = tokens.lookahead()?;
if next_tok != Tok::LBrace && next_tok != Tok::Less && next_tok != Tok::ColonColon {
let v = Bind_::Var(parse_var(tokens)?);
let end_loc = tokens.previous_end_loc();
return Ok(spanned(tokens.file_name(), start_loc, end_loc, v));
}
}
// The item description specified here should include the special case above for
// variable names, because if the current tokens cannot be parsed as a struct name
// it is possible that the user's intention was to use a variable name.
let ty = parse_module_access(tokens, || "a variable or struct name".to_string())?;
let ty_args = parse_optional_type_args(tokens)?;
let args = parse_comma_list(
tokens,
Tok::LBrace,
Tok::RBrace,
parse_bind_field,
"a field binding",
)?;
let end_loc = tokens.previous_end_loc();
let unpack = Bind_::Unpack(ty, ty_args, args);
Ok(spanned(tokens.file_name(), start_loc, end_loc, unpack))
}
// Parse a list of bindings, which can be zero, one, or more bindings:
// BindList =
// <Bind>
// | "(" Comma<Bind> ")"
//
// The list is enclosed in parentheses, except that the parentheses are
// optional if there is a single Bind.
fn parse_bind_list<'input>(tokens: &mut Lexer<'input>) -> Result<BindList, Error> {
let start_loc = tokens.start_loc();
let b = if tokens.peek() != Tok::LParen {
vec![parse_bind(tokens)?]
} else {
parse_comma_list(
tokens,
Tok::LParen,
Tok::RParen,
parse_bind,
"a variable or structure binding",
)?
};
let end_loc = tokens.previous_end_loc();
Ok(spanned(tokens.file_name(), start_loc, end_loc, b))
}
// Parse a list of bindings for lambda.
// LambdaBindList =
// "|" Comma<Bind> "|"
fn parse_lambda_bind_list<'input>(tokens: &mut Lexer<'input>) -> Result<BindList, Error> {
let start_loc = tokens.start_loc();
let b = parse_comma_list(
tokens,
Tok::Pipe,
Tok::Pipe,
parse_bind,
"a variable or structure binding",
)?;
let end_loc = tokens.previous_end_loc();
Ok(spanned(tokens.file_name(), start_loc, end_loc, b))
}
//**************************************************************************************************
// Values
//**************************************************************************************************
// Parse a byte string:
// ByteString = <ByteStringValue>
fn parse_byte_string<'input>(tokens: &mut Lexer<'input>) -> Result<Vec<u8>, Error> {
if tokens.peek() != Tok::ByteStringValue {
return Err(unexpected_token_error(tokens, "a byte string value"));
}
let start_loc = tokens.start_loc();
let s = tokens.content();
assert!(s.starts_with("x\""));
let mut hex_string = String::from(&s[2..s.len() - 1]);
if hex_string.len() % 2 != 0 {
hex_string.insert(0, '0');
}
tokens.advance()?;
match hex::decode(hex_string.as_str()) {
Ok(vec) => Ok(vec),
Err(hex::FromHexError::InvalidHexCharacter { c, index }) => {
let offset = start_loc + 1 + index;
let loc = make_loc(tokens.file_name(), offset, offset);
Err(vec![(
loc,
format!("Invalid hexadecimal character: '{}'", c),
)])
}
Err(_) => unreachable!("unexpected error parsing hex byte string value"),
}
}
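// Worked example (illustrative): the literal x"FFF" contains an odd number of
// hex digits, so a leading '0' is inserted before decoding, yielding the bytes
// [0x0F, 0xFF]; an invalid digit, as in x"GG", is reported together with its
// source location.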
// Parse a value:
// Value =
// <Address>
// | "true"
// | "false"
// | <U8Value>
// | <U64Value>
// | <U128Value>
// | <ByteString>
fn parse_value<'input>(tokens: &mut Lexer<'input>) -> Result<Value, Error> {
let start_loc = tokens.start_loc();
let val = match tokens.peek() {
Tok::AddressValue => {
let addr = parse_address(tokens)?;
Value_::Address(addr)
}
Tok::True => {
tokens.advance()?;
Value_::Bool(true)
}
Tok::False => {
tokens.advance()?;
Value_::Bool(false)
}
Tok::U8Value => {
let mut s = tokens.content();
if s.ends_with("u8") {
s = &s[..s.len() - 2]
}
let i = u8::from_str(s).unwrap();
tokens.advance()?;
Value_::U8(i)
}
Tok::U64Value => {
let mut s = tokens.content();
if s.ends_with("u64") {
s = &s[..s.len() - 3]
}
let i = u64::from_str(s).unwrap();
tokens.advance()?;
Value_::U64(i)
}
Tok::U128Value => {
let mut s = tokens.content();
if s.ends_with("u128") {
s = &s[..s.len() - 4]
}
let i = u128::from_str(s).unwrap();
tokens.advance()?;
Value_::U128(i)
}
Tok::ByteStringValue => {
let byte_string = parse_byte_string(tokens)?;
Value_::Bytearray(byte_string)
}
_ => unreachable!("parse_value called with invalid token"),
};
let end_loc = tokens.previous_end_loc();
Ok(spanned(tokens.file_name(), start_loc, end_loc, val))
}
// Parse a num value:
// Num = <NumValue>
fn parse_num(tokens: &mut Lexer) -> Result<u128, Error> {
let start_loc = tokens.start_loc();
assert_eq!(tokens.peek(), Tok::NumValue);
let res = match u128::from_str(tokens.content()) {
Ok(i) => Ok(i),
Err(_) => {
let end_loc = start_loc + tokens.content().len();
let loc = make_loc(tokens.file_name(), start_loc, end_loc);
let msg = "Invalid number literal. The given literal is too large to fit into the \
largest number type 'u128'";
Err(vec![(loc, msg.to_owned())])
}
};
tokens.advance()?;
res
}
//**************************************************************************************************
// Sequences
//**************************************************************************************************
// Parse a sequence item:
// SequenceItem =
// <Exp>
// | "let" <BindList> (":" <Type>)? ("=" <Exp>)?
fn parse_sequence_item<'input>(tokens: &mut Lexer<'input>) -> Result<SequenceItem, Error> {
let start_loc = tokens.start_loc();
let item = if match_token(tokens, Tok::Let)? {
let b = parse_bind_list(tokens)?;
let ty_opt = if match_token(tokens, Tok::Colon)? {
Some(parse_type(tokens)?)
} else {
None
};
if match_token(tokens, Tok::Equal)? {
let e = parse_exp(tokens)?;
SequenceItem_::Bind(b, ty_opt, Box::new(e))
} else {
SequenceItem_::Declare(b, ty_opt)
}
} else {
let e = parse_exp(tokens)?;
SequenceItem_::Seq(Box::new(e))
};
let end_loc = tokens.previous_end_loc();
Ok(spanned(tokens.file_name(), start_loc, end_loc, item))
}
// Parse a sequence:
// Sequence = (<SequenceItem> ";")* <Exp>? "}"
//
// Note that this does not include the opening brace of a block but it
// does consume the closing right brace.
fn parse_sequence<'input>(tokens: &mut Lexer<'input>) -> Result<Sequence, Error> {
let mut seq: Vec<SequenceItem> = vec![];
let mut last_semicolon_loc = None;
let mut eopt = None;
while tokens.peek() != Tok::RBrace {
let item = parse_sequence_item(tokens)?;
if tokens.peek() == Tok::RBrace {
// If the sequence ends with an expression that is not
// followed by a semicolon, split out that expression
// from the rest of the SequenceItems.
if let SequenceItem_::Seq(e) = item.value {
eopt = Some(Spanned {
loc: item.loc,
value: e.value,
});
} else {
seq.push(item);
}
break;
}
seq.push(item);
last_semicolon_loc = Some(current_token_loc(&tokens));
consume_token(tokens, Tok::Semicolon)?;
}
tokens.advance()?; // consume the RBrace
Ok((seq, last_semicolon_loc, Box::new(eopt)))
}
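// Worked example (illustrative): for the block "{ let x = 1; x + 1 }" this
// produces a single SequenceItem for "let x = 1" and, because "x + 1" is not
// followed by a semicolon, returns it separately as the trailing expression
// (the eopt value above).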
//**************************************************************************************************
// Expressions
//**************************************************************************************************
// Parse an expression term:
// Term =
// "break"
// | "continue"
// | <Name>
// | <ModuleAccess> ("<" Comma<Type> ">")? (spec only)
// | <Value>
// | <Num>
// | "(" Comma<Exp> ")"
// | "(" <Exp> ":" <Type> ")"
// | "(" <Exp> "as" <Type> ")"
// | "{" <Sequence>
// | <ModuleAccess> ("<" Comma<Type> ">")? "{" Comma<ExpField> "}"
// | <ModuleAccess> ("<" Comma<Type> ">")? "(" Comma<Exp> ")"
// | "::" <Name> ("<" Comma<Type> ">")? "(" Comma<Exp> ")"
fn parse_term<'input>(tokens: &mut Lexer<'input>) -> Result<Exp, Error> {
let start_loc = tokens.start_loc();
let term = match tokens.peek() {
Tok::Break => {
tokens.advance()?;
Exp_::Break
}
Tok::Continue => {
tokens.advance()?;
Exp_::Continue
}
Tok::NameValue => {
// Check if this is a ModuleAccess for a pack or call expression.
match tokens.lookahead()? {
Tok::ColonColon | Tok::LBrace | Tok::LParen => {
parse_pack_or_call_or_generic_name(tokens)?
}
Tok::Less => {
// There's an ambiguity here. If there is no whitespace after the
// name, treat it as the start of a list of type arguments. Otherwise
// assume that the "<" is a boolean operator.
let next_start = tokens.lookahead_start_loc();
if next_start == start_loc + tokens.content().len() {
let loc = make_loc(tokens.file_name(), next_start, next_start);
parse_pack_or_call_or_generic_name(tokens).or_else(|mut e| {
let msg = "Perhaps you need a blank space before this '<' operator?";
e.push((loc, msg.to_owned()));
Err(e)
})?
} else {
let name = parse_name(tokens)?;
Exp_::Name(sp(name.loc, ModuleAccess_::Name(name)), None)
}
}
_ => {
let name = parse_name(tokens)?;
Exp_::Name(sp(name.loc, ModuleAccess_::Name(name)), None)
}
}
}
Tok::AddressValue => {
// Check if this is a ModuleIdent (in a ModuleAccess).
if tokens.lookahead()? == Tok::ColonColon {
parse_pack_or_call_or_generic_name(tokens)?
} else {
Exp_::Value(parse_value(tokens)?)
}
}
Tok::True
| Tok::False
| Tok::U8Value
| Tok::U64Value
| Tok::U128Value
| Tok::ByteStringValue => Exp_::Value(parse_value(tokens)?),
Tok::NumValue => Exp_::InferredNum(parse_num(tokens)?),
// "(" Comma<Exp> ")"
// "(" <Exp> ":" <Type> ")"
// "(" <Exp> "as" <Type> ")"
Tok::LParen => {
let list_loc = tokens.start_loc();
tokens.advance()?; // consume the LParen
if match_token(tokens, Tok::RParen)? {
Exp_::Unit
} else {
// If there is a single expression inside the parens,
// then it may be followed by a colon and a type annotation.
let e = parse_exp(tokens)?;
if match_token(tokens, Tok::Colon)? {
let ty = parse_type(tokens)?;
consume_token(tokens, Tok::RParen)?;
Exp_::Annotate(Box::new(e), ty)
} else if match_token(tokens, Tok::As)? {
let ty = parse_type(tokens)?;
consume_token(tokens, Tok::RParen)?;
Exp_::Cast(Box::new(e), ty)
} else {
if tokens.peek() != Tok::RParen {
consume_token(tokens, Tok::Comma)?;
}
let mut es = parse_comma_list_after_start(
tokens,
list_loc,
Tok::LParen,
Tok::RParen,
parse_exp,
"an expression",
)?;
if es.is_empty() {
e.value
} else {
es.insert(0, e);
Exp_::ExpList(es)
}
}
}
}
// "{" <Sequence>
Tok::LBrace => {
tokens.advance()?; // consume the LBrace
Exp_::Block(parse_sequence(tokens)?)
}
// "::" <Name> <OptionalTypeArgs> "(" Comma<Exp> ")"
Tok::ColonColon => {
tokens.advance()?; // consume the "::"
let n = parse_name(tokens)?;
let tys = parse_optional_type_args(tokens)?;
let rhs = parse_call_args(tokens)?;
Exp_::GlobalCall(n, tys, rhs)
}
Tok::Spec => {
let spec_block = parse_spec_block(tokens)?;
Exp_::Spec(spec_block)
}
_ => {
return Err(unexpected_token_error(tokens, "an expression term"));
}
};
let end_loc = tokens.previous_end_loc();
Ok(spanned(tokens.file_name(), start_loc, end_loc, term))
}
// Parse the subset of expression terms for pack and call operations, as well as for generic names
// (in specifications only).
// This is a helper function for parse_term.
fn parse_pack_or_call_or_generic_name<'input>(tokens: &mut Lexer<'input>) -> Result<Exp_, Error> {
let n = parse_module_access(tokens, || {
panic!("parse_pack_or_call_or_generic_name with something other than a NameValue or AddressValue token")
})?;
let tys = parse_optional_type_args(tokens)?;
match tokens.peek() {
// <ModuleAccess> ("<" Comma<Type> ">")? "{" Comma<ExpField> "}"
Tok::LBrace => {
let fs = parse_comma_list(
tokens,
Tok::LBrace,
Tok::RBrace,
parse_exp_field,
"a field expression",
)?;
Ok(Exp_::Pack(n, tys, fs))
}
// <ModuleAccess> ("<" Comma<Type> ">")? "(" Comma<Exp> ")"
Tok::LParen => {
let rhs = parse_call_args(tokens)?;
Ok(Exp_::Call(n, tys, rhs))
}
// <ModuleAccess> ("<" Comma<Type> ">")?
_ => Ok(Exp_::Name(n, tys)),
}
}
// Parse the arguments to a call: "(" Comma<Exp> ")"
fn parse_call_args<'input>(tokens: &mut Lexer<'input>) -> Result<Spanned<Vec<Exp>>, Error> {
let start_loc = tokens.start_loc();
let args = parse_comma_list(
tokens,
Tok::LParen,
Tok::RParen,
parse_exp,
"a call argument expression",
)?;
let end_loc = tokens.previous_end_loc();
Ok(spanned(tokens.file_name(), start_loc, end_loc, args))
}
// Return true if the current token is one that might occur after an Exp.
// This is needed, for example, to check for the optional Exp argument to
// a return (where "return" is itself an Exp).
fn at_end_of_exp<'input>(tokens: &mut Lexer<'input>) -> bool {
matches!(
tokens.peek(),
// These are the tokens that can occur after an Exp. If the grammar
// changes, we need to make sure that these are kept up to date and that
// none of these tokens can occur at the beginning of an Exp.
Tok::Else | Tok::RBrace | Tok::RParen | Tok::Comma | Tok::Colon | Tok::Semicolon
)
}
// Parse an expression:
// Exp =
// <LambdaBindList> <Exp> lambda: spec only
// | "if" "(" <Exp> ")" <Exp> ("else" <Exp>)?
// | "while" "(" <Exp> ")" <Exp>
// | "loop" <Exp>
// | "return" <Exp>?
// | "abort" <Exp>
// | <BinOpExp>
// | <UnaryExp> "=" <Exp>
fn parse_exp<'input>(tokens: &mut Lexer<'input>) -> Result<Exp, Error> {
let start_loc = tokens.start_loc();
let exp = match tokens.peek() {
Tok::Pipe => {
let bindings = parse_lambda_bind_list(tokens)?;
let body = Box::new(parse_exp(tokens)?);
Exp_::Lambda(bindings, body)
}
Tok::If => {
tokens.advance()?;
consume_token(tokens, Tok::LParen)?;
let eb = Box::new(parse_exp(tokens)?);
consume_token(tokens, Tok::RParen)?;
let et = Box::new(parse_exp(tokens)?);
let ef = if match_token(tokens, Tok::Else)? {
Some(Box::new(parse_exp(tokens)?))
} else {
None
};
Exp_::IfElse(eb, et, ef)
}
Tok::While => {
tokens.advance()?;
consume_token(tokens, Tok::LParen)?;
let eb = Box::new(parse_exp(tokens)?);
consume_token(tokens, Tok::RParen)?;
let eloop = Box::new(parse_exp(tokens)?);
Exp_::While(eb, eloop)
}
Tok::Loop => {
tokens.advance()?;
let eloop = Box::new(parse_exp(tokens)?);
Exp_::Loop(eloop)
}
Tok::Return => {
tokens.advance()?;
let e = if at_end_of_exp(tokens) {
None
} else {
Some(Box::new(parse_exp(tokens)?))
};
Exp_::Return(e)
}
Tok::Abort => {
tokens.advance()?;
let e = Box::new(parse_exp(tokens)?);
Exp_::Abort(e)
}
_ => {
// This could be either an assignment or a binary operator
// expression.
let lhs = parse_unary_exp(tokens)?;
if tokens.peek() != Tok::Equal {
return parse_binop_exp(tokens, lhs, /* min_prec */ 1);
}
tokens.advance()?; // consume the "="
let rhs = Box::new(parse_exp(tokens)?);
Exp_::Assign(Box::new(lhs), rhs)
}
};
let end_loc = tokens.previous_end_loc();
Ok(spanned(tokens.file_name(), start_loc, end_loc, exp))
}
// Get the precedence of a binary operator. The minimum precedence value
// is 1, and larger values have higher precedence. For tokens that are not
// binary operators, this returns a value of zero so that they will be
// below the minimum value and will mark the end of the binary expression
// for the code in parse_binop_exp.
fn get_precedence(token: Tok) -> u32 {
match token {
// Reserved minimum precedence value is 1
Tok::EqualEqualGreater => 2,
Tok::PipePipe => 3,
Tok::AmpAmp => 4,
Tok::EqualEqual => 5,
Tok::ExclaimEqual => 5,
Tok::Less => 5,
Tok::Greater => 5,
Tok::LessEqual => 5,
Tok::GreaterEqual => 5,
Tok::PeriodPeriod => 6,
Tok::Pipe => 7,
Tok::Caret => 8,
Tok::Amp => 9,
Tok::LessLess => 10,
Tok::GreaterGreater => 10,
Tok::Plus => 11,
Tok::Minus => 11,
Tok::Star => 12,
Tok::Slash => 12,
Tok::Percent => 12,
_ => 0, // anything else is not a binary operator
}
}
// Parse a binary operator expression:
// BinOpExp =
// <BinOpExp> <BinOp> <BinOpExp>
// | <UnaryExp>
// BinOp = (listed from lowest to highest precedence)
// "==>" spec only
// | "||"
// | "&&"
// | "==" | "!=" | "<" | ">" | "<=" | ">="
// | ".." spec only
// | "|"
// | "^"
// | "&"
// | "<<" | ">>"
// | "+" | "-"
// | "*" | "/" | "%"
//
// This function takes the LHS of the expression as an argument, and it
// continues parsing binary expressions as long as they have at least the
// specified "min_prec" minimum precedence.
fn parse_binop_exp<'input>(
tokens: &mut Lexer<'input>,
lhs: Exp,
min_prec: u32,
) -> Result<Exp, Error> {
let mut result = lhs;
let mut next_tok_prec = get_precedence(tokens.peek());
while next_tok_prec >= min_prec {
// Parse the operator.
let op_start_loc = tokens.start_loc();
let op_token = tokens.peek();
tokens.advance()?;
let op_end_loc = tokens.previous_end_loc();
let mut rhs = parse_unary_exp(tokens)?;
// If the next token is another binary operator with a higher
// precedence, then recursively parse that expression as the RHS.
let this_prec = next_tok_prec;
next_tok_prec = get_precedence(tokens.peek());
if this_prec < next_tok_prec {
rhs = parse_binop_exp(tokens, rhs, this_prec + 1)?;
next_tok_prec = get_precedence(tokens.peek());
}
let op = match op_token {
Tok::EqualEqual => BinOp_::Eq,
Tok::ExclaimEqual => BinOp_::Neq,
Tok::Less => BinOp_::Lt,
Tok::Greater => BinOp_::Gt,
Tok::LessEqual => BinOp_::Le,
Tok::GreaterEqual => BinOp_::Ge,
Tok::PipePipe => BinOp_::Or,
Tok::AmpAmp => BinOp_::And,
Tok::Caret => BinOp_::Xor,
Tok::Pipe => BinOp_::BitOr,
Tok::Amp => BinOp_::BitAnd,
Tok::LessLess => BinOp_::Shl,
Tok::GreaterGreater => BinOp_::Shr,
Tok::Plus => BinOp_::Add,
Tok::Minus => BinOp_::Sub,
Tok::Star => BinOp_::Mul,
Tok::Slash => BinOp_::Div,
Tok::Percent => BinOp_::Mod,
Tok::PeriodPeriod => BinOp_::Range,
Tok::EqualEqualGreater => BinOp_::Implies,
_ => panic!("Unexpected token that is not a binary operator"),
};
let sp_op = spanned(tokens.file_name(), op_start_loc, op_end_loc, op);
let start_loc = result.loc.span().start().to_usize();
let end_loc = tokens.previous_end_loc();
let e = Exp_::BinopExp(Box::new(result), sp_op, Box::new(rhs));
result = spanned(tokens.file_name(), start_loc, end_loc, e);
}
Ok(result)
}
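// Worked example (illustrative): for "1 + 2 * 3 < 4", parse_binop_exp starts
// with lhs = 1 and min_prec = 1. It consumes "+" (precedence 11) and parses
// "2"; since the following "*" (precedence 12) binds tighter, it recurses to
// fold "2 * 3" first, producing "1 + (2 * 3)", which then becomes the LHS of
// "<" (precedence 5), giving "((1 + (2 * 3)) < 4)".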
// Parse a unary expression:
// UnaryExp =
// "!" <UnaryExp>
// | "&mut" <UnaryExp>
// | "&" <UnaryExp>
// | "*" <UnaryExp>
// | "move" <Var>
// | "copy" <Var>
// | <DotOrIndexChain>
fn parse_unary_exp<'input>(tokens: &mut Lexer<'input>) -> Result<Exp, Error> {
let start_loc = tokens.start_loc();
let exp = match tokens.peek() {
Tok::Exclaim => {
tokens.advance()?;
let op_end_loc = tokens.previous_end_loc();
let op = spanned(tokens.file_name(), start_loc, op_end_loc, UnaryOp_::Not);
let e = parse_unary_exp(tokens)?;
Exp_::UnaryExp(op, Box::new(e))
}
Tok::AmpMut => {
tokens.advance()?;
let e = parse_unary_exp(tokens)?;
Exp_::Borrow(true, Box::new(e))
}
Tok::Amp => {
tokens.advance()?;
let e = parse_unary_exp(tokens)?;
Exp_::Borrow(false, Box::new(e))
}
Tok::Star => {
tokens.advance()?;
let e = parse_unary_exp(tokens)?;
Exp_::Dereference(Box::new(e))
}
Tok::Move => {
tokens.advance()?;
Exp_::Move(parse_var(tokens)?)
}
Tok::Copy => {
tokens.advance()?;
Exp_::Copy(parse_var(tokens)?)
}
_ => {
return parse_dot_or_index_chain(tokens);
}
};
let end_loc = tokens.previous_end_loc();
Ok(spanned(tokens.file_name(), start_loc, end_loc, exp))
}
// Parse an expression term optionally followed by a chain of dot or index accesses:
// DotOrIndexChain =
// <DotOrIndexChain> "." <Name>
// | <DotOrIndexChain> "[" <Exp> "]" spec only
// | <Term>
fn parse_dot_or_index_chain<'input>(tokens: &mut Lexer<'input>) -> Result<Exp, Error> {
let start_loc = tokens.start_loc();
let mut lhs = parse_term(tokens)?;
loop {
let exp = match tokens.peek() {
Tok::Period => {
tokens.advance()?;
let n = parse_name(tokens)?;
Exp_::Dot(Box::new(lhs), n)
}
Tok::LBracket => {
tokens.advance()?;
let index = parse_exp(tokens)?;
let exp = Exp_::Index(Box::new(lhs), Box::new(index));
consume_token(tokens, Tok::RBracket)?;
exp
}
_ => break,
};
let end_loc = tokens.previous_end_loc();
lhs = spanned(tokens.file_name(), start_loc, end_loc, exp);
}
Ok(lhs)
}
//**************************************************************************************************
// Types
//**************************************************************************************************
// Parse a Type:
// Type =
// <ModuleAccess> ("<" Comma<Type> ">")?
// | "&" <Type>
// | "&mut" <Type>
// | "|" Comma<Type> "|" Type (spec only)
// | "(" Comma<Type> ")"
fn parse_type<'input>(tokens: &mut Lexer<'input>) -> Result<Type, Error> {
let start_loc = tokens.start_loc();
let t = match tokens.peek() {
Tok::LParen => {
let mut ts = parse_comma_list(tokens, Tok::LParen, Tok::RParen, parse_type, "a type")?;
match ts.len() {
0 => Type_::Unit,
1 => ts.pop().unwrap().value,
_ => Type_::Multiple(ts),
}
}
Tok::Amp => {
tokens.advance()?;
let t = parse_type(tokens)?;
Type_::Ref(false, Box::new(t))
}
Tok::AmpMut => {
tokens.advance()?;
let t = parse_type(tokens)?;
Type_::Ref(true, Box::new(t))
}
Tok::Pipe => {
let args = parse_comma_list(tokens, Tok::Pipe, Tok::Pipe, parse_type, "a type")?;
let result = parse_type(tokens)?;
return Ok(spanned(
tokens.file_name(),
start_loc,
tokens.previous_end_loc(),
Type_::Fun(args, Box::new(result)),
));
}
_ => {
let tn = parse_module_access(tokens, || "a type name".to_string())?;
let tys = if tokens.peek() == Tok::Less {
parse_comma_list(tokens, Tok::Less, Tok::Greater, parse_type, "a type")?
} else {
vec![]
};
Type_::Apply(Box::new(tn), tys)
}
};
let end_loc = tokens.previous_end_loc();
Ok(spanned(tokens.file_name(), start_loc, end_loc, t))
}
// Parse an optional list of type arguments.
// OptionalTypeArgs = "<" Comma<Type> ">" | <empty>
fn parse_optional_type_args<'input>(
tokens: &mut Lexer<'input>,
) -> Result<Option<Vec<Type>>, Error> {
if tokens.peek() == Tok::Less {
Ok(Some(parse_comma_list(
tokens,
Tok::Less,
Tok::Greater,
parse_type,
"a type",
)?))
} else {
Ok(None)
}
}
// Parse a type parameter:
// TypeParameter =
// <Name> <Constraint>?
// Constraint =
// ":" "copyable"
// | ":" "resource"
fn parse_type_parameter<'input>(tokens: &mut Lexer<'input>) -> Result<(Name, Kind), Error> {
let n = parse_name(tokens)?;
let kind = if match_token(tokens, Tok::Colon)? {
let start_loc = tokens.start_loc();
let k = match tokens.peek() {
Tok::Copyable => Kind_::Affine,
Tok::Resource => Kind_::Resource,
_ => {
let expected = "either 'copyable' or 'resource'";
return Err(unexpected_token_error(tokens, expected));
}
};
tokens.advance()?;
let end_loc = tokens.previous_end_loc();
spanned(tokens.file_name(), start_loc, end_loc, k)
} else {
sp(n.loc, Kind_::Unknown)
};
Ok((n, kind))
}
// Parse optional type parameter list.
// OptionalTypeParameters = "<" Comma<TypeParameter> ">" | <empty>
fn parse_optional_type_parameters<'input>(
tokens: &mut Lexer<'input>,
) -> Result<Vec<(Name, Kind)>, Error> {
if tokens.peek() == Tok::Less {
parse_comma_list(
tokens,
Tok::Less,
Tok::Greater,
parse_type_parameter,
"a type parameter",
)
} else {
Ok(vec![])
}
}
//**************************************************************************************************
// Functions
//**************************************************************************************************
// Parse a function declaration:
// FunctionDecl =
// <NativeFunctionDecl>
// | <MoveFunctionDecl>
// NativeFunctionDecl =
// "native" ( "public" )? "fun"
// <FunctionDefName> "(" Comma<Parameter> ")"
// (":" <Type>)?
// ("acquires" <ModuleAccess> ("," <ModuleAccess>)*)?
// ";"
// MoveFunctionDecl =
// ( "public" )? "fun"
// <FunctionDefName> "(" Comma<Parameter> ")"
// (":" <Type>)?
// ("acquires" <ModuleAccess> ("," <ModuleAccess>)*)?
// "{" <Sequence>
// FunctionDefName =
// <Name> <OptionalTypeParameters>
//
// If the "allow_native" parameter is false, this will only accept Move
// functions.
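// Example (illustrative; the names are hypothetical) of a declaration this
// grammar accepts:
//
//   public fun withdraw<Token: resource>(payer: address, amount: u64): bool
//       acquires Balance { ... }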
fn parse_function_decl<'input>(
tokens: &mut Lexer<'input>,
allow_native: bool,
) -> Result<Function, Error> {
let start_loc = tokens.start_loc();
// Record the source location of the "native" keyword (if there is one).
let native_opt = if allow_native {
consume_optional_token_with_loc(tokens, Tok::Native)?
} else {
if tokens.peek() == Tok::Native {
let loc = current_token_loc(tokens);
return Err(vec![(
loc,
"Native functions can only be declared inside a module".to_string(),
)]);
}
None
};
// (<Public>)?
let public_opt = consume_optional_token_with_loc(tokens, Tok::Public)?;
let visibility = if let Some(loc) = public_opt {
FunctionVisibility::Public(loc)
} else {
FunctionVisibility::Internal
};
// "fun" <FunctionDefName>
consume_token(tokens, Tok::Fun)?;
let name = FunctionName(parse_name(tokens)?);
let type_parameters = parse_optional_type_parameters(tokens)?;
// "(" Comma<Parameter> ")"
let parameters = parse_comma_list(
tokens,
Tok::LParen,
Tok::RParen,
parse_parameter,
"a function parameter",
)?;
// (":" <Type>)?
let return_type = if match_token(tokens, Tok::Colon)? {
parse_type(tokens)?
} else {
sp(name.loc(), Type_::Unit)
};
// ("acquires" (<ModuleAccess> ",")* <ModuleAccess> ","?
let mut acquires = vec![];
if match_token(tokens, Tok::Acquires)? {
let follows_acquire = |tok| matches!(tok, Tok::Semicolon | Tok::LBrace);
loop {
acquires.push(parse_module_access(tokens, || {
"a resource struct name".to_string()
})?);
if follows_acquire(tokens.peek()) {
break;
}
consume_token(tokens, Tok::Comma)?;
if follows_acquire(tokens.peek()) {
break;
}
}
}
let body = match native_opt {
Some(loc) => {
consume_token(tokens, Tok::Semicolon)?;
sp(loc, FunctionBody_::Native)
}
_ => {
let start_loc = tokens.start_loc();
consume_token(tokens, Tok::LBrace)?;
let seq = parse_sequence(tokens)?;
let end_loc = tokens.previous_end_loc();
sp(
make_loc(tokens.file_name(), start_loc, end_loc),
FunctionBody_::Defined(seq),
)
}
};
let signature = FunctionSignature {
type_parameters,
parameters,
return_type,
};
let loc = make_loc(tokens.file_name(), start_loc, tokens.previous_end_loc());
Ok(Function {
loc,
visibility,
signature,
acquires,
name,
body,
})
}
// Parse a function parameter:
// Parameter = <Var> ":" <Type>
fn parse_parameter<'input>(tokens: &mut Lexer<'input>) -> Result<(Var, Type), Error> {
let v = parse_var(tokens)?;
consume_token(tokens, Tok::Colon)?;
let t = parse_type(tokens)?;
Ok((v, t))
}
//**************************************************************************************************
// Structs
//**************************************************************************************************
// Parse a struct definition:
// StructDefinition =
// "resource"? "struct" <StructDefName> "{" Comma<FieldAnnot> "}"
// | "native" "resource"? "struct" <StructDefName> ";"
// StructDefName =
// <Name> <OptionalTypeParameters>
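// For illustration (hypothetical definitions, not from the original source):
// resource struct Balance<Token> { value: u64 }
// native struct Opaque<T>;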
fn parse_struct_definition<'input>(tokens: &mut Lexer<'input>) -> Result<StructDefinition, Error> {
let start_loc = tokens.start_loc();
// Record the source location of the "native" keyword (if there is one).
let native_opt = consume_optional_token_with_loc(tokens, Tok::Native)?;
// Record the source location of the "resource" keyword (if there is one).
let resource_opt = consume_optional_token_with_loc(tokens, Tok::Resource)?;
consume_token(tokens, Tok::Struct)?;
// <StructDefName>
let name = StructName(parse_name(tokens)?);
let type_parameters = parse_optional_type_parameters(tokens)?;
let fields = match native_opt {
Some(loc) => {
consume_token(tokens, Tok::Semicolon)?;
StructFields::Native(loc)
}
_ => {
let list = parse_comma_list(
tokens,
Tok::LBrace,
Tok::RBrace,
parse_field_annot,
"a field",
)?;
StructFields::Defined(list)
}
};
let loc = make_loc(tokens.file_name(), start_loc, tokens.previous_end_loc());
Ok(StructDefinition {
loc,
resource_opt,
name,
type_parameters,
fields,
})
}
// Parse a field annotated with a type:
// FieldAnnot = <Field> ":" <Type>
fn parse_field_annot<'input>(tokens: &mut Lexer<'input>) -> Result<(Field, Type), Error> {
let f = parse_field(tokens)?;
consume_token(tokens, Tok::Colon)?;
let st = parse_type(tokens)?;
Ok((f, st))
}
//**************************************************************************************************
// Modules
//**************************************************************************************************
// Parse a use declaration:
// UseDecl = "use" <ModuleIdent> ("as" <ModuleName>)? ";"
fn parse_use_decl<'input>(
tokens: &mut Lexer<'input>,
) -> Result<(ModuleIdent, Option<ModuleName>), Error> {
consume_token(tokens, Tok::Use)?;
let ident = parse_module_ident(tokens)?;
let alias = if tokens.peek() == Tok::As {
tokens.advance()?;
Some(parse_module_name(tokens)?)
} else {
None
};
consume_token(tokens, Tok::Semicolon)?;
Ok((ident, alias))
}
fn is_struct_definition<'input>(tokens: &mut Lexer<'input>) -> Result<bool, Error> {
let mut t = tokens.peek();
if t == Tok::Native {
t = tokens.lookahead()?;
}
Ok(t == Tok::Struct || t == Tok::Resource)
}
// Parse a module:
// Module =
// "module" <ModuleName> "{"
// <UseDecl>*
// ( <StructDefinition> | <FunctionDecl> | <Spec> )*
// "}"
fn parse_module<'input>(tokens: &mut Lexer<'input>) -> Result<ModuleDefinition, Error> {
let start_loc = tokens.start_loc();
consume_token(tokens, Tok::Module)?;
let name = parse_module_name(tokens)?;
consume_token(tokens, Tok::LBrace)?;
let mut uses = vec![];
while tokens.peek() == Tok::Use {
uses.push(parse_use_decl(tokens)?);
}
let mut structs = vec![];
let mut functions = vec![];
let mut specs = vec![];
while tokens.peek() != Tok::RBrace {
if tokens.peek() == Tok::Spec {
specs.push(parse_spec_block(tokens)?);
} else if is_struct_definition(tokens)? {
structs.push(parse_struct_definition(tokens)?);
} else {
functions.push(parse_function_decl(tokens, /* allow_native */ true)?);
}
}
tokens.advance()?; // consume the RBrace
let loc = make_loc(tokens.file_name(), start_loc, tokens.previous_end_loc());
Ok(ModuleDefinition {
loc,
uses,
name,
structs,
functions,
specs,
})
}
//**************************************************************************************************
// Specification Blocks
//**************************************************************************************************
// Parse an optional specification block:
// SpecBlockTarget =
// "fun" <Name>
// | "struct <Name>
// | "module"
// | "schema" <Name> <OptionalTypeParameters>
// | <empty>
// SpecBlock = "spec" <SpecBlockTarget> "{" SpecBlockMember* "}"
fn parse_spec_block<'input>(tokens: &mut Lexer<'input>) -> Result<SpecBlock, Error> {
let start_loc = tokens.start_loc();
consume_token(tokens, Tok::Spec)?;
let target_start_loc = tokens.start_loc();
let target_ = match tokens.peek() {
Tok::Fun => {
tokens.advance()?;
let name = FunctionName(parse_name(tokens)?);
SpecBlockTarget_::Function(name)
}
Tok::Struct => {
tokens.advance()?;
let name = StructName(parse_name(tokens)?);
SpecBlockTarget_::Structure(name)
}
Tok::Module => {
tokens.advance()?;
SpecBlockTarget_::Module
}
Tok::NameValue if tokens.content() == "schema" => {
tokens.advance()?;
let name = parse_name(tokens)?;
let type_parameters = parse_optional_type_parameters(tokens)?;
SpecBlockTarget_::Schema(name, type_parameters)
}
Tok::LBrace => SpecBlockTarget_::Code,
_ => {
return Err(unexpected_token_error(
tokens,
"one of `module`, `struct`, `fun`, `schema`, or `{`",
))
}
};
let target = spanned(
tokens.file_name(),
target_start_loc,
match target_ {
SpecBlockTarget_::Code => target_start_loc,
_ => tokens.previous_end_loc(),
},
target_,
);
consume_token(tokens, Tok::LBrace)?;
let mut uses = vec![];
while tokens.peek() == Tok::Use {
uses.push(parse_use_decl(tokens)?);
}
let mut members = vec![];
while tokens.peek() != Tok::RBrace {
members.push(parse_spec_block_member(tokens)?);
}
consume_token(tokens, Tok::RBrace)?;
Ok(spanned(
tokens.file_name(),
start_loc,
tokens.previous_end_loc(),
SpecBlock_ {
target,
uses,
members,
},
))
}
// Parse a spec block member:
// SpecBlockMember = <Invariant> | <Condition> | <SpecFunction> | <SpecVariable>
// | <SpecInclude> | <SpecApply> | <SpecPragma>
fn parse_spec_block_member<'input>(tokens: &mut Lexer<'input>) -> Result<SpecBlockMember, Error> {
match tokens.peek() {
Tok::Invariant => parse_invariant(tokens),
Tok::Define | Tok::Native => parse_spec_function(tokens),
Tok::NameValue => match tokens.content() {
"assert" | "assume" | "decreases" | "aborts_if" | "ensures" | "requires" => {
parse_condition(tokens)
}
"include" => parse_spec_include(tokens),
"apply" => parse_spec_apply(tokens),
"pragma" => parse_spec_pragma(tokens),
"global" | "local" => parse_spec_variable(tokens),
_ => {
// The `local` modifier is optional; it is supported so that variables which are
// named like the weak keywords above can still be declared.
parse_spec_variable(tokens)
}
},
_ => Err(unexpected_token_error(
tokens,
"one of `assert`, `assume`, `decreases`, `aborts_if`, `ensures`,\
`requires`, `include`, `apply`, `pragma`, `global`, or a name",
)),
}
}
// Parse a specification condition:
// SpecCondition = ("assert" | "assume" | "decreases" | "aborts_if" | "ensures" | "requires" ) <Exp> ";"
fn parse_condition<'input>(tokens: &mut Lexer<'input>) -> Result<SpecBlockMember, Error> {
let start_loc = tokens.start_loc();
let kind = match tokens.content() {
"assert" => SpecConditionKind::Assert,
"assume" => SpecConditionKind::Assume,
"decreases" => SpecConditionKind::Decreases,
"aborts_if" => SpecConditionKind::AbortsIf,
"ensures" => SpecConditionKind::Ensures,
"requires" => {
if tokens.lookahead()? == Tok::Module {
tokens.advance()?;
SpecConditionKind::RequiresModule
} else {
SpecConditionKind::Requires
}
}
_ => unreachable!(),
};
tokens.advance()?;
let exp = parse_exp(tokens)?;
consume_token(tokens, Tok::Semicolon)?;
let end_loc = tokens.previous_end_loc();
Ok(spanned(
tokens.file_name(),
start_loc,
end_loc,
SpecBlockMember_::Condition { kind, exp },
))
}
// Parse an invariant:
// Invariant = "invariant" ( "update" | "pack" | "unpack" )? <Exp> ";"
fn parse_invariant<'input>(tokens: &mut Lexer<'input>) -> Result<SpecBlockMember, Error> {
let start_loc = tokens.start_loc();
consume_token(tokens, Tok::Invariant)?;
let kind = if Tok::NameValue == tokens.peek() {
// The update/pack/unpack modifiers are 'weak' keywords. They are reserved
// only when following an "invariant" token. One can use "invariant (update ...)" to
// force interpretation as identifiers in expressions.
match tokens.content() {
"update" => {
tokens.advance()?;
InvariantKind::Update
}
"pack" => {
tokens.advance()?;
InvariantKind::Pack
}
"unpack" => {
tokens.advance()?;
InvariantKind::Unpack
}
_ => InvariantKind::Data,
}
} else {
InvariantKind::Data
};
let exp = parse_exp(tokens)?;
consume_token(tokens, Tok::Semicolon)?;
Ok(spanned(
tokens.file_name(),
start_loc,
tokens.previous_end_loc(),
SpecBlockMember_::Invariant { kind, exp },
))
}
// Parse a specification function.
// SpecFunction = "define" <SpecFunctionSignature> "{" <Sequence> "}"
// | "native" "define" <SpecFunctionSignature> ";"
// SpecFunctionSignature = <Name> <OptionalTypeParameters> "(" Comma<Parameter> ")" ":" <Type>
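// For illustration (hypothetical definitions, not from the original source):
// define identity(x: u64): u64 { x }
// native define hash<T>(v: T): u64;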
fn parse_spec_function<'input>(tokens: &mut Lexer<'input>) -> Result<SpecBlockMember, Error> {
let start_loc = tokens.start_loc();
let native_opt = consume_optional_token_with_loc(tokens, Tok::Native)?;
consume_token(tokens, Tok::Define)?;
let name = FunctionName(parse_name(tokens)?);
let type_parameters = parse_optional_type_parameters(tokens)?;
// "(" Comma<Parameter> ")"
let parameters = parse_comma_list(
tokens,
Tok::LParen,
Tok::RParen,
parse_parameter,
"a function parameter",
)?;
// ":" <Type>)
consume_token(tokens, Tok::Colon)?;
let return_type = parse_type(tokens)?;
let body_start_loc = tokens.start_loc();
let body_ = if native_opt.is_some() {
consume_token(tokens, Tok::Semicolon)?;
FunctionBody_::Native
} else {
consume_token(tokens, Tok::LBrace)?;
let seq = parse_sequence(tokens)?;
FunctionBody_::Defined(seq)
};
let body = spanned(
tokens.file_name(),
body_start_loc,
tokens.previous_end_loc(),
body_,
);
let signature = FunctionSignature {
type_parameters,
parameters,
return_type,
};
Ok(spanned(
tokens.file_name(),
start_loc,
tokens.previous_end_loc(),
SpecBlockMember_::Function {
signature,
name,
body,
},
))
}
// Parse a specification variable.
// SpecVariable = ( "global" | "local" )? <Name> <OptionalTypeParameters> ":" <Type> ";"
fn parse_spec_variable<'input>(tokens: &mut Lexer<'input>) -> Result<SpecBlockMember, Error> {
let start_loc = tokens.start_loc();
let is_global = match tokens.content() {
"global" => {
consume_token(tokens, Tok::NameValue)?;
true
}
"local" => {
consume_token(tokens, Tok::NameValue)?;
false
}
_ => false,
};
let name = parse_name(tokens)?;
let type_parameters = parse_optional_type_parameters(tokens)?;
consume_token(tokens, Tok::Colon)?;
let type_ = parse_type(tokens)?;
consume_token(tokens, Tok::Semicolon)?;
Ok(spanned(
tokens.file_name(),
start_loc,
tokens.previous_end_loc(),
SpecBlockMember_::Variable {
is_global,
name,
type_parameters,
type_,
},
))
}
// Parse a specification schema include.
// SpecInclude = "include" <ModuleAccess> <OptionalTypeArgs>
// ( "{" Comma<Name ":" Name> "}" )? ";"
fn parse_spec_include<'input>(tokens: &mut Lexer<'input>) -> Result<SpecBlockMember, Error> {
let start_loc = tokens.start_loc();
consume_name_value(tokens, "include")?;
let name = parse_module_access(tokens, || "a schema name".to_string())?;
let type_arguments = parse_optional_type_args(tokens)?;
let arguments = if tokens.peek() == Tok::LBrace {
parse_comma_list(
tokens,
Tok::LBrace,
Tok::RBrace,
|tokens| {
let left = parse_name(tokens)?;
consume_token(tokens, Tok::Colon)?;
let right = parse_exp(tokens)?;
Ok((left, right))
},
"a schema argument",
)?
} else {
vec![]
};
consume_token(tokens, Tok::Semicolon)?;
Ok(spanned(
tokens.file_name(),
start_loc,
tokens.previous_end_loc(),
SpecBlockMember_::Include {
name,
type_arguments,
arguments,
},
))
}
// Parse a specification schema apply.
// SpecApply = "apply" <ModuleAccess> <OptionalTypeArgs>
// "to" Comma<SpecApplyPattern>
// ( "except" Comma<SpecApplyPattern> )? ";"
fn parse_spec_apply<'input>(tokens: &mut Lexer<'input>) -> Result<SpecBlockMember, Error> {
let start_loc = tokens.start_loc();
consume_name_value(tokens, "apply")?;
let name = parse_module_access(tokens, || "a schema name".to_string())?;
let type_arguments = parse_optional_type_args(tokens)?;
let arguments = if tokens.peek() == Tok::LBrace {
parse_comma_list(
tokens,
Tok::LBrace,
Tok::RBrace,
|tokens| {
let left = parse_name(tokens)?;
consume_token(tokens, Tok::Colon)?;
let right = parse_exp(tokens)?;
Ok((left, right))
},
"a schema argument",
)?
} else {
vec![]
};
consume_name_value(tokens, "to")?;
let parse_patterns = |tokens: &mut Lexer<'input>| {
parse_list(
tokens,
|tokens| {
if tokens.peek() == Tok::Comma {
tokens.advance()?;
Ok(true)
} else {
Ok(false)
}
},
parse_spec_apply_pattern,
)
};
let patterns = parse_patterns(tokens)?;
let exclusion_patterns = if tokens.peek() == Tok::NameValue && tokens.content() == "except" {
tokens.advance()?;
parse_patterns(tokens)?
} else {
vec![]
};
consume_token(tokens, Tok::Semicolon)?;
Ok(spanned(
tokens.file_name(),
start_loc,
tokens.previous_end_loc(),
SpecBlockMember_::Apply {
name,
type_arguments,
arguments,
patterns,
exclusion_patterns,
},
))
}
// Parse a function pattern:
// SpecApplyPattern = <SpecApplyFragment>+ <OptionalTypeArgs>
fn parse_spec_apply_pattern<'input>(tokens: &mut Lexer<'input>) -> Result<SpecApplyPattern, Error> {
let start_loc = tokens.start_loc();
let public_opt = consume_optional_token_with_loc(tokens, Tok::Public)?;
let visibility = if let Some(loc) = public_opt {
Some(FunctionVisibility::Public(loc))
} else if tokens.peek() == Tok::NameValue && tokens.content() == "internal" {
// It's not ideal that we do not have a loc here, but it is acceptable for what
// we are doing with this in specs.
tokens.advance()?;
Some(FunctionVisibility::Internal)
} else {
None
};
let mut last_end = tokens.start_loc() + tokens.content().len();
let name_pattern = parse_list(
tokens,
|tokens| {
// Name fragments must follow each other without whitespace, so we use the same
// token-distance trick as for `>>`.
let start_loc = tokens.start_loc();
let adjacent = last_end == start_loc;
last_end = start_loc + tokens.content().len();
Ok(adjacent && [Tok::NameValue, Tok::Star].contains(&tokens.peek()))
},
parse_spec_apply_fragment,
)?;
let type_arguments = parse_optional_type_args(tokens)?;
Ok(spanned(
tokens.file_name(),
start_loc,
tokens.previous_end_loc(),
SpecApplyPattern_ {
visibility,
name_pattern,
type_arguments,
},
))
}
// Parse a name pattern fragment
// SpecApplyFragment = <Name> | "*"
fn parse_spec_apply_fragment<'input>(
tokens: &mut Lexer<'input>,
) -> Result<SpecApplyFragment, Error> {
let start_loc = tokens.start_loc();
let fragment = match tokens.peek() {
Tok::NameValue => SpecApplyFragment_::NamePart(parse_name(tokens)?),
Tok::Star => {
tokens.advance()?;
SpecApplyFragment_::Wildcard
}
_ => return Err(unexpected_token_error(tokens, "a name fragment or `*`")),
};
Ok(spanned(
tokens.file_name(),
start_loc,
tokens.previous_end_loc(),
fragment,
))
}
// Parse a specification pragma:
// SpecPragma = "pragma" Comma<SpecPragmaProperty> ";"
fn parse_spec_pragma<'input>(tokens: &mut Lexer<'input>) -> Result<SpecBlockMember, Error> {
let start_loc = tokens.start_loc();
consume_name_value(tokens, "pragma")?;
let properties = parse_comma_list_after_start(
tokens,
start_loc,
Tok::NameValue,
Tok::Semicolon,
parse_spec_pragma_property,
"a pragma property",
)?;
Ok(spanned(
tokens.file_name(),
start_loc,
tokens.previous_end_loc(),
SpecBlockMember_::Pragma { properties },
))
}
// Parse a specification pragma property:
// SpecPragmaProperty = <Name> ( "=" Value )?
fn parse_spec_pragma_property<'input>(tokens: &mut Lexer<'input>) -> Result<PragmaProperty, Error> {
let start_loc = tokens.start_loc();
let name = parse_name(tokens)?;
let value = if tokens.peek() == Tok::Equal {
tokens.advance()?;
match tokens.peek() {
Tok::True
| Tok::False
| Tok::U8Value
| Tok::U64Value
| Tok::U128Value
| Tok::ByteStringValue
| Tok::AddressValue => Some(parse_value(tokens)?),
Tok::NumValue => {
let i = parse_num(tokens)?;
Some(spanned(
tokens.file_name(),
start_loc,
tokens.previous_end_loc(),
Value_::U128(i),
))
}
_ => return Err(unexpected_token_error(tokens, "a value")),
}
} else {
None
};
Ok(spanned(
tokens.file_name(),
start_loc,
tokens.previous_end_loc(),
PragmaProperty_ { name, value },
))
}
//**************************************************************************************************
// File
//**************************************************************************************************
// Parse a file:
// File =
// (("address" <Address> ":") | <Module>)*
// | <UseDecl>* <MoveFunctionDecl>
//
// Note that "address" is not a token.
fn parse_file<'input>(tokens: &mut Lexer<'input>) -> Result<FileDefinition, Error> {
let f = if tokens.peek() == Tok::EOF
|| tokens.peek() == Tok::Module
|| tokens.peek() == Tok::NameValue
{
let mut v = vec![];
while tokens.peek() != Tok::EOF {
let m = if tokens.peek() == Tok::Module {
ModuleOrAddress::Module(parse_module(tokens)?)
} else {
let addr_name = parse_name(tokens)?;
if addr_name.value != "address" {
return Err(vec![(
addr_name.loc,
format!(
"Invalid address directive. Expected 'address' got '{}'",
addr_name.value
),
)]);
}
let start_loc = tokens.start_loc();
let addr = parse_address(tokens)?;
let end_loc = tokens.previous_end_loc();
consume_token(tokens, Tok::Colon)?;
let loc = make_loc(tokens.file_name(), start_loc, end_loc);
ModuleOrAddress::Address(loc, addr)
};
v.push(m);
}
FileDefinition::Modules(v)
} else {
let mut uses = vec![];
while tokens.peek() == Tok::Use {
uses.push(parse_use_decl(tokens)?);
}
let function = parse_function_decl(tokens, /* allow_native */ false)?;
if tokens.peek() != Tok::EOF {
let loc = current_token_loc(tokens);
return Err(vec![(
loc,
"Unexpected characters after end of main function".to_string(),
)]);
}
FileDefinition::Main(Main { uses, function })
};
Ok(f)
}
/// Parse the `input` string as a file of Move source code and return the
/// result as either a FileDefinition value or an Error. The `file` name
/// is used to identify source locations in error messages.
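/// For illustration (assumed usage, not from the original source):
/// let ast = parse_file_string("main.move", "fun main() { }")?;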
pub fn parse_file_string(file: &'static str, input: &str) -> Result<FileDefinition, Error> {
let mut tokens = Lexer::new(input, file);
tokens.advance()?;
parse_file(&mut tokens)
}
|
{
let f = parse_field(tokens)?;
let arg = if match_token(tokens, Tok::Colon)? {
parse_exp(tokens)?
} else {
sp(
f.loc(),
Exp_::Name(sp(f.loc(), ModuleAccess_::Name(f.0.clone())), None),
)
};
Ok((f, arg))
}
|
nodes.py
|
# -*- coding: utf-8 -*-
from collections.abc import MutableSequence, MutableMapping
from collections import OrderedDict
from itertools import chain
class Snode(MutableSequence):
"""A sequence object that knows it's parent"""
# this will allow easy subclassing to extend the container types that can
# be parsed
STYPES = (list, tuple)
MTYPES = (dict, OrderedDict)
def __init__(self, nodes=None, parent=None):
self.__children = list()
self._parent = None
self.parent = parent
if nodes:
self.extend(nodes)
@property
def parent(self):
return self._parent
@parent.setter
def parent(self, parent):
if parent is None:
self._parent = None
elif isinstance(parent, (Snode, Mnode)):
self._parent = parent
else:
raise TypeError("Type({}) cannot be a parent of \
Type({})".format(type(parent), type(self)))
def __getitem__(self, index):
return self.__children[index]
def __setitem__(self, index, node):
if isinstance(node, (Snode, Mnode)):
node.parent = self
self.__children[index] = node
elif isinstance(node, self.MTYPES):
self.__children[index] = Mnode(node, parent=self)
elif isinstance(node, self.STYPES):
self.__children[index] = Snode(node, parent=self)
else:
self.__children[index] = node
def __delitem__(self, index):
del self.__children[index]
def
|
(self):
return len(self.__children)
def insert(self, index, node):
"""insert something as a child of this node. If that something derives
from MutableSequence it will be converted into an Snode
"""
if isinstance(node, (Snode, Mnode)):
node.parent = self
self.__children.insert(index, node)
elif isinstance(node, self.MTYPES):
self.__children.insert(index, Mnode(node, parent=self))
elif isinstance(node, self.STYPES):
self.__children.insert(index, Snode(node, parent=self))
else:
self.__children.insert(index, node)
class Mnode(MutableMapping):
"""A mapping object that knows it's parent
Parameters
----------
nodes : mapping
parent: Mnode or Snode
"""
STYPES = (list, tuple)
MTYPES = (dict, OrderedDict)
def __init__(self, nodes=None, parent=None):
self.__children = OrderedDict()
self._parent = None
if parent:
self.parent = parent
if nodes:
self.update(nodes)
def __repr__(self):
return repr(self.__children)
@property
def parent(self):
return self._parent
@parent.setter
def parent(self, parent):
if parent is None:
self._parent = None
elif isinstance(parent, (Snode, Mnode)):
self._parent = parent
else:
raise TypeError("Type({}) cannot be a parent of \
Type({})".format(type(parent), type(self)))
def __iter__(self):
for node_name in self.__children:
yield node_name
def __getitem__(self, key):
return self.__children[key]
def __setitem__(self, key, node):
if isinstance(node, (Mnode, Snode)):
node.parent = self
self.__children[key] = node
elif isinstance(node, self.MTYPES):
self.__children[key] = Mnode(node, parent=self)
elif isinstance(node, self.STYPES):
self.__children[key] = Snode(node, parent=self)
else:
self.__children[key] = node
def __delitem__(self, key):
del self.__children[key]
def __len__(self):
return len(self.__children)
def update(self, mapping):
for key, node in mapping.items():
self[key] = node
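# Illustrative usage sketch (assumed, not part of the original module): plain containers
# are wrapped on assignment and each child records its parent, e.g.
# root = Mnode({"a": [1, 2, {"b": 3}]})
# assert isinstance(root["a"], Snode) and root["a"].parent is root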
|
__len__
|
slackbot.py
|
import requests, json
import camping_server2.config as config
class IncomingWebhook:
def send_msg(err_msg):
|
payload = {"channel": "dss17", "username": "bot", "text": err_msg}
response = requests.post(config.Config.WEBHOOK_URL, json.dumps(payload))
print(response)
|
|
vote.rs
|
#![allow(clippy::integer_arithmetic)]
use {
mundis_cli::{
check_balance,
cli::{process_command, request_and_confirm_airdrop, CliCommand, CliConfig},
spend_utils::SpendAmount,
},
mundis_cli_output::{parse_sign_only_reply_string, OutputFormat},
mundis_client::{
blockhash_query::{self, BlockhashQuery},
rpc_client::RpcClient,
},
mundis_faucet::faucet::run_local_faucet,
mundis_sdk::{
account_utils::StateMut,
commitment_config::CommitmentConfig,
signature::{Keypair, NullSigner, Signer},
},
mundis_streamer::socket::SocketAddrSpace,
mundis_test_validator::TestValidator,
mundis_vote_program::vote_state::{VoteAuthorize, VoteState, VoteStateVersions},
};
#[test]
fn test_vote_authorize_and_withdraw() {
let mint_keypair = Keypair::new();
let mint_pubkey = mint_keypair.pubkey();
let faucet_addr = run_local_faucet(mint_keypair, None);
let test_validator =
TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);
let rpc_client =
RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
let default_signer = Keypair::new();
let mut config = CliConfig::recent_for_tests();
config.json_rpc_url = test_validator.rpc_url();
config.signers = vec![&default_signer];
request_and_confirm_airdrop(&rpc_client, &config, &config.signers[0].pubkey(), 100_000)
.unwrap();
// Create vote account
let vote_account_keypair = Keypair::new();
let vote_account_pubkey = vote_account_keypair.pubkey();
config.signers = vec![&default_signer, &vote_account_keypair];
config.command = CliCommand::CreateVoteAccount {
vote_account: 1,
seed: None,
identity_account: 0,
authorized_voter: None,
authorized_withdrawer: config.signers[0].pubkey(),
commission: 0,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config).unwrap();
let vote_account = rpc_client
.get_account(&vote_account_keypair.pubkey())
.unwrap();
let vote_state: VoteStateVersions = vote_account.state().unwrap();
let authorized_withdrawer = vote_state.convert_to_current().authorized_withdrawer;
assert_eq!(authorized_withdrawer, config.signers[0].pubkey());
let expected_balance = rpc_client
.get_minimum_balance_for_rent_exemption(VoteState::size_of())
.unwrap()
.max(1);
check_balance!(expected_balance, &rpc_client, &vote_account_pubkey);
// Transfer in some more MUNDIS
config.signers = vec![&default_signer];
config.command = CliCommand::Transfer {
amount: SpendAmount::Some(10_000),
to: vote_account_pubkey,
from: 0,
sign_only: false,
dump_transaction_message: false,
allow_unfunded_recipient: true,
no_wait: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
derived_address_seed: None,
derived_address_program_id: None,
};
process_command(&config).unwrap();
let expected_balance = expected_balance + 10_000;
check_balance!(expected_balance, &rpc_client, &vote_account_pubkey);
// Authorize vote account withdrawal to another signer
let first_withdraw_authority = Keypair::new();
config.signers = vec![&default_signer];
config.command = CliCommand::VoteAuthorize {
vote_account_pubkey,
new_authorized_pubkey: first_withdraw_authority.pubkey(),
vote_authorize: VoteAuthorize::Withdrawer,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
authorized: 0,
new_authorized: None,
};
process_command(&config).unwrap();
let vote_account = rpc_client
.get_account(&vote_account_keypair.pubkey())
.unwrap();
let vote_state: VoteStateVersions = vote_account.state().unwrap();
let authorized_withdrawer = vote_state.convert_to_current().authorized_withdrawer;
assert_eq!(authorized_withdrawer, first_withdraw_authority.pubkey());
// Authorize vote account withdrawal to another signer with checked instruction
let withdraw_authority = Keypair::new();
config.signers = vec![&default_signer, &first_withdraw_authority];
config.command = CliCommand::VoteAuthorize {
vote_account_pubkey,
new_authorized_pubkey: withdraw_authority.pubkey(),
vote_authorize: VoteAuthorize::Withdrawer,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
authorized: 1,
new_authorized: Some(1),
};
process_command(&config).unwrap_err(); // unsigned by new authority should fail
config.signers = vec![
&default_signer,
&first_withdraw_authority,
&withdraw_authority,
];
config.command = CliCommand::VoteAuthorize {
vote_account_pubkey,
new_authorized_pubkey: withdraw_authority.pubkey(),
vote_authorize: VoteAuthorize::Withdrawer,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
authorized: 1,
new_authorized: Some(2),
};
process_command(&config).unwrap();
let vote_account = rpc_client
.get_account(&vote_account_keypair.pubkey())
.unwrap();
let vote_state: VoteStateVersions = vote_account.state().unwrap();
let authorized_withdrawer = vote_state.convert_to_current().authorized_withdrawer;
assert_eq!(authorized_withdrawer, withdraw_authority.pubkey());
// Withdraw from vote account
let destination_account = mundis_sdk::pubkey::new_rand(); // Send withdrawal to new account to make balance check easy
config.signers = vec![&default_signer, &withdraw_authority];
config.command = CliCommand::WithdrawFromVoteAccount {
vote_account_pubkey,
withdraw_authority: 1,
withdraw_amount: SpendAmount::Some(1_000),
destination_account_pubkey: destination_account,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config).unwrap();
let expected_balance = expected_balance - 1_000;
check_balance!(expected_balance, &rpc_client, &vote_account_pubkey);
check_balance!(1_000, &rpc_client, &destination_account);
// Re-assign validator identity
let new_identity_keypair = Keypair::new();
config.signers.push(&new_identity_keypair);
config.command = CliCommand::VoteUpdateValidator {
vote_account_pubkey,
new_identity_account: 2,
withdraw_authority: 1,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config).unwrap();
// Close vote account
let destination_account = mundis_sdk::pubkey::new_rand(); // Send withdrawal to new account to make balance check easy
config.signers = vec![&default_signer, &withdraw_authority];
config.command = CliCommand::CloseVoteAccount {
vote_account_pubkey,
withdraw_authority: 1,
destination_account_pubkey: destination_account,
memo: None,
fee_payer: 0,
};
process_command(&config).unwrap();
check_balance!(0, &rpc_client, &vote_account_pubkey);
check_balance!(expected_balance, &rpc_client, &destination_account);
}
#[test]
fn test_offline_vote_authorize_and_withdraw() {
let mint_keypair = Keypair::new();
let mint_pubkey = mint_keypair.pubkey();
let faucet_addr = run_local_faucet(mint_keypair, None);
let test_validator =
TestValidator::with_no_fees(mint_pubkey, Some(faucet_addr), SocketAddrSpace::Unspecified);
let rpc_client =
RpcClient::new_with_commitment(test_validator.rpc_url(), CommitmentConfig::processed());
let default_signer = Keypair::new();
let mut config_payer = CliConfig::recent_for_tests();
config_payer.json_rpc_url = test_validator.rpc_url();
config_payer.signers = vec![&default_signer];
let mut config_offline = CliConfig::recent_for_tests();
config_offline.json_rpc_url = String::default();
config_offline.command = CliCommand::ClusterVersion;
let offline_keypair = Keypair::new();
config_offline.signers = vec![&offline_keypair];
// Verify that we cannot reach the cluster
process_command(&config_offline).unwrap_err();
request_and_confirm_airdrop(
&rpc_client,
&config_payer,
&config_payer.signers[0].pubkey(),
100_000,
)
.unwrap();
check_balance!(100_000, &rpc_client, &config_payer.signers[0].pubkey());
request_and_confirm_airdrop(
&rpc_client,
&config_offline,
&config_offline.signers[0].pubkey(),
100_000,
)
.unwrap();
check_balance!(100_000, &rpc_client, &config_offline.signers[0].pubkey());
// Create vote account with specific withdrawer
let vote_account_keypair = Keypair::new();
let vote_account_pubkey = vote_account_keypair.pubkey();
config_payer.signers = vec![&default_signer, &vote_account_keypair];
config_payer.command = CliCommand::CreateVoteAccount {
vote_account: 1,
seed: None,
identity_account: 0,
authorized_voter: None,
authorized_withdrawer: offline_keypair.pubkey(),
commission: 0,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config_payer).unwrap();
let vote_account = rpc_client
.get_account(&vote_account_keypair.pubkey())
.unwrap();
let vote_state: VoteStateVersions = vote_account.state().unwrap();
let authorized_withdrawer = vote_state.convert_to_current().authorized_withdrawer;
assert_eq!(authorized_withdrawer, offline_keypair.pubkey());
let expected_balance = rpc_client
.get_minimum_balance_for_rent_exemption(VoteState::size_of())
.unwrap()
.max(1);
check_balance!(expected_balance, &rpc_client, &vote_account_pubkey);
// Transfer in some more MUNDIS
config_payer.signers = vec![&default_signer];
config_payer.command = CliCommand::Transfer {
amount: SpendAmount::Some(10_000),
to: vote_account_pubkey,
from: 0,
sign_only: false,
dump_transaction_message: false,
allow_unfunded_recipient: true,
no_wait: false,
blockhash_query: BlockhashQuery::All(blockhash_query::Source::Cluster),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
derived_address_seed: None,
derived_address_program_id: None,
};
process_command(&config_payer).unwrap();
let expected_balance = expected_balance + 10_000;
check_balance!(expected_balance, &rpc_client, &vote_account_pubkey);
// Authorize vote account withdrawal to another signer, offline
let withdraw_authority = Keypair::new();
let blockhash = rpc_client.get_latest_blockhash().unwrap();
config_offline.command = CliCommand::VoteAuthorize {
vote_account_pubkey,
new_authorized_pubkey: withdraw_authority.pubkey(),
vote_authorize: VoteAuthorize::Withdrawer,
sign_only: true,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::None(blockhash),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
authorized: 0,
new_authorized: None,
};
config_offline.output_format = OutputFormat::JsonCompact;
let sig_response = process_command(&config_offline).unwrap();
let sign_only = parse_sign_only_reply_string(&sig_response);
assert!(sign_only.has_all_signers());
let offline_presigner = sign_only
.presigner_of(&config_offline.signers[0].pubkey())
.unwrap();
config_payer.signers = vec![&offline_presigner];
config_payer.command = CliCommand::VoteAuthorize {
vote_account_pubkey,
new_authorized_pubkey: withdraw_authority.pubkey(),
vote_authorize: VoteAuthorize::Withdrawer,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::FeeCalculator(blockhash_query::Source::Cluster, blockhash),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
authorized: 0,
new_authorized: None,
};
process_command(&config_payer).unwrap();
let vote_account = rpc_client
.get_account(&vote_account_keypair.pubkey())
.unwrap();
let vote_state: VoteStateVersions = vote_account.state().unwrap();
let authorized_withdrawer = vote_state.convert_to_current().authorized_withdrawer;
assert_eq!(authorized_withdrawer, withdraw_authority.pubkey());
// Withdraw from vote account offline
let destination_account = mundis_sdk::pubkey::new_rand(); // Send withdrawal to new account to make balance check easy
let blockhash = rpc_client.get_latest_blockhash().unwrap();
let fee_payer_null_signer = NullSigner::new(&default_signer.pubkey());
config_offline.signers = vec![&fee_payer_null_signer, &withdraw_authority];
config_offline.command = CliCommand::WithdrawFromVoteAccount {
vote_account_pubkey,
withdraw_authority: 1,
withdraw_amount: SpendAmount::Some(1_000),
destination_account_pubkey: destination_account,
sign_only: true,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::None(blockhash),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
config_offline.output_format = OutputFormat::JsonCompact;
|
.unwrap();
config_payer.signers = vec![&default_signer, &offline_presigner];
config_payer.command = CliCommand::WithdrawFromVoteAccount {
vote_account_pubkey,
withdraw_authority: 1,
withdraw_amount: SpendAmount::Some(1_000),
destination_account_pubkey: destination_account,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::FeeCalculator(blockhash_query::Source::Cluster, blockhash),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config_payer).unwrap();
let expected_balance = expected_balance - 1_000;
check_balance!(expected_balance, &rpc_client, &vote_account_pubkey);
check_balance!(1_000, &rpc_client, &destination_account);
// Re-assign validator identity offline
let blockhash = rpc_client.get_latest_blockhash().unwrap();
let new_identity_keypair = Keypair::new();
let new_identity_null_signer = NullSigner::new(&new_identity_keypair.pubkey());
config_offline.signers = vec![
&fee_payer_null_signer,
&withdraw_authority,
&new_identity_null_signer,
];
config_offline.command = CliCommand::VoteUpdateValidator {
vote_account_pubkey,
new_identity_account: 2,
withdraw_authority: 1,
sign_only: true,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::None(blockhash),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config_offline).unwrap();
config_offline.output_format = OutputFormat::JsonCompact;
let sig_response = process_command(&config_offline).unwrap();
let sign_only = parse_sign_only_reply_string(&sig_response);
let offline_presigner = sign_only
.presigner_of(&config_offline.signers[1].pubkey())
.unwrap();
config_payer.signers = vec![&default_signer, &offline_presigner, &new_identity_keypair];
config_payer.command = CliCommand::VoteUpdateValidator {
vote_account_pubkey,
new_identity_account: 2,
withdraw_authority: 1,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::FeeCalculator(blockhash_query::Source::Cluster, blockhash),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config_payer).unwrap();
// Close vote account offline. Must use WithdrawFromVoteAccount and specify amount, since
// CloseVoteAccount requires RpcClient
let destination_account = mundis_sdk::pubkey::new_rand(); // Send withdrawal to new account to make balance check easy
config_offline.signers = vec![&fee_payer_null_signer, &withdraw_authority];
config_offline.command = CliCommand::WithdrawFromVoteAccount {
vote_account_pubkey,
withdraw_authority: 1,
withdraw_amount: SpendAmount::Some(expected_balance),
destination_account_pubkey: destination_account,
sign_only: true,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::None(blockhash),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config_offline).unwrap();
config_offline.output_format = OutputFormat::JsonCompact;
let sig_response = process_command(&config_offline).unwrap();
let sign_only = parse_sign_only_reply_string(&sig_response);
let offline_presigner = sign_only
.presigner_of(&config_offline.signers[1].pubkey())
.unwrap();
config_payer.signers = vec![&default_signer, &offline_presigner];
config_payer.command = CliCommand::WithdrawFromVoteAccount {
vote_account_pubkey,
withdraw_authority: 1,
withdraw_amount: SpendAmount::Some(expected_balance),
destination_account_pubkey: destination_account,
sign_only: false,
dump_transaction_message: false,
blockhash_query: BlockhashQuery::FeeCalculator(blockhash_query::Source::Cluster, blockhash),
nonce_account: None,
nonce_authority: 0,
memo: None,
fee_payer: 0,
};
process_command(&config_payer).unwrap();
check_balance!(0, &rpc_client, &vote_account_pubkey);
check_balance!(expected_balance, &rpc_client, &destination_account);
}
|
let sig_response = process_command(&config_offline).unwrap();
let sign_only = parse_sign_only_reply_string(&sig_response);
let offline_presigner = sign_only
.presigner_of(&config_offline.signers[1].pubkey())
|
conv_block.py
|
from torch import nn
class ConvolutionalBlock(nn.Module):
def __init__(self, in_channels=128, out_channels=256, kernel_size=3, padding=1, stride=1, padding_mode='zeros'):
super().__init__()
self.conv1 = nn.Conv1d(in_channels, out_channels, kernel_size=kernel_size, padding=padding, stride=stride,
|
def forward(self, x):
out = self.conv1(x)
out = self.bn1(out)
out = self.relu1(out)
return out
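# Illustrative usage sketch (shapes assumed, not from the original file):
# import torch
# block = ConvolutionalBlock(in_channels=128, out_channels=256)
# y = block(torch.randn(8, 128, 64))  # -> (8, 256, 64); kernel_size=3, padding=1, stride=1 keeps the length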
|
padding_mode=padding_mode)
self.bn1 = nn.BatchNorm1d(out_channels)
self.relu1 = nn.ReLU()
|
fsinfo.go
|
// Copyright 2019 Daniel Lorch. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package nfsv3
import (
"bytes"
"encoding/binary"
"fmt"
)
// FSInfo3Args (struct FSINFOargs)
type FSInfo3Args struct {
FileHandle []byte
}
// FSInfo3ResultOK (struct FSINFO3resok)
type FSInfo3ResultOK struct {
FSInfo3Result
Objattributes uint32 // TODO
Rtmax uint32
Rtpref uint32
Rtmult uint32
Wtmax uint32
Wtpref uint32
Wtmult uint32
Dtpref uint32
Maxfilesize uint64
Timedeltaseconds uint32
Timedeltananoseconds uint32
Properties uint32
}
// FSInfo3ResultFail (struct FSINFO3resfail)
type FSInfo3ResultFail struct {
FSInfo3Result
// TODO post_op_attr obj_attributes
}
// FSInfo3Result (union FSINFO3res)
type FSInfo3Result struct {
Status uint32
}
func nfsProcedure3FSInfo(procedureArguments []byte) (interface{}, error) {
// parse request
requestBuffer := bytes.NewBuffer(procedureArguments)
var fileHandleLength uint32
err := binary.Read(requestBuffer, binary.BigEndian, &fileHandleLength)
if err != nil {
fmt.Println("Error: ", err.Error())
// TODO
}
fsInfoArgs := FSInfo3Args{
FileHandle: make([]byte, fileHandleLength), // TODO unsafe?
}
err = binary.Read(requestBuffer, binary.BigEndian, &fsInfoArgs.FileHandle)
if err != nil {
fmt.Println("Error: ", err.Error())
// TODO
}
// prepare result
fsInfoResult := &FSInfo3ResultOK{
FSInfo3Result: FSInfo3Result{
Status: NFS3OK,
},
Objattributes: 0,
Rtmax: 131072,
Rtpref: 131072,
Rtmult: 4096,
|
Wtmult: 4096,
Dtpref: 4096,
Maxfilesize: 8796093022207,
Timedeltaseconds: 1,
Timedeltananoseconds: 0,
Properties: 0x0000001b,
}
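// Note (assumption, not stated in the original file): Properties 0x0000001b corresponds to the
// RFC 1813 flag bits FSF3_LINK|FSF3_SYMLINK|FSF3_HOMOGENEOUS|FSF3_CANSETTIME.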
return fsInfoResult, nil
}
|
Wtmax: 131072,
Wtpref: 131072,
|
io_test.go
|
package main
import (
"reflect"
"testing"
"github.com/google/go-cmp/cmp"
"go.promotedai.dev/gobenchdata/bench"
)
func Test_showHelp(t *testing.T) {
showHelp()
}
func Test_load(t *testing.T) {
type args struct {
files []string
}
tests := []struct {
name string
args args
want []bench.RunHistory
}{
{"empty benchmarks", args{[]string{"fixtures/empty-benchmarks.json"}}, []bench.RunHistory{{}}},
{"empty benchmarks 2", args{[]string{"fixtures/empty-benchmarks-2.json"}}, []bench.RunHistory{{}}},
{"benchmarks", args{[]string{"fixtures/sample-benchmarks.json"}}, []bench.RunHistory{{
{
Version: "a3b33d25b34e359f022b5a3dfc3607369143e74d",
Date: 1589695147,
Tags: []string{"ref=refs/tags/v1.0.0"},
Suites: []bench.Suite{
{
|
{Name: "BenchmarkFib10/Fib()", Runs: 2819560, NsPerOp: 419, Mem: bench.Mem{BytesPerOp: 0, AllocsPerOp: 0, MBPerSec: 0}, Custom: nil},
{Name: "BenchmarkFib10/Fib()-2", Runs: 2991747, NsPerOp: 412, Mem: bench.Mem{BytesPerOp: 0, AllocsPerOp: 0, MBPerSec: 0}, Custom: nil},
},
},
},
},
}}},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := load(tt.args.files...); !reflect.DeepEqual(got, tt.want) {
t.Errorf(cmp.Diff(got, tt.want))
}
})
}
}
|
Goos: "linux",
Goarch: "amd64",
Pkg: "go.promotedai.dev/gobenchdata/demo",
Benchmarks: []bench.Benchmark{
|
ResourceProvider.go
|
// Code generated by mockery v2.8.0. DO NOT EDIT.
package mocks
import (
context "context"
client "github.com/LINBIT/golinstor/client"
mock "github.com/stretchr/testify/mock"
)
// ResourceProvider is an autogenerated mock type for the ResourceProvider type
type ResourceProvider struct {
mock.Mock
}
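// Illustrative test usage (assumed; the usual testify/mockery pattern, not part of the generated code):
// m := &ResourceProvider{}
// m.On("Activate", mock.Anything, "res1", "node1").Return(nil)
// _ = m.Activate(context.Background(), "res1", "node1")
// m.AssertExpectations(t)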
// Activate provides a mock function with given fields: ctx, resName, nodeName
func (_m *ResourceProvider) Activate(ctx context.Context, resName, nodeName string) error {
ret := _m.Called(ctx, resName, nodeName)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok {
r0 = rf(ctx, resName, nodeName)
} else {
r0 = ret.Error(0)
}
return r0
}
// Autoplace provides a mock function with given fields: ctx, resName, apr
func (_m *ResourceProvider) Autoplace(ctx context.Context, resName string, apr client.AutoPlaceRequest) error {
ret := _m.Called(ctx, resName, apr)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, client.AutoPlaceRequest) error); ok {
r0 = rf(ctx, resName, apr)
} else {
r0 = ret.Error(0)
}
return r0
}
// Create provides a mock function with given fields: ctx, res
func (_m *ResourceProvider) Create(ctx context.Context, res client.ResourceCreate) error {
ret := _m.Called(ctx, res)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, client.ResourceCreate) error); ok {
r0 = rf(ctx, res)
} else {
r0 = ret.Error(0)
}
return r0
}
// CreateSnapshot provides a mock function with given fields: ctx, snapshot
func (_m *ResourceProvider) CreateSnapshot(ctx context.Context, snapshot client.Snapshot) error {
ret := _m.Called(ctx, snapshot)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, client.Snapshot) error); ok {
r0 = rf(ctx, snapshot)
} else {
r0 = ret.Error(0)
}
return r0
}
// Deactivate provides a mock function with given fields: ctx, resName, nodeName
func (_m *ResourceProvider) Deactivate(ctx context.Context, resName, nodeName string) error {
ret := _m.Called(ctx, resName, nodeName)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok {
r0 = rf(ctx, resName, nodeName)
} else {
r0 = ret.Error(0)
}
return r0
}
// Delete provides a mock function with given fields: ctx, resName, nodeName
func (_m *ResourceProvider) Delete(ctx context.Context, resName, nodeName string) error {
ret := _m.Called(ctx, resName, nodeName)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok {
r0 = rf(ctx, resName, nodeName)
} else {
r0 = ret.Error(0)
}
return r0
}
// DeleteSnapshot provides a mock function with given fields: ctx, resName, snapName, nodes
func (_m *ResourceProvider) DeleteSnapshot(ctx context.Context, resName, snapName string, nodes ...string) error {
_va := make([]interface{}, len(nodes))
for _i := range nodes {
_va[_i] = nodes[_i]
}
var _ca []interface{}
_ca = append(_ca, ctx, resName, snapName)
_ca = append(_ca, _va...)
ret := _m.Called(_ca...)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string, ...string) error); ok {
r0 = rf(ctx, resName, snapName, nodes...)
} else {
r0 = ret.Error(0)
}
return r0
}
// DisableDRBDProxy provides a mock function with given fields: ctx, resName, nodeAName, nodeBName
func (_m *ResourceProvider) DisableDRBDProxy(ctx context.Context, resName, nodeAName, nodeBName string) error {
ret := _m.Called(ctx, resName, nodeAName, nodeBName)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string, string) error); ok {
r0 = rf(ctx, resName, nodeAName, nodeBName)
} else {
r0 = ret.Error(0)
}
return r0
}
// Diskful provides a mock function with given fields: ctx, resName, nodeName, storagePoolName, props
func (_m *ResourceProvider) Diskful(ctx context.Context, resName, nodeName, storagePoolName string, props *client.ToggleDiskDiskfulProps) error {
ret := _m.Called(ctx, resName, nodeName, storagePoolName, props)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string, string, *client.ToggleDiskDiskfulProps) error); ok {
r0 = rf(ctx, resName, nodeName, storagePoolName, props)
} else {
r0 = ret.Error(0)
}
return r0
}
// Diskless provides a mock function with given fields: ctx, resName, nodeName, disklessPoolName
func (_m *ResourceProvider) Diskless(ctx context.Context, resName, nodeName, disklessPoolName string) error {
ret := _m.Called(ctx, resName, nodeName, disklessPoolName)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string, string) error); ok {
r0 = rf(ctx, resName, nodeName, disklessPoolName)
} else {
r0 = ret.Error(0)
}
return r0
}
// EnableDRBDProxy provides a mock function with given fields: ctx, resName, nodeAName, nodeBName
func (_m *ResourceProvider) EnableDRBDProxy(ctx context.Context, resName, nodeAName, nodeBName string) error {
ret := _m.Called(ctx, resName, nodeAName, nodeBName)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string, string) error); ok {
r0 = rf(ctx, resName, nodeAName, nodeBName)
} else {
r0 = ret.Error(0)
}
return r0
}
// EnableSnapshotShipping provides a mock function with given fields: ctx, resName, ship
func (_m *ResourceProvider) EnableSnapshotShipping(ctx context.Context, resName string, ship client.SnapshotShipping) error {
ret := _m.Called(ctx, resName, ship)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, client.SnapshotShipping) error); ok {
r0 = rf(ctx, resName, ship)
} else {
r0 = ret.Error(0)
}
return r0
}
// Get provides a mock function with given fields: ctx, resName, nodeName, opts
func (_m *ResourceProvider) Get(ctx context.Context, resName, nodeName string, opts ...*client.ListOpts) (client.Resource, error) {
_va := make([]interface{}, len(opts))
for _i := range opts {
_va[_i] = opts[_i]
}
var _ca []interface{}
_ca = append(_ca, ctx, resName, nodeName)
_ca = append(_ca, _va...)
ret := _m.Called(_ca...)
var r0 client.Resource
if rf, ok := ret.Get(0).(func(context.Context, string, string, ...*client.ListOpts) client.Resource); ok {
r0 = rf(ctx, resName, nodeName, opts...)
} else {
r0 = ret.Get(0).(client.Resource)
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, string, string, ...*client.ListOpts) error); ok {
r1 = rf(ctx, resName, nodeName, opts...)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetAll provides a mock function with given fields: ctx, resName, opts
func (_m *ResourceProvider) GetAll(ctx context.Context, resName string, opts ...*client.ListOpts) ([]client.Resource, error) {
_va := make([]interface{}, len(opts))
for _i := range opts {
_va[_i] = opts[_i]
}
var _ca []interface{}
_ca = append(_ca, ctx, resName)
_ca = append(_ca, _va...)
ret := _m.Called(_ca...)
var r0 []client.Resource
if rf, ok := ret.Get(0).(func(context.Context, string, ...*client.ListOpts) []client.Resource); ok {
r0 = rf(ctx, resName, opts...)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]client.Resource)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, string, ...*client.ListOpts) error); ok {
r1 = rf(ctx, resName, opts...)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetConnectionPropsInfos provides a mock function with given fields: ctx, resName, opts
func (_m *ResourceProvider) GetConnectionPropsInfos(ctx context.Context, resName string, opts ...*client.ListOpts) ([]client.PropsInfo, error) {
_va := make([]interface{}, len(opts))
for _i := range opts {
_va[_i] = opts[_i]
}
var _ca []interface{}
_ca = append(_ca, ctx, resName)
_ca = append(_ca, _va...)
ret := _m.Called(_ca...)
var r0 []client.PropsInfo
if rf, ok := ret.Get(0).(func(context.Context, string, ...*client.ListOpts) []client.PropsInfo); ok {
r0 = rf(ctx, resName, opts...)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]client.PropsInfo)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, string, ...*client.ListOpts) error); ok {
r1 = rf(ctx, resName, opts...)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetConnections provides a mock function with given fields: ctx, resName, nodeAName, nodeBName, opts
func (_m *ResourceProvider) GetConnections(ctx context.Context, resName, nodeAName, nodeBName string, opts ...*client.ListOpts) ([]client.ResourceConnection, error) {
_va := make([]interface{}, len(opts))
for _i := range opts {
_va[_i] = opts[_i]
}
var _ca []interface{}
_ca = append(_ca, ctx, resName, nodeAName, nodeBName)
_ca = append(_ca, _va...)
ret := _m.Called(_ca...)
var r0 []client.ResourceConnection
if rf, ok := ret.Get(0).(func(context.Context, string, string, string, ...*client.ListOpts) []client.ResourceConnection); ok {
r0 = rf(ctx, resName, nodeAName, nodeBName, opts...)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]client.ResourceConnection)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, string, string, string, ...*client.ListOpts) error); ok {
r1 = rf(ctx, resName, nodeAName, nodeBName, opts...)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetPropsInfos provides a mock function with given fields: ctx, resName, opts
func (_m *ResourceProvider) GetPropsInfos(ctx context.Context, resName string, opts ...*client.ListOpts) ([]client.PropsInfo, error) {
_va := make([]interface{}, len(opts))
for _i := range opts {
_va[_i] = opts[_i]
}
var _ca []interface{}
_ca = append(_ca, ctx, resName)
_ca = append(_ca, _va...)
ret := _m.Called(_ca...)
var r0 []client.PropsInfo
if rf, ok := ret.Get(0).(func(context.Context, string, ...*client.ListOpts) []client.PropsInfo); ok {
r0 = rf(ctx, resName, opts...)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]client.PropsInfo)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, string, ...*client.ListOpts) error); ok {
r1 = rf(ctx, resName, opts...)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetResourceView provides a mock function with given fields: ctx, opts
func (_m *ResourceProvider) GetResourceView(ctx context.Context, opts ...*client.ListOpts) ([]client.ResourceWithVolumes, error) {
_va := make([]interface{}, len(opts))
for _i := range opts {
_va[_i] = opts[_i]
}
var _ca []interface{}
_ca = append(_ca, ctx)
_ca = append(_ca, _va...)
ret := _m.Called(_ca...)
var r0 []client.ResourceWithVolumes
if rf, ok := ret.Get(0).(func(context.Context, ...*client.ListOpts) []client.ResourceWithVolumes); ok {
r0 = rf(ctx, opts...)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]client.ResourceWithVolumes)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, ...*client.ListOpts) error); ok {
r1 = rf(ctx, opts...)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetSnapshot provides a mock function with given fields: ctx, resName, snapName, opts
func (_m *ResourceProvider) GetSnapshot(ctx context.Context, resName, snapName string, opts ...*client.ListOpts) (client.Snapshot, error) {
_va := make([]interface{}, len(opts))
for _i := range opts {
_va[_i] = opts[_i]
}
var _ca []interface{}
_ca = append(_ca, ctx, resName, snapName)
_ca = append(_ca, _va...)
ret := _m.Called(_ca...)
var r0 client.Snapshot
if rf, ok := ret.Get(0).(func(context.Context, string, string, ...*client.ListOpts) client.Snapshot); ok {
r0 = rf(ctx, resName, snapName, opts...)
} else {
r0 = ret.Get(0).(client.Snapshot)
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, string, string, ...*client.ListOpts) error); ok {
r1 = rf(ctx, resName, snapName, opts...)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetSnapshotShippings provides a mock function with given fields: ctx, opts
func (_m *ResourceProvider) GetSnapshotShippings(ctx context.Context, opts ...*client.ListOpts) ([]client.SnapshotShippingStatus, error) {
_va := make([]interface{}, len(opts))
for _i := range opts {
_va[_i] = opts[_i]
}
var _ca []interface{}
_ca = append(_ca, ctx)
_ca = append(_ca, _va...)
ret := _m.Called(_ca...)
var r0 []client.SnapshotShippingStatus
if rf, ok := ret.Get(0).(func(context.Context, ...*client.ListOpts) []client.SnapshotShippingStatus); ok {
r0 = rf(ctx, opts...)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]client.SnapshotShippingStatus)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, ...*client.ListOpts) error); ok {
r1 = rf(ctx, opts...)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetSnapshotView provides a mock function with given fields: ctx, opts
func (_m *ResourceProvider) GetSnapshotView(ctx context.Context, opts ...*client.ListOpts) ([]client.Snapshot, error) {
_va := make([]interface{}, len(opts))
for _i := range opts {
_va[_i] = opts[_i]
}
var _ca []interface{}
_ca = append(_ca, ctx)
_ca = append(_ca, _va...)
ret := _m.Called(_ca...)
var r0 []client.Snapshot
if rf, ok := ret.Get(0).(func(context.Context, ...*client.ListOpts) []client.Snapshot); ok {
r0 = rf(ctx, opts...)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]client.Snapshot)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, ...*client.ListOpts) error); ok {
r1 = rf(ctx, opts...)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetSnapshots provides a mock function with given fields: ctx, resName, opts
func (_m *ResourceProvider) GetSnapshots(ctx context.Context, resName string, opts ...*client.ListOpts) ([]client.Snapshot, error) {
_va := make([]interface{}, len(opts))
for _i := range opts {
_va[_i] = opts[_i]
}
var _ca []interface{}
_ca = append(_ca, ctx, resName)
_ca = append(_ca, _va...)
ret := _m.Called(_ca...)
var r0 []client.Snapshot
if rf, ok := ret.Get(0).(func(context.Context, string, ...*client.ListOpts) []client.Snapshot); ok {
r0 = rf(ctx, resName, opts...)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]client.Snapshot)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, string, ...*client.ListOpts) error); ok {
r1 = rf(ctx, resName, opts...)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetVolume provides a mock function with given fields: ctx, resName, nodeName, volNr, opts
func (_m *ResourceProvider) GetVolume(ctx context.Context, resName, nodeName string, volNr int, opts ...*client.ListOpts) (client.Volume, error) {
_va := make([]interface{}, len(opts))
for _i := range opts {
_va[_i] = opts[_i]
}
var _ca []interface{}
_ca = append(_ca, ctx, resName, nodeName, volNr)
_ca = append(_ca, _va...)
ret := _m.Called(_ca...)
var r0 client.Volume
if rf, ok := ret.Get(0).(func(context.Context, string, string, int, ...*client.ListOpts) client.Volume); ok {
r0 = rf(ctx, resName, nodeName, volNr, opts...)
} else {
r0 = ret.Get(0).(client.Volume)
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, string, string, int, ...*client.ListOpts) error); ok {
r1 = rf(ctx, resName, nodeName, volNr, opts...)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetVolumeDefinitionPropsInfos provides a mock function with given fields: ctx, resName, opts
func (_m *ResourceProvider) GetVolumeDefinitionPropsInfos(ctx context.Context, resName string, opts ...*client.ListOpts) ([]client.PropsInfo, error) {
_va := make([]interface{}, len(opts))
for _i := range opts {
_va[_i] = opts[_i]
}
var _ca []interface{}
_ca = append(_ca, ctx, resName)
_ca = append(_ca, _va...)
ret := _m.Called(_ca...)
var r0 []client.PropsInfo
if rf, ok := ret.Get(0).(func(context.Context, string, ...*client.ListOpts) []client.PropsInfo); ok {
r0 = rf(ctx, resName, opts...)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]client.PropsInfo)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, string, ...*client.ListOpts) error); ok {
r1 = rf(ctx, resName, opts...)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetVolumePropsInfos provides a mock function with given fields: ctx, resName, nodeName, opts
func (_m *ResourceProvider) GetVolumePropsInfos(ctx context.Context, resName, nodeName string, opts ...*client.ListOpts) ([]client.PropsInfo, error) {
_va := make([]interface{}, len(opts))
for _i := range opts {
_va[_i] = opts[_i]
}
var _ca []interface{}
_ca = append(_ca, ctx, resName, nodeName)
_ca = append(_ca, _va...)
ret := _m.Called(_ca...)
var r0 []client.PropsInfo
if rf, ok := ret.Get(0).(func(context.Context, string, string, ...*client.ListOpts) []client.PropsInfo); ok {
r0 = rf(ctx, resName, nodeName, opts...)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]client.PropsInfo)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, string, string, ...*client.ListOpts) error); ok {
r1 = rf(ctx, resName, nodeName, opts...)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// GetVolumes provides a mock function with given fields: ctx, resName, nodeName, opts
func (_m *ResourceProvider) GetVolumes(ctx context.Context, resName, nodeName string, opts ...*client.ListOpts) ([]client.Volume, error) {
_va := make([]interface{}, len(opts))
for _i := range opts {
_va[_i] = opts[_i]
}
var _ca []interface{}
_ca = append(_ca, ctx, resName, nodeName)
_ca = append(_ca, _va...)
ret := _m.Called(_ca...)
var r0 []client.Volume
if rf, ok := ret.Get(0).(func(context.Context, string, string, ...*client.ListOpts) []client.Volume); ok {
r0 = rf(ctx, resName, nodeName, opts...)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]client.Volume)
}
}
|
r1 = ret.Error(1)
}
return r0, r1
}
// MakeAvailable provides a mock function with given fields: ctx, resName, nodeName, makeAvailable
func (_m *ResourceProvider) MakeAvailable(ctx context.Context, resName, nodeName string, makeAvailable client.ResourceMakeAvailable) error {
ret := _m.Called(ctx, resName, nodeName, makeAvailable)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string, client.ResourceMakeAvailable) error); ok {
r0 = rf(ctx, resName, nodeName, makeAvailable)
} else {
r0 = ret.Error(0)
}
return r0
}
// Migrate provides a mock function with given fields: ctx, resName, fromNodeName, toNodeName, storagePoolName
func (_m *ResourceProvider) Migrate(ctx context.Context, resName, fromNodeName, toNodeName, storagePoolName string) error {
ret := _m.Called(ctx, resName, fromNodeName, toNodeName, storagePoolName)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string, string, string) error); ok {
r0 = rf(ctx, resName, fromNodeName, toNodeName, storagePoolName)
} else {
r0 = ret.Error(0)
}
return r0
}
// Modify provides a mock function with given fields: ctx, resName, nodeName, props
func (_m *ResourceProvider) Modify(ctx context.Context, resName, nodeName string, props client.GenericPropsModify) error {
ret := _m.Called(ctx, resName, nodeName, props)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string, client.GenericPropsModify) error); ok {
r0 = rf(ctx, resName, nodeName, props)
} else {
r0 = ret.Error(0)
}
return r0
}
// ModifyConnection provides a mock function with given fields: ctx, resName, nodeAName, nodeBName, props
func (_m *ResourceProvider) ModifyConnection(ctx context.Context, resName, nodeAName, nodeBName string, props client.GenericPropsModify) error {
ret := _m.Called(ctx, resName, nodeAName, nodeBName, props)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string, string, client.GenericPropsModify) error); ok {
r0 = rf(ctx, resName, nodeAName, nodeBName, props)
} else {
r0 = ret.Error(0)
}
return r0
}
// ModifyDRBDProxy provides a mock function with given fields: ctx, resName, props
func (_m *ResourceProvider) ModifyDRBDProxy(ctx context.Context, resName string, props client.DrbdProxyModify) error {
ret := _m.Called(ctx, resName, props)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, client.DrbdProxyModify) error); ok {
r0 = rf(ctx, resName, props)
} else {
r0 = ret.Error(0)
}
return r0
}
// ModifyVolume provides a mock function with given fields: ctx, resName, nodeName, volNr, props
func (_m *ResourceProvider) ModifyVolume(ctx context.Context, resName, nodeName string, volNr int, props client.GenericPropsModify) error {
ret := _m.Called(ctx, resName, nodeName, volNr, props)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string, int, client.GenericPropsModify) error); ok {
r0 = rf(ctx, resName, nodeName, volNr, props)
} else {
r0 = ret.Error(0)
}
return r0
}
// QueryMaxVolumeSize provides a mock function with given fields: ctx, filter
func (_m *ResourceProvider) QueryMaxVolumeSize(ctx context.Context, filter client.AutoSelectFilter) (client.MaxVolumeSizes, error) {
ret := _m.Called(ctx, filter)
var r0 client.MaxVolumeSizes
if rf, ok := ret.Get(0).(func(context.Context, client.AutoSelectFilter) client.MaxVolumeSizes); ok {
r0 = rf(ctx, filter)
} else {
r0 = ret.Get(0).(client.MaxVolumeSizes)
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, client.AutoSelectFilter) error); ok {
r1 = rf(ctx, filter)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// RestoreSnapshot provides a mock function with given fields: ctx, origResName, snapName, snapRestoreConf
func (_m *ResourceProvider) RestoreSnapshot(ctx context.Context, origResName, snapName string, snapRestoreConf client.SnapshotRestore) error {
ret := _m.Called(ctx, origResName, snapName, snapRestoreConf)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string, client.SnapshotRestore) error); ok {
r0 = rf(ctx, origResName, snapName, snapRestoreConf)
} else {
r0 = ret.Error(0)
}
return r0
}
// RestoreVolumeDefinitionSnapshot provides a mock function with given fields: ctx, origResName, snapName, snapRestoreConf
func (_m *ResourceProvider) RestoreVolumeDefinitionSnapshot(ctx context.Context, origResName, snapName string, snapRestoreConf client.SnapshotRestore) error {
ret := _m.Called(ctx, origResName, snapName, snapRestoreConf)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string, client.SnapshotRestore) error); ok {
r0 = rf(ctx, origResName, snapName, snapRestoreConf)
} else {
r0 = ret.Error(0)
}
return r0
}
// RollbackSnapshot provides a mock function with given fields: ctx, resName, snapName
func (_m *ResourceProvider) RollbackSnapshot(ctx context.Context, resName, snapName string) error {
ret := _m.Called(ctx, resName, snapName)
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, string, string) error); ok {
r0 = rf(ctx, resName, snapName)
} else {
r0 = ret.Error(0)
}
return r0
}
|
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, string, string, ...*client.ListOpts) error); ok {
r1 = rf(ctx, resName, nodeName, opts...)
} else {
|
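The Go file above is a mockery-style mock of the LINSTOR client's ResourceProvider interface built on testify's mock.Mock: each method packs its (possibly variadic) arguments, dispatches through _m.Called(...), and then either invokes a caller-supplied function or hands back static values via ret.Get(n) / ret.Error(n). The sketch below shows how a test would typically drive such a mock; the package clause, import paths and concrete argument values are assumptions rather than anything taken from this file, while the On(...).Return(...) and AssertExpectations calls are standard testify API.

// mock_usage_sketch_test.go — illustrative only; assumes it lives alongside the
// generated mock and that the LINSTOR client types come from
// github.com/LINBIT/golinstor/client (adjust both to your module layout).
package mocks

import (
	"context"
	"testing"

	"github.com/LINBIT/golinstor/client"
	"github.com/stretchr/testify/mock"
)

func TestResourceProviderMockSketch(t *testing.T) {
	m := &ResourceProvider{} // the generated mock embeds testify's mock.Mock

	// Static return values are consumed by the ret.Get(0) / ret.Error(1) branches.
	m.On("GetVolumes", mock.Anything, "res1", "node1").
		Return([]client.Volume{{}}, nil)

	// Passing a function with the exact signature exercises the
	// ret.Get(0).(func(...)) branch, so the return value is computed per call.
	m.On("GetVolume", mock.Anything, "res1", "node1", 0).
		Return(func(ctx context.Context, res, node string, nr int, opts ...*client.ListOpts) client.Volume {
			return client.Volume{}
		}, nil)

	if vols, err := m.GetVolumes(context.Background(), "res1", "node1"); err != nil || len(vols) != 1 {
		t.Fatalf("unexpected GetVolumes result: %v, %v", vols, err)
	}
	if _, err := m.GetVolume(context.Background(), "res1", "node1", 0); err != nil {
		t.Fatalf("unexpected GetVolume error: %v", err)
	}

	m.AssertExpectations(t)
}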
items.module.ts
|
import { Module } from '@nestjs/common';
|
@Module({
imports: [MongooseModule.forFeature([{ name: 'Item', schema: ItemSchema }])],
controllers: [ItemsController],
providers: [ItemsService],
})
export class ItemsModule {}
|
import { MongooseModule } from '@nestjs/mongoose';
import { ItemSchema } from './schemas/item.schema';
import { ItemsController } from './items.controller';
import { ItemsService } from './items.service';
|
emu-main.js
|
window.EJS_main = function(_0xa88a13, _0x17edbf, _0x2c1832) {
'use strict';
_0x2c1832.r(_0x17edbf);
_0x2c1832(0xa2), _0x2c1832(0x16c), _0x2c1832(0x16d);
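// Default player configuration: volume and mute state, i18n labels, event listeners,
// DOM selectors and the CSS class names used by the emulator UI.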
var _0x39ca5e = {
'volume': 0.5,
'muted': !0x1,
'i18n': {
'restart': 'Restart',
'play': 'Play',
'pause': 'Pause',
'played': 'Played',
'volume': 'Volume',
'mute': 'Mute (F9)',
'unmute': 'Unmute (F9)',
'enterFullscreen': 'Enter fullscreen',
'exitFullscreen': 'Exit fullscreen',
'settings': 'Settings',
'saveState': 'Save State (Shift + F2)',
'loadState': 'Load State (Shift + F4)',
'screenRecord': 'Start Screen Recording',
'netplay': 'Netplay',
'gamepad': 'Control Settings',
'cheat': 'Cheats',
'menuBack': 'Go back to previous menu',
'normal': 'Normal',
'all': 'All',
'reset': 'Reset',
'disabled': 'Disabled',
'enabled': 'Enabled',
'playNow': 'Play Now'
},
'listeners': {
'play': null,
'pause': null,
'restart': null,
'rewind': null,
'mute': null,
'volume': null,
'fullscreen': null
},
'events': ['ready'],
'selectors': {
'editable': 'input, textarea, select, [contenteditable]',
'container': '.ejs',
'controls': {
'container': null,
'wrapper': '.ejs__controls'
},
'buttons': {
'play': '[data-btn="play"]',
'pause': '[data-btn="pause"]',
'mute': '[data-btn="mute"]',
'fullscreen': '[data-btn="fullscreen"]',
'settings': '[data-btn="settings"]',
'saveState': '[data-btn="save-state"]',
'loadState': '[data-btn="load-state"]',
'screenRecord': '[data-btn="screen-record"]',
'gamepad': '[data-btn="gamepad"]',
'netplay': '[data-btn="netplay"]',
'cheat': '[data-btn="cheat"]'
},
'inputs': {
'volume': '[data-range="volume"]'
}
},
'classNames': {
'type': 'ejs--video',
'video': 'ejs__video-wrapper',
'poster': 'ejs__poster',
'posterEnabled': 'ejs__poster-enabled',
'control': 'ejs__control',
'controlPressed': 'ejs__control--pressed',
'playing': 'ejs--playing',
'paused': 'ejs--paused',
'stopped': 'ejs--stopped',
'loading': 'ejs--loading',
'hover': 'ejs--hover',
'tooltip': 'ejs__tooltip',
'hidden': 'ejs__sr-only',
'hideControls': 'ejs--hide-controls',
'isIos': 'ejs--is-ios',
'isTouch': 'ejs--is-touch',
'uiSupported': 'ejs--full-ui',
'noTransition': 'ejs--no-transition',
'menu': {
'value': 'ejs__menu__value',
'badge': 'ejs__badge',
'open': 'ejs--menu-open'
},
'fullscreen': {
'enabled': 'ejs--fullscreen-enabled',
'fallback': 'ejs--fullscreen-fallback'
},
'tabFocus': 'ejs__tab-focus'
}
},
_0x3d61f9 = _0x2c1832(0x38),
_0x5127f4 = _0x2c1832(0x9d),
_0x48e5ff = _0x2c1832.n(_0x5127f4),
_0x406e79 = function(_0x2aa74f) {
return null != _0x2aa74f ? _0x2aa74f.constructor : null;
},
_0x1b0c2c = function(_0x8ec039, _0x1cb5ff) {
return Boolean(_0x8ec039 && _0x1cb5ff && _0x8ec039 instanceof _0x1cb5ff);
},
_0x19f739 = function(_0x3e8e1f) {
return null == _0x3e8e1f;
},
_0x4fc5a1 = function(_0x37a418) {
return _0x406e79(_0x37a418) === Object;
},
_0x34f3e8 = function(_0x2d2e5c) {
return _0x406e79(_0x2d2e5c) === String;
},
_0x1ca546 = function(_0x4d698f) {
return Array.isArray(_0x4d698f);
},
_0x37cc52 = function(_0x3a8b14) {
return _0x1b0c2c(_0x3a8b14, NodeList);
},
_0x555ee8 = function(_0x1fde53) {
return _0x19f739(_0x1fde53) || (_0x34f3e8(_0x1fde53) || _0x1ca546(_0x1fde53) || _0x37cc52(_0x1fde53)) && !_0x1fde53.length || _0x4fc5a1(_0x1fde53) && !Object.keys(_0x1fde53).length;
},
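// Type-check helpers ("is" utility): nullOrUndefined, object, number, string, boolean,
// function, array, nodeList, element, event, url, empty, etc.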
_0x1e2c68 = {
'nullOrUndefined': _0x19f739,
'object': _0x4fc5a1,
'number': function(_0x1d08d4) {
return _0x406e79(_0x1d08d4) === Number && !Number.isNaN(_0x1d08d4);
},
'string': _0x34f3e8,
'boolean': function(_0x340d71) {
return _0x406e79(_0x340d71) === Boolean;
},
'function': function(_0x10f562) {
return _0x406e79(_0x10f562) === Function;
},
'array': _0x1ca546,
'weakMap': function(_0x70b79f) {
return _0x1b0c2c(_0x70b79f, WeakMap);
},
'nodeList': _0x37cc52,
'element': function(_0xc21062) {
return _0x1b0c2c(_0xc21062, Element);
},
'textNode': function(_0x3e6b67) {
return _0x406e79(_0x3e6b67) === Text;
},
'event': function(_0x247601) {
return _0x1b0c2c(_0x247601, Event);
},
'keyboardEvent': function(_0x2b3224) {
return _0x1b0c2c(_0x2b3224, KeyboardEvent);
},
'cue': function(_0x57ee67) {
return _0x1b0c2c(_0x57ee67, window.TextTrackCue) || _0x1b0c2c(_0x57ee67, window.VTTCue);
},
'track': function(_0x23318b) {
return _0x1b0c2c(_0x23318b, TextTrack) || !_0x19f739(_0x23318b) && _0x34f3e8(_0x23318b.kind);
},
'url': function(_0xd61cf4) {
if (_0x1b0c2c(_0xd61cf4, window.URL)) return true;
var _0x17edbf = _0xd61cf4;
_0xd61cf4.startsWith('http://') && _0xd61cf4.startsWith('https://') || (_0x17edbf = 'http://' .concat(_0xd61cf4));
try {
return !_0x555ee8(new URL(_0x17edbf).hostname);
} catch (_0x5e6cd2) {
return !0x1;
}
},
'empty': _0x555ee8
},
_0x168698 = function() {
var _0xa88a13 = !0x1;
try {
var _0x17edbf = Object.defineProperty({}, 'passive', {
'get': function() {
return _0xa88a13 = true, null;
}
});
window.addEventListener('test', null, _0x17edbf), window.removeEventListener('test', null, _0x17edbf);
} catch (_0x1db3b5) {}
return _0xa88a13;
}();
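// Event binding helper: attaches or detaches a callback for one or more space-separated
// event types, using passive/capture options when supported; the on/once/off wrappers
// defined below delegate to it.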
function _0x1ef215(_0xa482e6, _0x474854, _0xc30d6e) {
var _0x57056f = this,
_0x3f468e = arguments.length > 0x3 && void 0x0 !== arguments[0x3] && arguments[0x3],
_0x79ce58 = !(arguments.length > 0x4 && void 0x0 !== arguments[0x4]) || arguments[0x4],
_0x23848b = arguments.length > 0x5 && void 0x0 !== arguments[0x5] && arguments[0x5];
if (_0xa482e6 && 'addEventListener' in _0xa482e6 && !_0x1e2c68.empty(_0x474854) && _0x1e2c68.function(_0xc30d6e)) {
var _0x4a2da0 = _0x474854.split(' '),
_0x40de8d = _0x23848b;
_0x168698 && (_0x40de8d = {
'passive': _0x79ce58,
'capture': _0x23848b
}), _0x4a2da0.forEach(function(_0x487d3c) {
_0x57056f && _0x57056f.eventListeners && _0x3f468e && _0x57056f.eventListeners.push({
'element': _0xa482e6,
'type': _0x487d3c,
'callback': _0xc30d6e,
'options': _0x40de8d
}), _0xa482e6[_0x3f468e ? 'addEventListener' : 'removeEventListener'](_0x487d3c, _0xc30d6e, _0x40de8d);
});
}
}
function _0x1093f4(_0x4d8d94) {
var _0x17edbf = arguments.length > 0x1 && void 0x0 !== arguments[0x1] ? arguments[0x1] : '',
_0x2c1832 = arguments.length > 0x2 ? arguments[0x2] : void 0x0,
_0x4adcdf = !(arguments.length > 0x3 && void 0x0 !== arguments[0x3]) || arguments[0x3],
_0x2f85bc = arguments.length > 0x4 && void 0x0 !== arguments[0x4] && arguments[0x4];
_0x1ef215.call(this, _0x4d8d94, _0x17edbf, _0x2c1832, true, _0x4adcdf, _0x2f85bc);
}
function _0x20109b(_0x550b1a) {
var _0x17edbf = arguments.length > 0x1 && void 0x0 !== arguments[0x1] ? arguments[0x1] : '',
_0x2c1832 = arguments.length > 0x2 ? arguments[0x2] : void 0x0,
_0x5e10a2 = !(arguments.length > 0x3 && void 0x0 !== arguments[0x3]) || arguments[0x3],
_0x5c9512 = arguments.length > 0x4 && void 0x0 !== arguments[0x4] && arguments[0x4];
_0x1ef215.call(this, _0x550b1a, _0x17edbf, _0x2c1832, !0x1, _0x5e10a2, _0x5c9512);
}
function _0x455c85(_0x4e4b15) {
var _0x17edbf = arguments.length > 0x1 && void 0x0 !== arguments[0x1] ? arguments[0x1] : '',
_0x2c1832 = arguments.length > 0x2 ? arguments[0x2] : void 0x0,
_0x2b8c91 = !(arguments.length > 0x3 && void 0x0 !== arguments[0x3]) || arguments[0x3],
_0x23e991 = arguments.length > 0x4 && void 0x0 !== arguments[0x4] && arguments[0x4];
_0x1ef215.call(this, _0x4e4b15, _0x17edbf, function _0x5127f4() {
_0x20109b(_0x4e4b15, _0x17edbf, _0x5127f4, _0x2b8c91, _0x23e991);
for (var _0x1fe440 = arguments.length, _0x1d3219 = new Array(_0x1fe440), _0x54457c = 0x0; _0x54457c < _0x1fe440; _0x54457c++) _0x1d3219[_0x54457c] = arguments[_0x54457c];
_0x2c1832.apply(this, _0x1d3219);
}, true, _0x2b8c91, _0x23e991);
}
function _0xbae705(_0x975ccc) {
var _0x17edbf = arguments.length > 0x1 && void 0x0 !== arguments[0x1] ? arguments[0x1] : '',
_0x2c1832 = arguments.length > 0x2 && void 0x0 !== arguments[0x2] && arguments[0x2],
_0x57297b = arguments.length > 0x3 && void 0x0 !== arguments[0x3] ? arguments[0x3] : {};
if (_0x1e2c68.element(_0x975ccc) && !_0x1e2c68.empty(_0x17edbf)) {
var _0x4f631e = new CustomEvent(_0x17edbf, {
'bubbles': _0x2c1832,
'detail': Object.assign({}, _0x57297b, {
'emulator': this
})
});
_0x975ccc.dispatchEvent(_0x4f631e);
}
}
var _0x55349e = _0x2c1832(0x9e),
_0x42870c = _0x2c1832.n(_0x55349e);
function _0x30f85e(_0x3ed035, _0xd96db3) {
return function(_0x18bbf3) {
if (Array.isArray(_0x18bbf3)) return _0x18bbf3;
}(_0x3ed035) || function(_0x2bb2ab, _0x4993ee) {
var _0x2c1832 = [],
_0x629f39 = true,
_0xde5653 = !0x1,
_0x346a0d = void 0x0;
try {
for (var _0x57d79d, _0x557ef0 = _0x2bb2ab[Symbol.iterator](); !(_0x629f39 = (_0x57d79d = _0x557ef0.next()).done) && (_0x2c1832.push(_0x57d79d.value), !_0x4993ee || _0x2c1832.length !== _0x4993ee); _0x629f39 = true);
} catch (_0x54a546) {
_0xde5653 = true, _0x346a0d = _0x54a546;
} finally {
try {
_0x629f39 || null == _0x557ef0.return || _0x557ef0.return();
} finally {
if (_0xde5653) throw _0x346a0d;
}
}
return _0x2c1832;
}(_0x3ed035, _0xd96db3) || function() {
throw new TypeError('Invalid attempt to destructure non-iterable instance');
}();
}
function _0x580edd(_0x39eb34, _0x86c3db) {
var _0x2c1832 = _0x39eb34.length ? _0x39eb34 : [_0x39eb34];
Array.from(_0x2c1832).reverse().forEach(function(_0x35fc48, _0x79e6bf) {
var _0x1510e4 = _0x79e6bf > 0x0 ? _0x86c3db.cloneNode(true) : _0x86c3db,
_0x247f26 = _0x35fc48.parentNode,
_0x3a5422 = _0x35fc48.nextSibling;
_0x1510e4.appendChild(_0x35fc48), _0x3a5422 ? _0x247f26.insertBefore(_0x1510e4, _0x3a5422) : _0x247f26.appendChild(_0x1510e4);
});
}
function _0x154f99(_0x5e4eb3, _0x1acdad) {
_0x1e2c68.element(_0x5e4eb3) && !_0x1e2c68.empty(_0x1acdad) && Object.entries(_0x1acdad).filter(function(_0x37e04b) {
var _0x1acdad = _0x30f85e(_0x37e04b, 0x2)[0x1];
return !_0x1e2c68.nullOrUndefined(_0x1acdad);
}).forEach(function(_0x4993dd) {
var _0x2c1832 = _0x30f85e(_0x4993dd, 0x2),
_0x24704f = _0x2c1832[0x0],
_0x52ac21 = _0x2c1832[0x1];
return _0x5e4eb3.setAttribute(_0x24704f, _0x52ac21);
});
}
function _0x428003(_0x1397c4, _0x1ec8c9, _0x1a02af) {
var _0xca6ad9 = document.createElement(_0x1397c4);
return _0x1e2c68.object(_0x1ec8c9) && _0x154f99(_0xca6ad9, _0x1ec8c9), _0x1e2c68.string(_0x1a02af) && (_0xca6ad9.innerText = _0x1a02af), _0xca6ad9;
}
function _0x12a55d(_0x27d9d8) {
_0x1e2c68.nodeList(_0x27d9d8) || _0x1e2c68.array(_0x27d9d8) ? Array.from(_0x27d9d8).forEach(_0x12a55d) : _0x1e2c68.element(_0x27d9d8) && _0x1e2c68.element(_0x27d9d8.parentNode) && _0x27d9d8.parentNode.removeChild(_0x27d9d8);
}
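// Converts a simple CSS selector ('.class', '#id', '[attr="value"]') into an attribute
// object for element creation.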
function _0xa949a8(_0x3bc809, _0x215e7f) {
if (!_0x1e2c68.string(_0x3bc809) || _0x1e2c68.empty(_0x3bc809)) return {};
var _0x2c1832 = {},
_0x3ab896 = _0x215e7f;
return _0x3bc809.split(',').forEach(function(_0x2b6c5b) {
var _0x215e7f = _0x2b6c5b.trim(),
_0x2b2f76 = _0x215e7f.replace('.', ''),
_0x499377 = _0x215e7f.replace(/[[\]]/g, '').split('='),
_0x1db3b8 = _0x499377[0x0],
_0x520bcf = _0x499377.length > 0x1 ? _0x499377[0x1].replace(/["']/g, '') : '';
switch (_0x215e7f.charAt(0x0)) {
case '.':
_0x1e2c68.object(_0x3ab896) && _0x1e2c68.string(_0x3ab896.class) && (_0x3ab896.class += ' ' .concat(_0x2b2f76)), _0x2c1832.class = _0x2b2f76;
break;
case '#':
_0x2c1832.id = _0x215e7f.replace('#', '');
break;
case '[':
_0x2c1832[_0x1db3b8] = _0x520bcf;
}
}), _0x2c1832;
}
function _0x132da7(_0x275729, _0x511d6f) {
if (_0x1e2c68.element(_0x275729)) {
var _0x2c1832 = _0x511d6f;
_0x1e2c68.boolean(_0x2c1832) || (_0x2c1832 = !_0x275729.hidden), _0x2c1832 ? _0x275729.setAttribute('hidden', '') : _0x275729.removeAttribute('hidden');
}
}
function _0x3a8e2f(_0x2bf197, _0x46e2bb, _0x3b6b0b) {
if (_0x1e2c68.nodeList(_0x2bf197)) return Array.from(_0x2bf197).map(function(_0x543362) {
return _0x3a8e2f(_0x543362, _0x46e2bb, _0x3b6b0b);
});
if (_0x1e2c68.element(_0x2bf197)) {
var _0x34f25e = 'toggle';
return void 0x0 !== _0x3b6b0b && (_0x34f25e = _0x3b6b0b ? 'add' : 'remove'), _0x2bf197.classList[_0x34f25e](_0x46e2bb), _0x2bf197.classList.contains(_0x46e2bb);
}
return !0x1;
}
function _0x350d73(_0x273517, _0x36ffc8) {
return _0x1e2c68.element(_0x273517) && _0x273517.classList.contains(_0x36ffc8);
}
function _0x13f491(_0x11bf98, _0x674fa2) {
var _0x2c1832 = Element.prototype;
return (_0x2c1832.matches || _0x2c1832.webkitMatchesSelector || _0x2c1832.mozMatchesSelector || _0x2c1832.msMatchesSelector || function() {
return Array.from(document.querySelectorAll(_0x674fa2)).includes(this);
}).call(_0x11bf98, _0x674fa2);
}
function _0x23ffa1(_0x16eec8) {
return this.elements.container.querySelectorAll(_0x16eec8);
}
function _0x530042(_0xbc0da2) {
return this.elements.container.querySelector(_0xbc0da2);
}
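// Focus trap: keeps Tab / Shift+Tab cycling between the first and last focusable
// element inside the container.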
function _0x5e0c7d() {
var _0xa88a13 = arguments.length > 0x0 && void 0x0 !== arguments[0x0] ? arguments[0x0] : null,
_0x17edbf = arguments.length > 0x1 && void 0x0 !== arguments[0x1] && arguments[0x1];
if (_0x1e2c68.element(_0xa88a13)) {
var _0x2c1832 = _0x23ffa1.call(this, 'button:not(:disabled), input:not(:disabled), [tabindex]'),
_0x1c6631 = _0x2c1832[0x0],
_0x52c9df = _0x2c1832[_0x2c1832.length - 0x1];
_0x1ef215.call(this, this.elements.container, 'keydown', function(_0x1bba02) {
if ('Tab' === _0x1bba02.key && 0x9 === _0x1bba02.keyCode) {
var _0x17edbf = document.activeElement;
_0x17edbf !== _0x52c9df || _0x1bba02.shiftKey ? _0x17edbf === _0x1c6631 && _0x1bba02.shiftKey && (_0x52c9df.focus(), _0x1bba02.preventDefault()) : (_0x1c6631.focus(), _0x1bba02.preventDefault());
}
}, _0x17edbf, !0x1);
}
}
function _0x31cc23() {
var _0xa88a13 = arguments.length > 0x0 && void 0x0 !== arguments[0x0] ? arguments[0x0] : null,
_0x17edbf = arguments.length > 0x1 && void 0x0 !== arguments[0x1] && arguments[0x1];
_0x1e2c68.element(_0xa88a13) && (_0xa88a13.focus(), _0x17edbf && _0x3a8e2f(_0xa88a13, this.config.classNames.tabFocus));
}
function _0x449eac(_0x589e98) {
return _0x48e5ff.a.bind(_0x42870c.a)(_0x589e98);
}
var _0x32d193, _0x5f365a, _0x820caf, _0x5a2767 = (_0x32d193 = document.createElement('span'), _0x5f365a = {
'WebkitTransition': 'webkitTransitionEnd',
'MozTransition': 'transitionend',
'OTransition': 'oTransitionEnd otransitionend',
'transition': 'transitionend'
}, _0x820caf = Object.keys(_0x5f365a).find(function(_0x5bdb45) {
return void 0x0 !== _0x32d193.style[_0x5bdb45];
}), !!_0x1e2c68.string(_0x820caf) && _0x5f365a[_0x820caf]);
function _0x2b30e0(_0x1fb76b) {
setTimeout(function() {
try {
_0x132da7(_0x1fb76b, true), _0x1fb76b.offsetHeight, _0x132da7(_0x1fb76b, !0x1);
} catch (_0x517747) {}
}, 0x0);
}
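// Basic browser sniffing (Edge, IE, WebKit, iOS) plus a user-agent based name/version parser.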
var _0x296fa9, _0x59aa33 = {
'isEdge': 'Netscape' === navigator.appName && navigator.appVersion.indexOf('Edge') > -0x1,
'isIE': !!document.documentMode,
'isWebkit': 'WebkitAppearance' in document.documentElement.style && !/Edge/ .test(navigator.userAgent),
'isIPhone': /(iPhone|iPod)/gi .test(navigator.platform),
'isIos': /(iPad|iPhone|iPod)/gi .test(navigator.platform),
'info': function() {
var _0xa88a13 = /(MSIE|(?!Gecko.+)Firefox|(?!AppleWebKit.+Chrome.+)Safari|(?!AppleWebKit.+)Chrome|AppleWebKit(?!.+Chrome|.+Safari)|Gecko(?!.+Firefox))(?: |\/)([\d\.apre]+)/ .exec(navigator.userAgent);
return {
'name': _0xa88a13[0x1].toLowerCase(),
'version': _0xa88a13[0x2]
};
}
},
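// Capability detection: range inputs, touch, CSS transitions, reduced motion,
// WebGL (context, renderer and texture limits), WebAssembly and AudioContext.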
_0x2d904a = {
'rangeInput': (_0x296fa9 = document.createElement('input'), _0x296fa9.type = 'range', 'range' === _0x296fa9.type),
'touch': 'ontouchstart' in document.documentElement,
'transitions': !0x1 !== _0x5a2767,
'reducedMotion': 'matchMedia' in window && window.matchMedia('(prefers-reduced-motion)').matches,
'webgl': function() {
var _0xa88a13 = {
'DETECTED': !0x1
};
if (!_0xa88a13.DETECTED) {
var _0x17edbf = document.createElement('canvas');
if (_0x17edbf && _0x17edbf.getContext)
for (var _0x2c1832 = ['webgl2', 'experimental-webgl2', 'webgl', 'experimental-webgl'], _0x44cd27 = 0x0, _0x12b7b8 = _0x2c1832.length; _0x44cd27 < _0x12b7b8; ++_0x44cd27) {
var _0x11cb40 = _0x2c1832[_0x44cd27],
_0x26b3b2 = _0x17edbf.getContext(_0x11cb40);
if (_0x26b3b2) {
_0xa88a13.WEBGL_CONTEXT = _0x11cb40, _0xa88a13.WEBGL_VERSION = _0x26b3b2.getParameter(_0x26b3b2.VERSION), _0xa88a13.WEBGL_VENDOR = _0x26b3b2.getParameter(_0x26b3b2.VENDOR), _0xa88a13.WEBGL_SL_VERSION = _0x26b3b2.getParameter(_0x26b3b2.SHADING_LANGUAGE_VERSION), _0xa88a13.MAX_TEXTURE_SIZE = _0x26b3b2.getParameter(_0x26b3b2.MAX_TEXTURE_SIZE);
var _0x40c8fc = _0x26b3b2.getExtension('WEBGL_debug_renderer_info');
_0x40c8fc && (_0xa88a13.WEBGL_VENDOR = _0x26b3b2.getParameter(_0x40c8fc.UNMASKED_VENDOR_WEBGL), _0xa88a13.WEBGL_RENDERER = _0x26b3b2.getParameter(_0x40c8fc.UNMASKED_RENDERER_WEBGL)), _0xa88a13.DETECTED = true;
break;
}
}
}
return _0xa88a13;
}(),
'wasm': 'undefined' != typeof WebAssembly && _0x1e2c68.object(WebAssembly),
'audioContext': 'undefined' != typeof AudioContext
};
function _0x55cbbe(_0x43ca9d, _0x1fbf80, _0x332d13) {
return _0x1fbf80 in _0x43ca9d ? Object.defineProperty(_0x43ca9d, _0x1fbf80, {
'value': _0x332d13,
'enumerable': true,
'configurable': true,
'writable': true
}) : _0x43ca9d[_0x1fbf80] = _0x332d13, _0x43ca9d;
}
function _0x344e81(_0xa095b0, _0x3d8795) {
return _0x3d8795.split('.').reduce(function(_0x1a1fcd, _0x5112cd) {
return _0x1a1fcd && _0x1a1fcd[_0x5112cd];
}, _0xa095b0);
}
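// Deep-merge utility: recursively copies own enumerable properties of the source
// objects onto the target object.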
function _0x5dc0c0() {
for (var _0xa88a13 = arguments.length > 0x0 && void 0x0 !== arguments[0x0] ? arguments[0x0] : {}, _0x17edbf = arguments.length, _0x2c1832 = new Array(_0x17edbf > 0x1 ? _0x17edbf - 0x1 : 0x0), _0x189546 = 0x1; _0x189546 < _0x17edbf; _0x189546++) _0x2c1832[_0x189546 - 0x1] = arguments[_0x189546];
if (!_0x2c1832.length) return _0xa88a13;
var _0x279e54 = _0x2c1832.shift();
return _0x1e2c68.object(_0x279e54) ? (Object.keys(_0x279e54).forEach(function(_0x53b45a) {
_0x1e2c68.object(_0x279e54[_0x53b45a]) ? (Object.keys(_0xa88a13).includes(_0x53b45a) || Object.assign(_0xa88a13, _0x55cbbe({}, _0x53b45a, {})), _0x5dc0c0(_0xa88a13[_0x53b45a], _0x279e54[_0x53b45a])) : Object.assign(_0xa88a13, _0x55cbbe({}, _0x53b45a, _0x279e54[_0x53b45a]));
}), _0x5dc0c0.apply(void 0x0, [_0xa88a13].concat(_0x2c1832))) : _0xa88a13;
}
function _0x1a0e98() {
var _0xa88a13 = arguments.length > 0x0 && void 0x0 !== arguments[0x0] ? arguments[0x0] : '',
_0x17edbf = arguments.length > 0x1 && void 0x0 !== arguments[0x1] ? arguments[0x1] : '',
_0x2c1832 = arguments.length > 0x2 && void 0x0 !== arguments[0x2] ? arguments[0x2] : '';
return _0xa88a13.replace(new RegExp(_0x17edbf.toString().replace(/([.*+?^=!:${}()|[\]\/\\])/g, '\x5c$1'), 'g'), _0x2c1832.toString());
}
function _0x9fdcea() {
var _0xa88a13 = (arguments.length > 0x0 && void 0x0 !== arguments[0x0] ? arguments[0x0] : '').toString();
return (_0xa88a13 = function() {
var _0xa88a13 = (arguments.length > 0x0 && void 0x0 !== arguments[0x0] ? arguments[0x0] : '').toString();
return _0xa88a13 = _0x1a0e98(_0xa88a13, '-', ' '), _0xa88a13 = _0x1a0e98(_0xa88a13, '_', ' '), _0x1a0e98(_0xa88a13 = function() {
return (arguments.length > 0x0 && void 0x0 !== arguments[0x0] ? arguments[0x0] : '').toString().replace(/\w\S*/g, function(_0x5b5cba) {
return _0x5b5cba.charAt(0x0).toUpperCase() + _0x5b5cba.substr(0x1).toLowerCase();
});
}(_0xa88a13), ' ', '');
}(_0xa88a13)).charAt(0x0).toLowerCase() + _0xa88a13.slice(0x1);
}
function _0x419e30(_0x3614ce, _0x5196ee) {
return function(_0x38f65b) {
if (Array.isArray(_0x38f65b)) return _0x38f65b;
}(_0x3614ce) || function(_0x20a132, _0x481bd0) {
var _0x2c1832 = [],
_0x930e75 = true,
_0x40863e = !0x1,
_0xf7a5cf = void 0x0;
try {
for (var _0x9d4c5a, _0x2f59ed = _0x20a132[Symbol.iterator](); !(_0x930e75 = (_0x9d4c5a = _0x2f59ed.next()).done) && (_0x2c1832.push(_0x9d4c5a.value), !_0x481bd0 || _0x2c1832.length !== _0x481bd0); _0x930e75 = true);
} catch (_0x383181) {
_0x40863e = true, _0xf7a5cf = _0x383181;
} finally {
try {
_0x930e75 || null == _0x2f59ed.return || _0x2f59ed.return();
} finally {
if (_0x40863e) throw _0xf7a5cf;
}
}
return _0x2c1832;
}(_0x3614ce, _0x5196ee) || function() {
throw new TypeError('Invalid attempt to destructure non-iterable instance');
}();
}
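// i18n lookup: resolves a dot-separated key from config.i18n and substitutes the
// {seektime} and {title} placeholders.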
var _0xb9b2ff = {
'get': function() {
var _0xa88a13 = arguments.length > 0x0 && void 0x0 !== arguments[0x0] ? arguments[0x0] : '',
_0x17edbf = arguments.length > 0x1 && void 0x0 !== arguments[0x1] ? arguments[0x1] : {};
if (_0x1e2c68.empty(_0xa88a13) || _0x1e2c68.empty(_0x17edbf)) return '';
var _0x2c1832 = _0x344e81(_0x17edbf.i18n, _0xa88a13);
if (_0x1e2c68.empty(_0x2c1832)) return '';
var _0x54bf45 = {
'{seektime}': _0x17edbf.seekTime,
'{title}': _0x17edbf.title
};
return Object.entries(_0x54bf45).forEach(function(_0x34ee43) {
var _0x17edbf = _0x419e30(_0x34ee43, 0x2),
_0x473ebb = _0x17edbf[0x0],
_0x56ec0 = _0x17edbf[0x1];
_0x2c1832 = _0x1a0e98(_0x2c1832, _0x473ebb, _0x56ec0);
}), _0x2c1832;
}
};
function _0x4c97b0(_0xb3d5fd, _0x2cda5d) {
for (var _0x2c1832 = 0x0; _0x2c1832 < _0x2cda5d.length; _0x2c1832++) {
var _0x3f3944 = _0x2cda5d[_0x2c1832];
_0x3f3944.enumerable = _0x3f3944.enumerable || !0x1, _0x3f3944.configurable = true, 'value' in _0x3f3944 && (_0x3f3944.writable = true), Object.defineProperty(_0xb3d5fd, _0x3f3944.key, _0x3f3944);
}
}
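// localStorage wrapper: 'supported' probes availability, while get()/set() read and
// merge a single JSON object stored under this.key.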
var _0x2f61ba = function() {
function _0x566dbe(_0x3a7165, _0x3d2e3a) {
! function(_0x48c5c0, _0x2b027d) {
if (!(_0x48c5c0 instanceof _0x2b027d)) throw new TypeError('Cannot call a class as a function');
}(this, _0x566dbe), this.enabled = true, this.key = _0x3d2e3a;
}
var _0x17edbf, _0x2c1832, _0x4496fc;
return _0x17edbf = _0x566dbe, _0x4496fc = [{
'key': 'supported',
'get': function() {
try {
return 'localStorage' in window && (window.localStorage.getItem('___test') || window.localStorage.setItem('___test', '___test'), true);
} catch (_0x42cafd) {
return !0x1;
}
}
}], (_0x2c1832 = [{
'key': 'get',
'value': function(_0x4ed865) {
if (!_0x566dbe.supported || !this.enabled) return null;
var _0x2c1832 = window.localStorage.getItem(this.key);
if (_0x1e2c68.empty(_0x2c1832)) return null;
var _0x57ebf7 = JSON.parse(_0x2c1832);
return _0x1e2c68.string(_0x4ed865) && _0x4ed865.length ? _0x57ebf7[_0x4ed865] : _0x57ebf7;
}
}, {
'key': 'set',
'value': function(_0x443e7a) {
if (_0x566dbe.supported && this.enabled && _0x1e2c68.object(_0x443e7a)) {
var _0x2c1832 = this.get();
_0x1e2c68.empty(_0x2c1832) && (_0x2c1832 = {}), _0x5dc0c0(_0x2c1832, _0x443e7a), window.localStorage.setItem(this.key, JSON.stringify(_0x2c1832));
}
}
}]) && _0x4c97b0(_0x17edbf.prototype, _0x2c1832), _0x4496fc && _0x4c97b0(_0x17edbf, _0x4496fc), _0x566dbe;
}();
var _0x13fb79, _0x569918 = _0x2c1832(0x6c),
_0x4e4ca6 = _0x2c1832.n(_0x569918),
_0x4ad1c6 = _0x2c1832(0x6),
_0x4704b1 = _0x2c1832(0x9f),
_0x3a58c8 = _0x2c1832.n(_0x4704b1),
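// Bundled GLSL shader presets and sources (2xScaleHQ, 4xScaleHQ, crt-easymode,
// crt-aperture, crt-geom) keyed by file name.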
_0x5032e6 = {
'2xScaleHQ.glslp': 'shaders = 1\n\nshader0 = "2xScaleHQ.glsl"\nfilter_linear0 = false\nscale_type_0 = source\n',
'4xScaleHQ.glslp': 'shaders = 1\n\nshader0 = "4xScaleHQ.glsl"\nfilter_linear0 = false\nscale_type_0 = source\n',
'crt-easymode.glslp': 'shaders = 1\n\nshader0 = crt-easymode.glsl\nfilter_linear0 = false\nscale_type_0 = source\n',
'crt-aperture.glslp': 'shaders = 1\n\nshader0 = crt-aperture.glsl\nfilter_linear0 = false\n',
'crt-geom.glslp': 'shaders = 1\n\nshader0 = crt-geom.glsl\nfilter_linear0 = false\nscale_type_0 = source\n',
'2xScaleHQ.glsl': '/*\n2xGLSLHqFilter shader\n\nCopyright (C) 2005 guest(r) - [email protected]\n\nThis program is free software; you can redistribute it and/or\nmodify it under the terms of the GNU General Public License\nas published by the Free Software Foundation; either version 2\nof the License, or (at your option) any later version.\n\nThis program is distributed in the hope that it will be useful,\nbut WITHOUT ANY WARRANTY; without even the implied warranty of\nMERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\nGNU General Public License for more details.\n\nYou should have received a copy of the GNU General Public License\nalong with this program; if not, write to the Free Software\nFoundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.\n*/\n\n#if defined(VERTEX)\n\n#if __VERSION__ >= 130\n#define COMPAT_VARYING out\n#define COMPAT_ATTRIBUTE in\n#define COMPAT_TEXTURE texture\n#else\n#define COMPAT_VARYING varying \n#define COMPAT_ATTRIBUTE attribute \n#define COMPAT_TEXTURE texture2D\n#endif\n\n#ifdef GL_ES\n#define COMPAT_PRECISION mediump\n#else\n#define COMPAT_PRECISION\n#endif\n\nCOMPAT_ATTRIBUTE vec4 VertexCoord;\nCOMPAT_ATTRIBUTE vec4 COLOR;\nCOMPAT_ATTRIBUTE vec4 TexCoord;\nCOMPAT_VARYING vec4 COL0;\nCOMPAT_VARYING vec4 TEX0;\nCOMPAT_VARYING vec4 t1;\nCOMPAT_VARYING vec4 t2;\nCOMPAT_VARYING vec4 t3;\nCOMPAT_VARYING vec4 t4;\n\nvec4 _oPosition1; \nuniform mat4 MVPMatrix;\nuniform COMPAT_PRECISION int FrameDirection;\nuniform COMPAT_PRECISION int FrameCount;\nuniform COMPAT_PRECISION vec2 OutputSize;\nuniform COMPAT_PRECISION vec2 TextureSize;\nuniform COMPAT_PRECISION vec2 InputSize;\n\n// compatibility #defines\n#define vTexCoord TEX0.xy\n#define SourceSize vec4(TextureSize, 1.0 / TextureSize) //either TextureSize or InputSize\n#define OutSize vec4(OutputSize, 1.0 / OutputSize)\n\nvoid main()\n{\ngl_Position = MVPMatrix * VertexCoord;\nTEX0.xy = TexCoord.xy;\nfloat x = 0.5 * SourceSize.z;\nfloat y = 0.5 * SourceSize.w;\nvec2 dg1 = vec2( x, y);\nvec2 dg2 = vec2(-x, y);\nvec2 dx = vec2(x, 0.0);\nvec2 dy = vec2(0.0, y);\nt1 = vec4(vTexCoord - dg1, vTexCoord - dy);\nt2 = vec4(vTexCoord - dg2, vTexCoord + dx);\nt3 = vec4(vTexCoord + dg1, vTexCoord + dy);\nt4 = vec4(vTexCoord + dg2, vTexCoord - dx);\n}\n\n#elif defined(FRAGMENT)\n\n#if __VERSION__ >= 130\n#define COMPAT_VARYING in\n#define COMPAT_TEXTURE texture\nout vec4 FragColor;\n#else\n#define COMPAT_VARYING varying\n#define FragColor gl_FragColor\n#define COMPAT_TEXTURE texture2D\n#endif\n\n#ifdef GL_ES\n#ifdef GL_FRAGMENT_PRECISION_HIGH\nprecision highp float;\n#else\nprecision mediump float;\n#endif\n#define COMPAT_PRECISION mediump\n#else\n#define COMPAT_PRECISION\n#endif\n\nuniform COMPAT_PRECISION int FrameDirection;\nuniform COMPAT_PRECISION int FrameCount;\nuniform COMPAT_PRECISION vec2 OutputSize;\nuniform COMPAT_PRECISION vec2 TextureSize;\nuniform COMPAT_PRECISION vec2 InputSize;\nuniform sampler2D Texture;\nCOMPAT_VARYING vec4 TEX0;\nCOMPAT_VARYING vec4 t1;\nCOMPAT_VARYING vec4 t2;\nCOMPAT_VARYING vec4 t3;\nCOMPAT_VARYING vec4 t4;\n\n// compatibility #defines\n#define Source Texture\n#define vTexCoord TEX0.xy\n\n#define SourceSize vec4(TextureSize, 1.0 / TextureSize) //either TextureSize or InputSize\n#define OutSize vec4(OutputSize, 1.0 / OutputSize)\n\nfloat mx = 0.325; // start smoothing wt.\nfloat k = -0.250; // wt. 
decrease factor\nfloat max_w = 0.25; // max filter weight\nfloat min_w =-0.05; // min filter weight\nfloat lum_add = 0.25; // affects smoothing\nvec3 dt = vec3(1.0);\n\nvoid main()\n{\nvec3 c00 = COMPAT_TEXTURE(Source, t1.xy).xyz; \nvec3 c10 = COMPAT_TEXTURE(Source, t1.zw).xyz; \nvec3 c20 = COMPAT_TEXTURE(Source, t2.xy).xyz; \nvec3 c01 = COMPAT_TEXTURE(Source, t4.zw).xyz; \nvec3 c11 = COMPAT_TEXTURE(Source, vTexCoord).xyz; \nvec3 c21 = COMPAT_TEXTURE(Source, t2.zw).xyz; \nvec3 c02 = COMPAT_TEXTURE(Source, t4.xy).xyz; \nvec3 c12 = COMPAT_TEXTURE(Source, t3.zw).xyz; \nvec3 c22 = COMPAT_TEXTURE(Source, t3.xy).xyz; \n\nfloat md1 = dot(abs(c00 - c22), dt);\nfloat md2 = dot(abs(c02 - c20), dt);\n\nfloat w1 = dot(abs(c22 - c11), dt) * md2;\nfloat w2 = dot(abs(c02 - c11), dt) * md1;\nfloat w3 = dot(abs(c00 - c11), dt) * md2;\nfloat w4 = dot(abs(c20 - c11), dt) * md1;\n\nfloat t1 = w1 + w3;\nfloat t2 = w2 + w4;\nfloat ww = max(t1, t2) + 0.0001;\n\nc11 = (w1 * c00 + w2 * c20 + w3 * c22 + w4 * c02 + ww * c11) / (t1 + t2 + ww);\n\nfloat lc1 = k / (0.12 * dot(c10 + c12 + c11, dt) + lum_add);\nfloat lc2 = k / (0.12 * dot(c01 + c21 + c11, dt) + lum_add);\n\nw1 = clamp(lc1 * dot(abs(c11 - c10), dt) + mx, min_w, max_w);\nw2 = clamp(lc2 * dot(abs(c11 - c21), dt) + mx, min_w, max_w);\nw3 = clamp(lc1 * dot(abs(c11 - c12), dt) + mx, min_w, max_w);\nw4 = clamp(lc2 * dot(abs(c11 - c01), dt) + mx, min_w, max_w);\nFragColor = vec4(w1 * c10 + w2 * c21 + w3 * c12 + w4 * c01 + (1.0 - w1 - w2 - w3 - w4) * c11, 1.0);\n} \n#endif\n',
'4xScaleHQ.glsl': '/*\n4xGLSLHqFilter shader\n\nCopyright (C) 2005 guest(r) - [email protected]\n\nThis program is free software; you can redistribute it and/or\nmodify it under the terms of the GNU General Public License\nas published by the Free Software Foundation; either version 2\nof the License, or (at your option) any later version.\n\nThis program is distributed in the hope that it will be useful,\nbut WITHOUT ANY WARRANTY; without even the implied warranty of\nMERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\nGNU General Public License for more details.\n\nYou should have received a copy of the GNU General Public License\nalong with this program; if not, write to the Free Software\nFoundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.\n*/\n\n#if defined(VERTEX)\n\n#if __VERSION__ >= 130\n#define COMPAT_VARYING out\n#define COMPAT_ATTRIBUTE in\n#define COMPAT_TEXTURE texture\n#else\n#define COMPAT_VARYING varying \n#define COMPAT_ATTRIBUTE attribute \n#define COMPAT_TEXTURE texture2D\n#endif\n\n#ifdef GL_ES\n#define COMPAT_PRECISION mediump\n#else\n#define COMPAT_PRECISION\n#endif\n\nCOMPAT_ATTRIBUTE vec4 VertexCoord;\nCOMPAT_ATTRIBUTE vec4 COLOR;\nCOMPAT_ATTRIBUTE vec4 TexCoord;\nCOMPAT_VARYING vec4 COL0;\nCOMPAT_VARYING vec4 TEX0;\nCOMPAT_VARYING vec4 t1;\nCOMPAT_VARYING vec4 t2;\nCOMPAT_VARYING vec4 t3;\nCOMPAT_VARYING vec4 t4;\nCOMPAT_VARYING vec4 t5;\nCOMPAT_VARYING vec4 t6;\n\nvec4 _oPosition1; \nuniform mat4 MVPMatrix;\nuniform COMPAT_PRECISION int FrameDirection;\nuniform COMPAT_PRECISION int FrameCount;\nuniform COMPAT_PRECISION vec2 OutputSize;\nuniform COMPAT_PRECISION vec2 TextureSize;\nuniform COMPAT_PRECISION vec2 InputSize;\n\n// compatibility #defines\n#define vTexCoord TEX0.xy\n#define SourceSize vec4(TextureSize, 1.0 / TextureSize) //either TextureSize or InputSize\n#define OutSize vec4(OutputSize, 1.0 / OutputSize)\n\nvoid main()\n{\ngl_Position = MVPMatrix * VertexCoord;\nTEX0.xy = TexCoord.xy;\nfloat x = 0.5 * SourceSize.z;\nfloat y = 0.5 * SourceSize.w;\nvec2 dg1 = vec2( x, y);\nvec2 dg2 = vec2(-x, y);\nvec2 sd1 = dg1 * 0.5;\nvec2 sd2 = dg2 * 0.5;\nvec2 ddx = vec2(x, 0.0);\nvec2 ddy = vec2(0.0, y);\nt1 = vec4(vTexCoord - sd1, vTexCoord - ddy);\nt2 = vec4(vTexCoord - sd2, vTexCoord + ddx);\nt3 = vec4(vTexCoord + sd1, vTexCoord + ddy);\nt4 = vec4(vTexCoord + sd2, vTexCoord - ddx);\nt5 = vec4(vTexCoord - dg1, vTexCoord - dg2);\nt6 = vec4(vTexCoord + dg1, vTexCoord + dg2);\n}\n\n#elif defined(FRAGMENT)\n\n#if __VERSION__ >= 130\n#define COMPAT_VARYING in\n#define COMPAT_TEXTURE texture\nout vec4 FragColor;\n#else\n#define COMPAT_VARYING varying\n#define FragColor gl_FragColor\n#define COMPAT_TEXTURE texture2D\n#endif\n\n#ifdef GL_ES\n#ifdef GL_FRAGMENT_PRECISION_HIGH\nprecision highp float;\n#else\nprecision mediump float;\n#endif\n#define COMPAT_PRECISION mediump\n#else\n#define COMPAT_PRECISION\n#endif\n\nuniform COMPAT_PRECISION int FrameDirection;\nuniform COMPAT_PRECISION int FrameCount;\nuniform COMPAT_PRECISION vec2 OutputSize;\nuniform COMPAT_PRECISION vec2 TextureSize;\nuniform COMPAT_PRECISION vec2 InputSize;\nuniform sampler2D Texture;\nCOMPAT_VARYING vec4 TEX0;\nCOMPAT_VARYING vec4 t1;\nCOMPAT_VARYING vec4 t2;\nCOMPAT_VARYING vec4 t3;\nCOMPAT_VARYING vec4 t4;\nCOMPAT_VARYING vec4 t5;\nCOMPAT_VARYING vec4 t6;\n\n// compatibility #defines\n#define Source Texture\n#define vTexCoord TEX0.xy\n\n#define SourceSize vec4(TextureSize, 1.0 / TextureSize) //either TextureSize or InputSize\n#define OutSize vec4(OutputSize, 1.0 / 
OutputSize)\n\nfloat mx = 1.0; // start smoothing wt.\nfloat k = -1.10; // wt. decrease factor\nfloat max_w = 0.75; // max filter weight\nfloat min_w = 0.03; // min filter weight\nfloat lum_add = 0.33; // affects smoothing\nvec3 dt = vec3(1.0);\n\nvoid main()\n{\nvec3 c = COMPAT_TEXTURE(Source, vTexCoord).xyz;\nvec3 i1 = COMPAT_TEXTURE(Source, t1.xy).xyz; \nvec3 i2 = COMPAT_TEXTURE(Source, t2.xy).xyz; \nvec3 i3 = COMPAT_TEXTURE(Source, t3.xy).xyz; \nvec3 i4 = COMPAT_TEXTURE(Source, t4.xy).xyz; \nvec3 o1 = COMPAT_TEXTURE(Source, t5.xy).xyz; \nvec3 o3 = COMPAT_TEXTURE(Source, t6.xy).xyz; \nvec3 o2 = COMPAT_TEXTURE(Source, t5.zw).xyz;\nvec3 o4 = COMPAT_TEXTURE(Source, t6.zw).xyz;\nvec3 s1 = COMPAT_TEXTURE(Source, t1.zw).xyz; \nvec3 s2 = COMPAT_TEXTURE(Source, t2.zw).xyz; \nvec3 s3 = COMPAT_TEXTURE(Source, t3.zw).xyz; \nvec3 s4 = COMPAT_TEXTURE(Source, t4.zw).xyz; \n\nfloat ko1=dot(abs(o1-c),dt);\nfloat ko2=dot(abs(o2-c),dt);\nfloat ko3=dot(abs(o3-c),dt);\nfloat ko4=dot(abs(o4-c),dt);\n\nfloat k1=min(dot(abs(i1-i3),dt),max(ko1,ko3));\nfloat k2=min(dot(abs(i2-i4),dt),max(ko2,ko4));\n\nfloat w1 = k2; if(ko3<ko1) w1*=ko3/ko1;\nfloat w2 = k1; if(ko4<ko2) w2*=ko4/ko2;\nfloat w3 = k2; if(ko1<ko3) w3*=ko1/ko3;\nfloat w4 = k1; if(ko2<ko4) w4*=ko2/ko4;\n\nc=(w1*o1+w2*o2+w3*o3+w4*o4+0.001*c)/(w1+w2+w3+w4+0.001);\nw1 = k*dot(abs(i1-c)+abs(i3-c),dt)/(0.125*dot(i1+i3,dt)+lum_add);\nw2 = k*dot(abs(i2-c)+abs(i4-c),dt)/(0.125*dot(i2+i4,dt)+lum_add);\nw3 = k*dot(abs(s1-c)+abs(s3-c),dt)/(0.125*dot(s1+s3,dt)+lum_add);\nw4 = k*dot(abs(s2-c)+abs(s4-c),dt)/(0.125*dot(s2+s4,dt)+lum_add);\n\nw1 = clamp(w1+mx,min_w,max_w); \nw2 = clamp(w2+mx,min_w,max_w);\nw3 = clamp(w3+mx,min_w,max_w); \nw4 = clamp(w4+mx,min_w,max_w);\n\nFragColor = vec4((w1*(i1+i3)+w2*(i2+i4)+w3*(s1+s3)+w4*(s2+s4)+c)/(2.0*(w1+w2+w3+w4)+1.0), 1.0);\n} \n#endif\n',
'crt-easymode.glsl': '#if defined(VERTEX)\n\n #if __VERSION__ >= 130\n #define COMPAT_VARYING out\n #define COMPAT_ATTRIBUTE in\n #define COMPAT_TEXTURE texture\n #else\n #define COMPAT_VARYING varying\n #define COMPAT_ATTRIBUTE attribute\n #define COMPAT_TEXTURE texture2D\n #endif\n \n #ifdef GL_ES\n #define COMPAT_PRECISION mediump\n #else\n #define COMPAT_PRECISION\n #endif\n COMPAT_VARYING float _frame_rotation;\n struct input_dummy {\n vec2 _video_size;\n vec2 _texture_size;\n vec2 _output_dummy_size;\n float _frame_count;\n float _frame_direction;\n float _frame_rotation;\n };\n vec4 _oPosition1;\n vec4 _r0005;\n COMPAT_ATTRIBUTE vec4 VertexCoord;\n COMPAT_ATTRIBUTE vec4 TexCoord;\n COMPAT_VARYING vec4 TEX0;\n \n uniform mat4 MVPMatrix;\n uniform int FrameDirection;\n uniform int FrameCount;\n uniform COMPAT_PRECISION vec2 OutputSize;\n uniform COMPAT_PRECISION vec2 TextureSize;\n uniform COMPAT_PRECISION vec2 InputSize;\n void main()\n {\n vec2 _oTex;\n _r0005 = VertexCoord.x*MVPMatrix[0];\n _r0005 = _r0005 + VertexCoord.y*MVPMatrix[1];\n _r0005 = _r0005 + VertexCoord.z*MVPMatrix[2];\n _r0005 = _r0005 + VertexCoord.w*MVPMatrix[3];\n _oPosition1 = _r0005;\n _oTex = TexCoord.xy;\n gl_Position = _r0005;\n TEX0.xy = TexCoord.xy;\n }\n #elif defined(FRAGMENT)\n \n #if __VERSION__ >= 130\n #define COMPAT_VARYING in\n #define COMPAT_TEXTURE texture\n out vec4 FragColor;\n #else\n #define COMPAT_VARYING varying\n #define FragColor gl_FragColor\n #define COMPAT_TEXTURE texture2D\n #endif\n \n #ifdef GL_ES\n #ifdef GL_FRAGMENT_PRECISION_HIGH\n precision highp float;\n #else\n precision mediump float;\n #endif\n #define COMPAT_PRECISION mediump\n #else\n #define COMPAT_PRECISION\n #endif\n COMPAT_VARYING float _frame_rotation;\n struct input_dummy {\n vec2 _video_size;\n vec2 _texture_size;\n vec2 _output_dummy_size;\n float _frame_count;\n float _frame_direction;\n float _frame_rotation;\n };\n vec4 _ret_0;\n float _TMP30;\n float _TMP29;\n float _TMP28;\n float _TMP13;\n float _TMP32;\n float _TMP11;\n float _TMP10;\n float _TMP31;\n float _TMP9;\n float _TMP8;\n float _TMP15;\n float _TMP14;\n float _TMP33;\n vec4 _TMP34;\n vec4 _TMP27;\n vec4 _TMP25;\n vec4 _TMP23;\n vec4 _TMP21;\n vec4 _TMP26;\n vec4 _TMP24;\n vec4 _TMP22;\n vec4 _TMP20;\n float _TMP4;\n vec4 _TMP3;\n vec4 _TMP2;\n float _TMP19;\n float _TMP18;\n float _TMP17;\n float _TMP16;\n vec4 _TMP1;\n vec2 _TMP0;\n uniform sampler2D Texture;\n input_dummy _IN1;\n float _TMP43;\n float _x_step0044;\n float _curve0044;\n float _a0048;\n float _val0052;\n float _a0052;\n vec4 _TMP57;\n vec4 _x0072;\n vec2 _c0086;\n vec4 _x0088;\n vec4 _x0094;\n vec2 _c0098;\n vec4 _x0100;\n vec2 _c0104;\n vec4 _x0106;\n vec4 _sample_min0110;\n vec4 _sample_max0110;\n vec4 _r0112;\n vec4 _TMP117;\n vec2 _co0124;\n vec2 _c0126;\n vec4 _x0128;\n vec4 _x0134;\n vec2 _c0138;\n vec4 _x0140;\n vec2 _c0144;\n vec4 _x0146;\n vec4 _sample_min0150;\n vec4 _sample_max0150;\n vec4 _r0152;\n vec4 _TMP157;\n float _TMP163;\n float _x_step0164;\n float _curve0164;\n float _a0168;\n float _val0172;\n float _a0172;\n float _TMP183;\n float _TMP189;\n float _x0190;\n float _a0196;\n float _x0198;\n vec2 _x0200;\n float _x0208;\n COMPAT_VARYING vec4 TEX0;\n \n uniform COMPAT_PRECISION vec2 OutputSize;\n uniform COMPAT_PRECISION vec2 TextureSize;\n uniform COMPAT_PRECISION vec2 InputSize;\n void main()\n {\n vec2 _dx1;\n vec2 _dy;\n vec2 _pix_co;\n vec2 _tex_co;\n vec2 _dist;\n vec3 _col2;\n vec3 _col21;\n vec4 _coeffs1;\n float _luma;\n float _bright;\n float 
_scan_weight;\n vec2 _mod_fac;\n int _dot_no;\n vec3 _mask_weight;\n vec3 _TMP37;\n _dx1 = vec2(1.00000000E+00/TextureSize.x, 0.00000000E+00);\n _dy = vec2(0.00000000E+00, 1.00000000E+00/TextureSize.y);\n _pix_co = TEX0.xy*TextureSize - vec2( 5.00000000E-01, 5.00000000E-01);\n _TMP0 = floor(_pix_co);\n _tex_co = (_TMP0 + vec2( 5.00000000E-01, 5.00000000E-01))/TextureSize;\n _dist = fract(_pix_co);\n _x_step0044 = float((_dist.x >= 5.00000000E-01));\n _a0048 = 2.50000000E-01 - (_dist.x - _x_step0044)*(_dist.x - _x_step0044);\n _TMP33 = inversesqrt(_a0048);\n _TMP14 = 1.00000000E+00/_TMP33;\n _a0052 = 5.00000000E-01 - _dist.x;\n _val0052 = float((_a0052 > 0.00000000E+00));\n _TMP15 = _val0052 - float((_a0052 < 0.00000000E+00));\n _curve0044 = 5.00000000E-01 - _TMP14*_TMP15;\n _TMP43 = _dist.x + 2.50000000E-01*(_curve0044 - _dist.x);\n _coeffs1 = 3.14159274E+00*vec4(1.00000000E+00 + _TMP43, _TMP43, 1.00000000E+00 - _TMP43, 2.00000000E+00 - _TMP43);\n _TMP1 = abs(_coeffs1);\n _TMP57 = max(_TMP1, vec4( 9.99999975E-06, 9.99999975E-06, 9.99999975E-06, 9.99999975E-06));\n _TMP16 = sin(_TMP57.x);\n _TMP17 = sin(_TMP57.y);\n _TMP18 = sin(_TMP57.z);\n _TMP19 = sin(_TMP57.w);\n _TMP2 = vec4(_TMP16, _TMP17, _TMP18, _TMP19);\n _x0072 = _TMP57/2.00000000E+00;\n _TMP16 = sin(_x0072.x);\n _TMP17 = sin(_x0072.y);\n _TMP18 = sin(_x0072.z);\n _TMP19 = sin(_x0072.w);\n _TMP3 = vec4(_TMP16, _TMP17, _TMP18, _TMP19);\n _coeffs1 = ((2.00000000E+00*_TMP2)*_TMP3)/(_TMP57*_TMP57);\n _TMP4 = dot(_coeffs1, vec4( 1.00000000E+00, 1.00000000E+00, 1.00000000E+00, 1.00000000E+00));\n _coeffs1 = _coeffs1/_TMP4;\n _c0086 = _tex_co - _dx1;\n _TMP20 = COMPAT_TEXTURE(Texture, _c0086);\n _x0088 = vec4( 1.00000000E+00, 1.00000000E+00, 1.00000000E+00, 1.00000000E+00) + (_TMP20 - vec4( 1.00000000E+00, 1.00000000E+00, 1.00000000E+00, 1.00000000E+00));\n _TMP21 = _TMP20*_x0088;\n _TMP22 = COMPAT_TEXTURE(Texture, _tex_co);\n _x0094 = vec4( 1.00000000E+00, 1.00000000E+00, 1.00000000E+00, 1.00000000E+00) + (_TMP22 - vec4( 1.00000000E+00, 1.00000000E+00, 1.00000000E+00, 1.00000000E+00));\n _TMP23 = _TMP22*_x0094;\n _c0098 = _tex_co + _dx1;\n _TMP24 = COMPAT_TEXTURE(Texture, _c0098);\n _x0100 = vec4( 1.00000000E+00, 1.00000000E+00, 1.00000000E+00, 1.00000000E+00) + (_TMP24 - vec4( 1.00000000E+00, 1.00000000E+00, 1.00000000E+00, 1.00000000E+00));\n _TMP25 = _TMP24*_x0100;\n _c0104 = _tex_co + 2.00000000E+00*_dx1;\n _TMP26 = COMPAT_TEXTURE(Texture, _c0104);\n _x0106 = vec4( 1.00000000E+00, 1.00000000E+00, 1.00000000E+00, 1.00000000E+00) + (_TMP26 - vec4( 1.00000000E+00, 1.00000000E+00, 1.00000000E+00, 1.00000000E+00));\n _TMP27 = _TMP26*_x0106;\n _r0112 = _coeffs1.x*_TMP21;\n _r0112 = _r0112 + _coeffs1.y*_TMP23;\n _r0112 = _r0112 + _coeffs1.z*_TMP25;\n _r0112 = _r0112 + _coeffs1.w*_TMP27;\n _sample_min0110 = min(_TMP23, _TMP25);\n _sample_max0110 = max(_TMP23, _TMP25);\n _TMP34 = min(_sample_max0110, _r0112);\n _TMP117 = max(_sample_min0110, _TMP34);\n _co0124 = _tex_co + _dy;\n _c0126 = _co0124 - _dx1;\n _TMP20 = COMPAT_TEXTURE(Texture, _c0126);\n _x0128 = vec4( 1.00000000E+00, 1.00000000E+00, 1.00000000E+00, 1.00000000E+00) + (_TMP20 - vec4( 1.00000000E+00, 1.00000000E+00, 1.00000000E+00, 1.00000000E+00));\n _TMP21 = _TMP20*_x0128;\n _TMP22 = COMPAT_TEXTURE(Texture, _co0124);\n _x0134 = vec4( 1.00000000E+00, 1.00000000E+00, 1.00000000E+00, 1.00000000E+00) + (_TMP22 - vec4( 1.00000000E+00, 1.00000000E+00, 1.00000000E+00, 1.00000000E+00));\n _TMP23 = _TMP22*_x0134;\n _c0138 = _co0124 + _dx1;\n _TMP24 = COMPAT_TEXTURE(Texture, _c0138);\n 
_x0140 = vec4( 1.00000000E+00, 1.00000000E+00, 1.00000000E+00, 1.00000000E+00) + (_TMP24 - vec4( 1.00000000E+00, 1.00000000E+00, 1.00000000E+00, 1.00000000E+00));\n _TMP25 = _TMP24*_x0140;\n _c0144 = _co0124 + 2.00000000E+00*_dx1;\n _TMP26 = COMPAT_TEXTURE(Texture, _c0144);\n _x0146 = vec4( 1.00000000E+00, 1.00000000E+00, 1.00000000E+00, 1.00000000E+00) + (_TMP26 - vec4( 1.00000000E+00, 1.00000000E+00, 1.00000000E+00, 1.00000000E+00));\n _TMP27 = _TMP26*_x0146;\n _r0152 = _coeffs1.x*_TMP21;\n _r0152 = _r0152 + _coeffs1.y*_TMP23;\n _r0152 = _r0152 + _coeffs1.z*_TMP25;\n _r0152 = _r0152 + _coeffs1.w*_TMP27;\n _sample_min0150 = min(_TMP23, _TMP25);\n _sample_max0150 = max(_TMP23, _TMP25);\n _TMP34 = min(_sample_max0150, _r0152);\n _TMP157 = max(_sample_min0150, _TMP34);\n _x_step0164 = float((_dist.y >= 5.00000000E-01));\n _a0168 = 2.50000000E-01 - (_dist.y - _x_step0164)*(_dist.y - _x_step0164);\n _TMP33 = inversesqrt(_a0168);\n _TMP14 = 1.00000000E+00/_TMP33;\n _a0172 = 5.00000000E-01 - _dist.y;\n _val0172 = float((_a0172 > 0.00000000E+00));\n _TMP15 = _val0172 - float((_a0172 < 0.00000000E+00));\n _curve0164 = 5.00000000E-01 - _TMP14*_TMP15;\n _TMP163 = _dist.y + (_curve0164 - _dist.y);\n _col2 = _TMP117.xyz + _TMP163*(_TMP157.xyz - _TMP117.xyz);\n _luma = dot(vec3( 2.12599993E-01, 7.15200007E-01, 7.22000003E-02), _col2);\n _TMP8 = max(_col2.y, _col2.z);\n _TMP9 = max(_col2.x, _TMP8);\n _bright = (_TMP9 + _luma)/2.00000000E+00;\n _TMP31 = min(6.49999976E-01, _bright);\n _TMP183 = max(3.49999994E-01, _TMP31);\n _x0190 = _bright*1.50000000E+00;\n _TMP31 = min(1.50000000E+00, _x0190);\n _TMP189 = max(1.50000000E+00, _TMP31);\n _a0196 = TEX0.y*2.00000000E+00*3.14159274E+00*TextureSize.y;\n _TMP10 = cos(_a0196);\n _x0198 = _TMP10*5.00000000E-01 + 5.00000000E-01;\n _TMP11 = pow(_x0198, _TMP189);\n _scan_weight = 1.00000000E+00 - _TMP11;\n _x0200 = (TEX0.xy*OutputSize*TextureSize)/InputSize;\n _mod_fac = floor(_x0200);\n _x0208 = _mod_fac.x/3.00000000E+00;\n _TMP32 = floor(_x0208);\n _TMP13 = _mod_fac.x - 3.00000000E+00*_TMP32;\n _dot_no = int(_TMP13);\n if (_dot_no == 0) {\n _mask_weight = vec3( 1.00000000E+00, 6.99999988E-01, 6.99999988E-01);\n } else {\n if (_dot_no == 1) {\n _mask_weight = vec3( 6.99999988E-01, 1.00000000E+00, 6.99999988E-01);\n } else {\n _mask_weight = vec3( 6.99999988E-01, 6.99999988E-01, 1.00000000E+00);\n }\n }\n if (InputSize.y >= 4.00000000E+02) {\n _scan_weight = 1.00000000E+00;\n }\n _col21 = _col2.xyz;\n _col2 = _col2*vec3(_scan_weight, _scan_weight, _scan_weight);\n _col2 = _col2 + _TMP183*(_col21 - _col2);\n _col2 = _col2*_mask_weight;\n _TMP28 = pow(_col2.x, 5.55555582E-01);\n _TMP29 = pow(_col2.y, 5.55555582E-01);\n _TMP30 = pow(_col2.z, 5.55555582E-01);\n _col2 = vec3(_TMP28, _TMP29, _TMP30);\n _TMP37 = _col2*1.20000005E+00;\n _ret_0 = vec4(_TMP37.x, _TMP37.y, _TMP37.z, 1.00000000E+00);\n FragColor = _ret_0;\n return;\n }\n #endif\n',
'crt-aperture.glsl': '\n/*\nCRT Shader by EasyMode\nLicense: GPL\n*/\n/*\n#pragma parameter SHARPNESS_IMAGE "Sharpness Image" 1.0 1.0 5.0 1.0\n#pragma parameter SHARPNESS_EDGES "Sharpness Edges" 3.0 1.0 5.0 1.0\n#pragma parameter GLOW_WIDTH "Glow Width" 0.5 0.05 0.65 0.05\n#pragma parameter GLOW_HEIGHT "Glow Height" 0.5 0.05 0.65 0.05\n#pragma parameter GLOW_HALATION "Glow Halation" 0.1 0.0 1.0 0.01\n#pragma parameter GLOW_DIFFUSION "Glow Diffusion" 0.05 0.0 1.0 0.01\n#pragma parameter MASK_COLORS "Mask Colors" 2.0 2.0 3.0 1.0\n#pragma parameter MASK_STRENGTH "Mask Strength" 0.3 0.0 1.0 0.05\n#pragma parameter MASK_SIZE "Mask Size" 1.0 1.0 9.0 1.0\n#pragma parameter SCANLINE_SIZE_MIN "Scanline Size Min." 0.5 0.5 1.5 0.05\n#pragma parameter SCANLINE_SIZE_MAX "Scanline Size Max." 1.5 0.5 1.5 0.05\n#pragma parameter GAMMA_INPUT "Gamma Input" 2.4 1.0 5.0 0.1\n#pragma parameter GAMMA_OUTPUT "Gamma Output" 2.4 1.0 5.0 0.1\n#pragma parameter BRIGHTNESS "Brightness" 1.5 0.0 2.0 0.05\n* */\n\n#define Coord TEX0\n\n#if defined(VERTEX)\n\n#if __VERSION__ >= 130\n#define OUT out\n#define IN in\n#define tex2D texture\n#else\n#define OUT varying \n#define IN attribute \n#define tex2D texture2D\n#endif\n\n#ifdef GL_ES\n#define PRECISION mediump\n#else\n#define PRECISION\n#endif\n\nIN vec4 VertexCoord;\nIN vec4 Color;\nIN vec2 TexCoord;\nOUT vec4 color;\nOUT vec2 Coord;\n\nuniform mat4 MVPMatrix;\nuniform PRECISION int FrameDirection;\nuniform PRECISION int FrameCount;\nuniform PRECISION vec2 OutputSize;\nuniform PRECISION vec2 TextureSize;\nuniform PRECISION vec2 InputSize;\n\nvoid main()\n{\ngl_Position = MVPMatrix * VertexCoord;\ncolor = Color;\nCoord = TexCoord;\n}\n\n#elif defined(FRAGMENT)\n\n#if __VERSION__ >= 130\n#define IN in\n#define tex2D texture\nout vec4 FragColor;\n#else\n#define IN varying\n#define FragColor gl_FragColor\n#define tex2D texture2D\n#endif\n\n#ifdef GL_ES\n#ifdef GL_FRAGMENT_PRECISION_HIGH\nprecision highp float;\n#else\nprecision mediump float;\n#endif\n#define PRECISION mediump\n#else\n#define PRECISION\n#endif\n\nuniform PRECISION int FrameDirection;\nuniform PRECISION int FrameCount;\nuniform PRECISION vec2 OutputSize;\nuniform PRECISION vec2 TextureSize;\nuniform PRECISION vec2 InputSize;\nuniform sampler2D Texture;\nIN vec2 Coord;\n\n#ifdef PARAMETER_UNIFORM\nuniform PRECISION float SHARPNESS_IMAGE;\nuniform PRECISION float SHARPNESS_EDGES;\nuniform PRECISION float GLOW_WIDTH;\nuniform PRECISION float GLOW_HEIGHT;\nuniform PRECISION float GLOW_HALATION;\nuniform PRECISION float GLOW_DIFFUSION;\nuniform PRECISION float MASK_COLORS;\nuniform PRECISION float MASK_STRENGTH;\nuniform PRECISION float MASK_SIZE;\nuniform PRECISION float SCANLINE_SIZE_MIN;\nuniform PRECISION float SCANLINE_SIZE_MAX;\nuniform PRECISION float GAMMA_INPUT;\nuniform PRECISION float GAMMA_OUTPUT;\nuniform PRECISION float BRIGHTNESS;\n#else\n#define SHARPNESS_IMAGE 1.0\n#define SHARPNESS_EDGES 3.0\n#define GLOW_WIDTH 0.5\n#define GLOW_HEIGHT 0.5\n#define GLOW_HALATION 0.1\n#define GLOW_DIFFUSION 0.05\n#define MASK_COLORS 2.0\n#define MASK_STRENGTH 0.3\n#define MASK_SIZE 1.0\n#define SCANLINE_SIZE_MIN 0.5\n#define SCANLINE_SIZE_MAX 1.5\n#define GAMMA_INPUT 2.4\n#define GAMMA_OUTPUT 2.4\n#define BRIGHTNESS 1.5\n#endif\n\n#define FIX(c) max(abs(c), 1e-5)\n#define PI 3.141592653589\n#define saturate(c) clamp(c, 0.0, 1.0)\n#define TEX2D(c) pow(tex2D(tex, c).rgb, vec3(GAMMA_INPUT))\n\nmat3 get_color_matrix(sampler2D tex, vec2 co, vec2 dx)\n{\nreturn mat3(TEX2D(co - dx), TEX2D(co), TEX2D(co + 
dx));\n}\n\nvec3 blur(mat3 m, float dist, float rad)\n{\nvec3 x = vec3(dist - 1.0, dist, dist + 1.0) / rad;\nvec3 w = exp2(x * x * -1.0);\n\nreturn (m[0] * w.x + m[1] * w.y + m[2] * w.z) / (w.x + w.y + w.z);\n}\n\nvec3 filter_gaussian(sampler2D tex, vec2 co, vec2 tex_size)\n{\nvec2 dx = vec2(1.0 / tex_size.x, 0.0);\nvec2 dy = vec2(0.0, 1.0 / tex_size.y);\nvec2 pix_co = co * tex_size;\nvec2 tex_co = (floor(pix_co) + 0.5) / tex_size;\nvec2 dist = (fract(pix_co) - 0.5) * -1.0;\n\nmat3 line0 = get_color_matrix(tex, tex_co - dy, dx);\nmat3 line1 = get_color_matrix(tex, tex_co, dx);\nmat3 line2 = get_color_matrix(tex, tex_co + dy, dx);\nmat3 column = mat3(blur(line0, dist.x, GLOW_WIDTH),\n blur(line1, dist.x, GLOW_WIDTH),\n blur(line2, dist.x, GLOW_WIDTH));\n\nreturn blur(column, dist.y, GLOW_HEIGHT);\n}\n\nvec3 filter_lanczos(sampler2D tex, vec2 co, vec2 tex_size, float sharp)\n{\ntex_size.x *= sharp;\n\nvec2 dx = vec2(1.0 / tex_size.x, 0.0);\nvec2 pix_co = co * tex_size - vec2(0.5, 0.0);\nvec2 tex_co = (floor(pix_co) + vec2(0.5, 0.0)) / tex_size;\nvec2 dist = fract(pix_co);\nvec4 coef = PI * vec4(dist.x + 1.0, dist.x, dist.x - 1.0, dist.x - 2.0);\n\ncoef = FIX(coef);\ncoef = 2.0 * sin(coef) * sin(coef / 2.0) / (coef * coef);\ncoef /= dot(coef, vec4(1.0));\n\nvec4 col1 = vec4(TEX2D(tex_co), 1.0);\nvec4 col2 = vec4(TEX2D(tex_co + dx), 1.0);\n\nreturn (mat4(col1, col1, col2, col2) * coef).rgb;\n}\n\nvec3 get_scanline_weight(float x, vec3 col)\n{\nvec3 beam = mix(vec3(SCANLINE_SIZE_MIN), vec3(SCANLINE_SIZE_MAX), col);\nvec3 x_mul = 2.0 / beam;\nvec3 x_offset = x_mul * 0.5;\n\nreturn smoothstep(0.0, 1.0, 1.0 - abs(x * x_mul - x_offset)) * x_offset;\n}\n\nvec3 get_mask_weight(float x)\n{\nfloat i = mod(floor(x * OutputSize.x * TextureSize.x / (InputSize.x * MASK_SIZE)), MASK_COLORS);\n\nif (i == 0.0) return mix(vec3(1.0, 0.0, 1.0), vec3(1.0, 0.0, 0.0), MASK_COLORS - 2.0);\nelse if (i == 1.0) return vec3(0.0, 1.0, 0.0);\nelse return vec3(0.0, 0.0, 1.0);\n}\n\nvoid main()\n{\nvec3 col_glow = filter_gaussian(Texture, Coord, TextureSize);\nvec3 col_soft = filter_lanczos(Texture, Coord, TextureSize, SHARPNESS_IMAGE);\nvec3 col_sharp = filter_lanczos(Texture, Coord, TextureSize, SHARPNESS_EDGES);\nvec3 col = sqrt(col_sharp * col_soft);\n\ncol *= get_scanline_weight(fract(Coord.y * TextureSize.y), col_soft);\ncol_glow = saturate(col_glow - col);\ncol += col_glow * col_glow * GLOW_HALATION;\ncol = mix(col, col * get_mask_weight(Coord.x) * MASK_COLORS, MASK_STRENGTH);\ncol += col_glow * GLOW_DIFFUSION;\ncol = pow(col * BRIGHTNESS, vec3(1.0 / GAMMA_OUTPUT));\n\nFragColor = vec4(col, 1.0);\n}\n\n#endif\n',
'crt-geom.glsl': '\n/*\nCRT-interlaced\n\nCopyright (C) 2010-2012 cgwg, Themaister and DOLLS\n\nThis program is free software; you can redistribute it and/or modify it\nunder the terms of the GNU General Public License as published by the Free\nSoftware Foundation; either version 2 of the License, or (at your option)\nany later version.\n\n(cgwg gave their consent to have the original version of this shader\ndistributed under the GPL in this message:\n\nhttp://board.byuu.org/viewtopic.php?p=26075#p26075\n\n"Feel free to distribute my shaders under the GPL. After all, the\nbarrel distortion code was taken from the Curvature shader, which is\nunder the GPL."\n)\nThis shader variant is pre-configured with screen curvature\n*/\n/*\n#pragma parameter CRTgamma "CRTGeom Target Gamma" 2.4 0.1 5.0 0.1\n#pragma parameter monitorgamma "CRTGeom Monitor Gamma" 2.2 0.1 5.0 0.1\n#pragma parameter d "CRTGeom Distance" 1.6 0.1 3.0 0.1\n#pragma parameter CURVATURE "CRTGeom Curvature Toggle" 1.0 0.0 1.0 1.0\n#pragma parameter R "CRTGeom Curvature Radius" 2.0 0.1 10.0 0.1\n#pragma parameter cornersize "CRTGeom Corner Size" 0.03 0.001 1.0 0.005\n#pragma parameter cornersmooth "CRTGeom Corner Smoothness" 1000.0 80.0 2000.0 100.0\n#pragma parameter x_tilt "CRTGeom Horizontal Tilt" 0.0 -0.5 0.5 0.05\n#pragma parameter y_tilt "CRTGeom Vertical Tilt" 0.0 -0.5 0.5 0.05\n#pragma parameter overscan_x "CRTGeom Horiz. Overscan %" 100.0 -125.0 125.0 1.0\n#pragma parameter overscan_y "CRTGeom Vert. Overscan %" 100.0 -125.0 125.0 1.0\n#pragma parameter DOTMASK "CRTGeom Dot Mask Toggle" 0.3 0.0 0.3 0.3\n#pragma parameter SHARPER "CRTGeom Sharpness" 1.0 1.0 3.0 1.0\n#pragma parameter scanline_weight "CRTGeom Scanline Weight" 0.3 0.1 0.5 0.05\n*/\n\n#ifndef PARAMETER_UNIFORM\n#define CRTgamma 2.4\n#define monitorgamma 2.2\n#define d 1.6\n#define CURVATURE 1.0\n#define R 2.0\n#define cornersize 0.03\n#define cornersmooth 1000.0\n#define x_tilt 0.0\n#define y_tilt 0.0\n#define overscan_x 100.0\n#define overscan_y 100.0\n#define DOTMASK 0.3\n#define SHARPER 1.0\n#define scanline_weight 0.3\n#endif\n\n#if defined(VERTEX)\n\n#if __VERSION__ >= 130\n#define COMPAT_VARYING out\n#define COMPAT_ATTRIBUTE in\n#define COMPAT_TEXTURE texture\n#else\n#define COMPAT_VARYING varying \n#define COMPAT_ATTRIBUTE attribute \n#define COMPAT_TEXTURE texture2D\n#endif\n\n#ifdef GL_ES\n#define COMPAT_PRECISION mediump\n#else\n#define COMPAT_PRECISION\n#endif\n\nCOMPAT_ATTRIBUTE vec4 VertexCoord;\nCOMPAT_ATTRIBUTE vec4 COLOR;\nCOMPAT_ATTRIBUTE vec4 TexCoord;\nCOMPAT_VARYING vec4 COL0;\nCOMPAT_VARYING vec4 TEX0;\n\nvec4 _oPosition1; \nuniform mat4 MVPMatrix;\nuniform COMPAT_PRECISION int FrameDirection;\nuniform COMPAT_PRECISION int FrameCount;\nuniform COMPAT_PRECISION vec2 OutputSize;\nuniform COMPAT_PRECISION vec2 TextureSize;\nuniform COMPAT_PRECISION vec2 InputSize;\n\nCOMPAT_VARYING vec2 overscan;\nCOMPAT_VARYING vec2 aspect;\nCOMPAT_VARYING vec3 stretch;\nCOMPAT_VARYING vec2 sinangle;\nCOMPAT_VARYING vec2 cosangle;\nCOMPAT_VARYING vec2 one;\nCOMPAT_VARYING float mod_factor;\nCOMPAT_VARYING vec2 ilfac;\n\n#ifdef PARAMETER_UNIFORM\nuniform COMPAT_PRECISION float CRTgamma;\nuniform COMPAT_PRECISION float monitorgamma;\nuniform COMPAT_PRECISION float d;\nuniform COMPAT_PRECISION float CURVATURE;\nuniform COMPAT_PRECISION float R;\nuniform COMPAT_PRECISION float cornersize;\nuniform COMPAT_PRECISION float cornersmooth;\nuniform COMPAT_PRECISION float x_tilt;\nuniform COMPAT_PRECISION float y_tilt;\nuniform COMPAT_PRECISION float 
overscan_x;\nuniform COMPAT_PRECISION float overscan_y;\nuniform COMPAT_PRECISION float DOTMASK;\nuniform COMPAT_PRECISION float SHARPER;\nuniform COMPAT_PRECISION float scanline_weight;\n#endif\n\n#define FIX(c) max(abs(c), 1e-5);\n\nfloat intersect(vec2 xy)\n{\nfloat A = dot(xy,xy)+d*d;\nfloat B = 2.0*(R*(dot(xy,sinangle)-d*cosangle.x*cosangle.y)-d*d);\nfloat C = d*d + 2.0*R*d*cosangle.x*cosangle.y;\nreturn (-B-sqrt(B*B-4.0*A*C))/(2.0*A);\n}\n\nvec2 bkwtrans(vec2 xy)\n{\nfloat c = intersect(xy);\nvec2 point = vec2(c)*xy;\npoint -= vec2(-R)*sinangle;\npoint /= vec2(R);\nvec2 tang = sinangle/cosangle;\nvec2 poc = point/cosangle;\nfloat A = dot(tang,tang)+1.0;\nfloat B = -2.0*dot(poc,tang);\nfloat C = dot(poc,poc)-1.0;\nfloat a = (-B+sqrt(B*B-4.0*A*C))/(2.0*A);\nvec2 uv = (point-a*sinangle)/cosangle;\nfloat r = R*acos(a);\nreturn uv*r/sin(r/R);\n}\n\nvec2 fwtrans(vec2 uv)\n{\nfloat r = FIX(sqrt(dot(uv,uv)));\nuv *= sin(r/R)/r;\nfloat x = 1.0-cos(r/R);\nfloat D = d/R + x*cosangle.x*cosangle.y+dot(uv,sinangle);\nreturn d*(uv*cosangle-x*sinangle)/D;\n}\n\nvec3 maxscale()\n{\nvec2 c = bkwtrans(-R * sinangle / (1.0 + R/d*cosangle.x*cosangle.y));\nvec2 a = vec2(0.5,0.5)*aspect;\nvec2 lo = vec2(fwtrans(vec2(-a.x,c.y)).x, fwtrans(vec2(c.x,-a.y)).y)/aspect;\nvec2 hi = vec2(fwtrans(vec2(+a.x,c.y)).x, fwtrans(vec2(c.x,+a.y)).y)/aspect;\nreturn vec3((hi+lo)*aspect*0.5,max(hi.x-lo.x,hi.y-lo.y));\n}\n\nvoid main()\n{\n// START of parameters\n\n// gamma of simulated CRT\n// CRTgamma = 1.8;\n// gamma of display monitor (typically 2.2 is correct)\n// monitorgamma = 2.2;\n// overscan (e.g. 1.02 for 2% overscan)\noverscan = vec2(1.00,1.00);\n// aspect ratio\naspect = vec2(1.0, 0.75);\n// lengths are measured in units of (approximately) the width\n// of the monitor simulated distance from viewer to monitor\n// d = 2.0;\n// radius of curvature\n// R = 1.5;\n// tilt angle in radians\n// (behavior might be a bit wrong if both components are\n// nonzero)\nconst vec2 angle = vec2(0.0,0.0);\n// size of curved corners\n// cornersize = 0.03;\n// border smoothness parameter\n// decrease if borders are too aliased\n// cornersmooth = 1000.0;\n\n// END of parameters\n\nvec4 _oColor;\nvec2 _otexCoord;\ngl_Position = VertexCoord.x * MVPMatrix[0] + VertexCoord.y * MVPMatrix[1] + VertexCoord.z * MVPMatrix[2] + VertexCoord.w * MVPMatrix[3];\n_oPosition1 = gl_Position;\n_oColor = COLOR;\n_otexCoord = TexCoord.xy;\nCOL0 = COLOR;\nTEX0.xy = TexCoord.xy;\n\n// Precalculate a bunch of useful values we\'ll need in the fragment\n// shader.\nsinangle = sin(vec2(x_tilt, y_tilt)) + vec2(0.001);//sin(vec2(max(abs(x_tilt), 1e-3), max(abs(y_tilt), 1e-3)));\ncosangle = cos(vec2(x_tilt, y_tilt)) + vec2(0.001);//cos(vec2(max(abs(x_tilt), 1e-3), max(abs(y_tilt), 1e-3)));\nstretch = maxscale();\n\nilfac = vec2(1.0,clamp(floor(InputSize.y/200.0), 1.0, 2.0));\n\n// The size of one texel, in texture-coordinates.\nvec2 sharpTextureSize = vec2(SHARPER * TextureSize.x, TextureSize.y);\none = ilfac / sharpTextureSize;\n\n// Resulting X pixel-coordinate of the pixel we\'re drawing.\nmod_factor = TexCoord.x * TextureSize.x * OutputSize.x / InputSize.x;\n\n}\n\n#elif defined(FRAGMENT)\n\n#if __VERSION__ >= 130\n#define COMPAT_VARYING in\n#define COMPAT_TEXTURE texture\nout vec4 FragColor;\n#else\n#define COMPAT_VARYING varying\n#define FragColor gl_FragColor\n#define COMPAT_TEXTURE texture2D\n#endif\n\n#ifdef GL_ES\n#ifdef GL_FRAGMENT_PRECISION_HIGH\nprecision highp float;\n#else\nprecision mediump float;\n#endif\n#define COMPAT_PRECISION 
mediump\n#else\n#define COMPAT_PRECISION\n#endif\n\nstruct output_dummy {\nvec4 _color;\n};\n\nuniform COMPAT_PRECISION int FrameDirection;\nuniform COMPAT_PRECISION int FrameCount;\nuniform COMPAT_PRECISION vec2 OutputSize;\nuniform COMPAT_PRECISION vec2 TextureSize;\nuniform COMPAT_PRECISION vec2 InputSize;\nuniform sampler2D Texture;\nCOMPAT_VARYING vec4 TEX0;\n\n// Comment the next line to disable interpolation in linear gamma (and\n// gain speed).\n#define LINEAR_PROCESSING\n\n// Enable screen curvature.\n// #define CURVATURE\n\n// Enable 3x oversampling of the beam profile\n#define OVERSAMPLE\n\n// Use the older, purely gaussian beam profile\n//#define USEGAUSSIAN\n\n// Macros.\n#define FIX(c) max(abs(c), 1e-5);\n#define PI 3.141592653589\n\n#ifdef LINEAR_PROCESSING\n# define TEX2D(c) pow(COMPAT_TEXTURE(Texture, (c)), vec4(CRTgamma))\n#else\n# define TEX2D(c) COMPAT_TEXTURE(Texture, (c))\n#endif\n\nCOMPAT_VARYING vec2 one;\nCOMPAT_VARYING float mod_factor;\nCOMPAT_VARYING vec2 ilfac;\nCOMPAT_VARYING vec2 overscan;\nCOMPAT_VARYING vec2 aspect;\nCOMPAT_VARYING vec3 stretch;\nCOMPAT_VARYING vec2 sinangle;\nCOMPAT_VARYING vec2 cosangle;\n\n#ifdef PARAMETER_UNIFORM\nuniform COMPAT_PRECISION float CRTgamma;\nuniform COMPAT_PRECISION float monitorgamma;\nuniform COMPAT_PRECISION float d;\nuniform COMPAT_PRECISION float CURVATURE;\nuniform COMPAT_PRECISION float R;\nuniform COMPAT_PRECISION float cornersize;\nuniform COMPAT_PRECISION float cornersmooth;\nuniform COMPAT_PRECISION float x_tilt;\nuniform COMPAT_PRECISION float y_tilt;\nuniform COMPAT_PRECISION float overscan_x;\nuniform COMPAT_PRECISION float overscan_y;\nuniform COMPAT_PRECISION float DOTMASK;\nuniform COMPAT_PRECISION float SHARPER;\nuniform COMPAT_PRECISION float scanline_weight;\n#endif\n\nfloat intersect(vec2 xy)\n{\nfloat A = dot(xy,xy)+d*d;\nfloat B = 2.0*(R*(dot(xy,sinangle)-d*cosangle.x*cosangle.y)-d*d);\nfloat C = d*d + 2.0*R*d*cosangle.x*cosangle.y;\nreturn (-B-sqrt(B*B-4.0*A*C))/(2.0*A);\n}\n\nvec2 bkwtrans(vec2 xy)\n{\nfloat c = intersect(xy);\nvec2 point = vec2(c)*xy;\npoint -= vec2(-R)*sinangle;\npoint /= vec2(R);\nvec2 tang = sinangle/cosangle;\nvec2 poc = point/cosangle;\nfloat A = dot(tang,tang)+1.0;\nfloat B = -2.0*dot(poc,tang);\nfloat C = dot(poc,poc)-1.0;\nfloat a = (-B+sqrt(B*B-4.0*A*C))/(2.0*A);\nvec2 uv = (point-a*sinangle)/cosangle;\nfloat r = FIX(R*acos(a));\nreturn uv*r/sin(r/R);\n}\n\nvec2 transform(vec2 coord)\n{\ncoord *= TextureSize / InputSize;\ncoord = (coord-vec2(0.5))*aspect*stretch.z+stretch.xy;\nreturn (bkwtrans(coord)/vec2(overscan_x / 100.0, overscan_y / 100.0)/aspect+vec2(0.5)) * InputSize / TextureSize;\n}\n\nfloat corner(vec2 coord)\n{\ncoord *= TextureSize / InputSize;\ncoord = (coord - vec2(0.5)) * vec2(overscan_x / 100.0, overscan_y / 100.0) + vec2(0.5);\ncoord = min(coord, vec2(1.0)-coord) * aspect;\nvec2 cdist = vec2(cornersize);\ncoord = (cdist - min(coord,cdist));\nfloat dist = sqrt(dot(coord,coord));\nreturn clamp((cdist.x-dist)*cornersmooth,0.0, 1.0);\n}\n\n// Calculate the influence of a scanline on the current pixel.\n//\n// \'distance\' is the distance in texture coordinates from the current\n// pixel to the scanline in question.\n// \'color\' is the colour of the scanline at the horizontal location of\n// the current pixel.\nvec4 scanlineWeights(float distance, vec4 color)\n{\n// "wid" controls the width of the scanline beam, for each RGB\n// channel The "weights" lines basically specify the formula\n// that gives you the profile of the beam, i.e. 
the intensity as\n// a function of distance from the vertical center of the\n// scanline. In this case, it is gaussian if width=2, and\n// becomes nongaussian for larger widths. Ideally this should\n// be normalized so that the integral across the beam is\n// independent of its width. That is, for a narrower beam\n// "weights" should have a higher peak at the center of the\n// scanline than for a wider beam.\n#ifdef USEGAUSSIAN\nvec4 wid = 0.3 + 0.1 * pow(color, vec4(3.0));\nvec4 weights = vec4(distance / wid);\nreturn 0.4 * exp(-weights * weights) / wid;\n#else\nvec4 wid = 2.0 + 2.0 * pow(color, vec4(4.0));\nvec4 weights = vec4(distance / scanline_weight);\nreturn 1.4 * exp(-pow(weights * inversesqrt(0.5 * wid), wid)) / (0.6 + 0.2 * wid);\n#endif\n}\n\nvoid main()\n{\n// Here\'s a helpful diagram to keep in mind while trying to\n// understand the code:\n//\n// | | | | |\n// -------------------------------\n// | | | | |\n// | 01 | 11 | 21 | 31 | <-- current scanline\n// | | @ | | |\n// -------------------------------\n// | | | | |\n// | 02 | 12 | 22 | 32 | <-- next scanline\n// | | | | |\n// -------------------------------\n// | | | | |\n//\n// Each character-cell represents a pixel on the output\n// surface, "@" represents the current pixel (always somewhere\n// in the bottom half of the current scan-line, or the top-half\n// of the next scanline). The grid of lines represents the\n// edges of the texels of the underlying texture.\n\n// Texture coordinates of the texel containing the active pixel.\nvec2 xy = (CURVATURE > 0.5) ? transform(TEX0.xy) : TEX0.xy;\n\nfloat cval = corner(xy);\n\n// Of all the pixels that are mapped onto the texel we are\n// currently rendering, which pixel are we currently rendering?\nvec2 ilvec = vec2(0.0,ilfac.y > 1.5 ? 
mod(float(FrameCount),2.0) : 0.0);\nvec2 ratio_scale = (xy * TextureSize - vec2(0.5) + ilvec)/ilfac;\n#ifdef OVERSAMPLE\nfloat filter_ = InputSize.y/OutputSize.y;//fwidth(ratio_scale.y);\n#endif\nvec2 uv_ratio = fract(ratio_scale);\n\n// Snap to the center of the underlying texel.\nxy = (floor(ratio_scale)*ilfac + vec2(0.5) - ilvec) / TextureSize;\n\n// Calculate Lanczos scaling coefficients describing the effect\n// of various neighbour texels in a scanline on the current\n// pixel.\nvec4 coeffs = PI * vec4(1.0 + uv_ratio.x, uv_ratio.x, 1.0 - uv_ratio.x, 2.0 - uv_ratio.x);\n\n// Prevent division by zero.\ncoeffs = FIX(coeffs);\n\n// Lanczos2 kernel.\ncoeffs = 2.0 * sin(coeffs) * sin(coeffs / 2.0) / (coeffs * coeffs);\n\n// Normalize.\ncoeffs /= dot(coeffs, vec4(1.0));\n\n// Calculate the effective colour of the current and next\n// scanlines at the horizontal location of the current pixel,\n// using the Lanczos coefficients above.\nvec4 col = clamp(mat4(\n TEX2D(xy + vec2(-one.x, 0.0)),\n TEX2D(xy),\n TEX2D(xy + vec2(one.x, 0.0)),\n TEX2D(xy + vec2(2.0 * one.x, 0.0))) * coeffs,\n 0.0, 1.0);\nvec4 col2 = clamp(mat4(\n TEX2D(xy + vec2(-one.x, one.y)),\n TEX2D(xy + vec2(0.0, one.y)),\n TEX2D(xy + one),\n TEX2D(xy + vec2(2.0 * one.x, one.y))) * coeffs,\n 0.0, 1.0);\n\n#ifndef LINEAR_PROCESSING\ncol = pow(col , vec4(CRTgamma));\ncol2 = pow(col2, vec4(CRTgamma));\n#endif\n\n// Calculate the influence of the current and next scanlines on\n// the current pixel.\nvec4 weights = scanlineWeights(uv_ratio.y, col);\nvec4 weights2 = scanlineWeights(1.0 - uv_ratio.y, col2);\n#ifdef OVERSAMPLE\nuv_ratio.y =uv_ratio.y+1.0/3.0*filter_;\nweights = (weights+scanlineWeights(uv_ratio.y, col))/3.0;\nweights2=(weights2+scanlineWeights(abs(1.0-uv_ratio.y), col2))/3.0;\nuv_ratio.y =uv_ratio.y-2.0/3.0*filter_;\nweights=weights+scanlineWeights(abs(uv_ratio.y), col)/3.0;\nweights2=weights2+scanlineWeights(abs(1.0-uv_ratio.y), col2)/3.0;\n#endif\n\nvec3 mul_res = (col * weights + col2 * weights2).rgb * vec3(cval);\n\n// dot-mask emulation:\n// Output pixels are alternately tinted green and magenta.\nvec3 dotMaskWeights = mix(\nvec3(1.0, 1.0 - DOTMASK, 1.0),\nvec3(1.0 - DOTMASK, 1.0, 1.0 - DOTMASK),\nfloor(mod(mod_factor, 2.0))\n);\n\nmul_res *= dotMaskWeights;\n\n// Convert the image gamma for display on our output device.\nmul_res = pow(mul_res, vec3(1.0 / monitorgamma));\n\n// Color the texel.\noutput_dummy _OUT;\n_OUT._color = vec4(mul_res, 1.0);\nFragColor = _OUT._color;\nreturn;\n} \n#endif\n\n'
},
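// The requires below pull in webpack modules by numeric id (an IndexedDB wrapper and an
// axios-style HTTP client, judging by how they are used further down). `_0x5ab74d` bundles
// the player UI lifecycle: addStyleHook tags the container, build injects the controls and
// the initial volume state, and toggleControls keeps the control bar hidden unless a dialog
// is open, playback is paused, or the bar is hovered/pressed.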
_0x5bb4c9 = _0x2c1832(0x39),
_0x50f370 = _0x2c1832.n(_0x5bb4c9),
_0x246d02 = _0x2c1832(0xb),
_0x550f17 = _0x2c1832.n(_0x246d02),
_0x5ab74d = {
'addStyleHook': function() {
_0x3a8e2f(this.elements.container, this.config.selectors.container.replace('.', ''), true), _0x3a8e2f(this.elements.container, this.config.classNames.uiSupported, true), _0x3a8e2f(this.elements.container, this.config.classNames.hideControls, true);
},
'build': function() {
this.listeners.media(), _0x1e2c68.element(this.elements.controls) || (_0x2593da.inject.call(this), this.listeners.controls()), this.volume = null, this.muted = null, _0x2593da.updateVolume.call(this), _0x3a8e2f(this.elements.container, this.config.classNames.isTouch, this.touch), this.ready = true;
},
'toggleControls': function(_0x4dbb7a) {
var _0x17edbf = this.elements.controls;
if (_0x17edbf) {
var _0x2c1832 = 0x0,
_0x4b4cd5 = _0x23ffa1.call(this, '.' .concat(_0x449eac({
'ejs__dialogs': true
}), ' > .').concat(_0x449eac({
'ejs__dialog': true
})));
Array.from(_0x4b4cd5).forEach(function(_0x208a40, _0xd7fcf0) {
true !== _0x208a40.hidden && (_0x2c1832 += 0x1);
}), _0x2c1832 > 0x0 ? this.toggleControls(!0x1) : this.toggleControls(Boolean(_0x4dbb7a || this.paused || _0x17edbf.pressed || _0x17edbf.hover));
}
}
};
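// `_0x5272a8` is the usual transpiled `typeof` helper (Symbol-safe). `_0x3189ba` is a
// MarcFile-style binary file wrapper: it accepts a DOM File (read via FileReader), another
// wrapper (cloned), an ArrayBuffer/typed array, or a plain byte length, and exposes an
// offset cursor plus endian-aware read/write helpers used by the ROM patchers below.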
function _0x5272a8(_0x4d422a) {
return (_0x5272a8 = 'function' == typeof Symbol && 'symbol' == typeof Symbol.iterator ? function(_0x241239) {
return typeof _0x241239;
} : function(_0x39f252) {
return _0x39f252 && 'function' == typeof Symbol && _0x39f252.constructor === Symbol && _0x39f252 !== Symbol.prototype ? 'symbol' : typeof _0x39f252;
})(_0x4d422a);
}
function _0x3189ba(_0x4380c5, _0x3c0d58) {
if ('object' === _0x5272a8(_0x4380c5) && _0x4380c5.files && (_0x4380c5 = _0x4380c5.files[0x0]), this.littleEndian = !0x1, this.offset = 0x0, this._lastRead = null, 'object' === _0x5272a8(_0x4380c5) && _0x4380c5.name && _0x4380c5.size) {
if ('function' != typeof window.FileReader) throw new Error('Incompatible Browser');
this.fileName = _0x4380c5.name, this.fileType = _0x4380c5.type, this.fileSize = _0x4380c5.size, this._fileReader = new FileReader(), this._fileReader.marcFile = this, this._fileReader.addEventListener('load', function() {
this.marcFile._u8array = new Uint8Array(this.result), this.marcFile._dataView = new DataView(this.result), _0x3c0d58 && _0x3c0d58.call();
}, !0x1), this._fileReader.readAsArrayBuffer(_0x4380c5);
} else if ('object' === _0x5272a8(_0x4380c5) && 'string' == typeof _0x4380c5.fileName && 'boolean' == typeof _0x4380c5.littleEndian) {
this.fileName = _0x4380c5.fileName, this.fileType = _0x4380c5.fileType, this.fileSize = _0x4380c5.fileSize;
// Clone-from-another-wrapper branch: the bundle allocated the buffer from the source object
// and the views from `this.fileType` (a string), which yields zero-length views and silently
// drops the copied bytes. Sizing everything by `this.fileSize` appears to be the intent.
var _0x2c1832 = new ArrayBuffer(this.fileSize);
this._u8array = new Uint8Array(_0x2c1832), this._dataView = new DataView(_0x2c1832), _0x4380c5.copyToFile(this, 0x0), _0x3c0d58 && _0x3c0d58.call();
} else if ('object' === _0x5272a8(_0x4380c5) && 'number' == typeof _0x4380c5.byteLength) this.fileName = 'file.bin', this.fileType = 'application/octet-stream', this.fileSize = _0x4380c5.byteLength, void 0x0 !== _0x4380c5.buffer && (_0x4380c5 = _0x4380c5.buffer), this._u8array = new Uint8Array(_0x4380c5), this._dataView = new DataView(_0x4380c5), _0x3c0d58 && _0x3c0d58.call();
else {
if ('number' != typeof _0x4380c5) throw new Error('Invalid source');
this.fileName = 'file.bin', this.fileType = 'application/octet-stream', this.fileSize = _0x4380c5;
_0x2c1832 = new ArrayBuffer(_0x4380c5);
this._u8array = new Uint8Array(_0x2c1832), this._dataView = new DataView(_0x2c1832), _0x3c0d58 && _0x3c0d58.call();
}
}
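// Prototype methods: IS_MACHINE_LITTLE_ENDIAN probes host endianness, seek/skip/isEOF manage
// the cursor, slice/copyToFile duplicate byte ranges, save() downloads the buffer as a Blob
// (falling back to the legacy BlobBuilder APIs), and readU8..readU32 / writeU8..writeU32
// honour the `littleEndian` flag; readString stops at the first NUL byte.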
_0x3189ba.IS_MACHINE_LITTLE_ENDIAN = (_0x13fb79 = new ArrayBuffer(0x2), new DataView(_0x13fb79).setInt16(0x0, 0x100, true), 0x100 === new Int16Array(_0x13fb79)[0x0]), _0x3189ba.prototype.seek = function(_0x13e235) {
this.offset = _0x13e235;
}, _0x3189ba.prototype.skip = function(_0xfa033c) {
this.offset += _0xfa033c;
}, _0x3189ba.prototype.isEOF = function() {
return !(this.offset < this.fileSize);
}, _0x3189ba.prototype.slice = function(_0x2bfcbb, _0x8026e) {
var _0x2c1832;
return _0x8026e = _0x8026e || this.fileSize - _0x2bfcbb, void 0x0 !== this._u8array.buffer.slice ? ((_0x2c1832 = new _0x3189ba(0x0)).fileSize = _0x8026e, _0x2c1832._u8array = new Uint8Array(this._u8array.buffer.slice(_0x2bfcbb, _0x2bfcbb + _0x8026e))) : (_0x2c1832 = new _0x3189ba(_0x8026e), this.copyToFile(_0x2c1832, _0x2bfcbb, _0x8026e, 0x0)), _0x2c1832.fileName = this.fileName, _0x2c1832.fileType = this.fileType, _0x2c1832.littleEndian = this.littleEndian, _0x2c1832;
}, _0x3189ba.prototype.copyToFile = function(_0x51f9b5, _0x1ada3e, _0x3997cd, _0x38ae1a) {
void 0x0 === _0x38ae1a && (_0x38ae1a = _0x1ada3e), _0x3997cd = _0x3997cd || this.fileSize - _0x1ada3e;
for (var _0x3bc3d8 = 0x0; _0x3bc3d8 < _0x3997cd; _0x3bc3d8++) _0x51f9b5._u8array[_0x38ae1a + _0x3bc3d8] = this._u8array[_0x1ada3e + _0x3bc3d8];
}, _0x3189ba.prototype.save = function() {
var _0xa88a13;
try {
_0xa88a13 = new Blob([this._u8array], {
'type': this.fileType
});
} catch (_0x1dfb06) {
if (window.BlobBuilder = window.BlobBuilder || window.WebKitBlobBuilder || window.MozBlobBuilder || window.MSBlobBuilder, 'InvalidStateError' !== _0x1dfb06.name || !window.BlobBuilder) throw new Error('Incompatible Browser');
var _0x17edbf = new BlobBuilder();
_0x17edbf.append(this._u8array.buffer), _0xa88a13 = _0x17edbf.getBlob(this.fileType);
}
saveAs(_0xa88a13, this.fileName);
}, _0x3189ba.prototype.readU8 = function() {
return this._lastRead = this._u8array[this.offset], this.offset++, this._lastRead;
}, _0x3189ba.prototype.readU16 = function() {
return this.littleEndian ? this._lastRead = this._u8array[this.offset] + (this._u8array[this.offset + 0x1] << 0x8) : this._lastRead = (this._u8array[this.offset] << 0x8) + this._u8array[this.offset + 0x1], this.offset += 0x2, this._lastRead >>> 0x0;
}, _0x3189ba.prototype.readU24 = function() {
return this.littleEndian ? this._lastRead = this._u8array[this.offset] + (this._u8array[this.offset + 0x1] << 0x8) + (this._u8array[this.offset + 0x2] << 0x10) : this._lastRead = (this._u8array[this.offset] << 0x10) + (this._u8array[this.offset + 0x1] << 0x8) + this._u8array[this.offset + 0x2], this.offset += 0x3, this._lastRead >>> 0x0;
}, _0x3189ba.prototype.readU32 = function() {
return this.littleEndian ? this._lastRead = this._u8array[this.offset] + (this._u8array[this.offset + 0x1] << 0x8) + (this._u8array[this.offset + 0x2] << 0x10) + (this._u8array[this.offset + 0x3] << 0x18) : this._lastRead = (this._u8array[this.offset] << 0x18) + (this._u8array[this.offset + 0x1] << 0x10) + (this._u8array[this.offset + 0x2] << 0x8) + this._u8array[this.offset + 0x3], this.offset += 0x4, this._lastRead >>> 0x0;
}, _0x3189ba.prototype.readBytes = function(_0x455cfd) {
this._lastRead = new Array(_0x455cfd);
for (var _0x17edbf = 0x0; _0x17edbf < _0x455cfd; _0x17edbf++) this._lastRead[_0x17edbf] = this._u8array[this.offset + _0x17edbf];
return this.offset += _0x455cfd, this._lastRead;
}, _0x3189ba.prototype.readString = function(_0x4fe914) {
this._lastRead = '';
for (var _0x17edbf = 0x0; _0x17edbf < _0x4fe914 && this.offset + _0x17edbf < this.fileSize && this._u8array[this.offset + _0x17edbf] > 0x0; _0x17edbf++) this._lastRead = this._lastRead + String.fromCharCode(this._u8array[this.offset + _0x17edbf]);
return this.offset += _0x4fe914, this._lastRead;
}, _0x3189ba.prototype.writeU8 = function(_0x276206) {
this._u8array[this.offset] = _0x276206, this.offset++;
}, _0x3189ba.prototype.writeU16 = function(_0x1bfd8f) {
this.littleEndian ? (this._u8array[this.offset] = 0xff & _0x1bfd8f, this._u8array[this.offset + 0x1] = _0x1bfd8f >> 0x8) : (this._u8array[this.offset] = _0x1bfd8f >> 0x8, this._u8array[this.offset + 0x1] = 0xff & _0x1bfd8f), this.offset += 0x2;
}, _0x3189ba.prototype.writeU24 = function(_0x3d6067) {
this.littleEndian ? (this._u8array[this.offset] = 0xff & _0x3d6067, this._u8array[this.offset + 0x1] = (0xff00 & _0x3d6067) >> 0x8, this._u8array[this.offset + 0x2] = (0xff0000 & _0x3d6067) >> 0x10) : (this._u8array[this.offset] = (0xff0000 & _0x3d6067) >> 0x10, this._u8array[this.offset + 0x1] = (0xff00 & _0x3d6067) >> 0x8, this._u8array[this.offset + 0x2] = 0xff & _0x3d6067), this.offset += 0x3;
}, _0x3189ba.prototype.writeU32 = function(_0xa4dd26) {
this.littleEndian ? (this._u8array[this.offset] = 0xff & _0xa4dd26, this._u8array[this.offset + 0x1] = (0xff00 & _0xa4dd26) >> 0x8, this._u8array[this.offset + 0x2] = (0xff0000 & _0xa4dd26) >> 0x10, this._u8array[this.offset + 0x3] = (0xff000000 & _0xa4dd26) >> 0x18) : (this._u8array[this.offset] = (0xff000000 & _0xa4dd26) >> 0x18, this._u8array[this.offset + 0x1] = (0xff0000 & _0xa4dd26) >> 0x10, this._u8array[this.offset + 0x2] = (0xff00 & _0xa4dd26) >> 0x8, this._u8array[this.offset + 0x3] = 0xff & _0xa4dd26), this.offset += 0x4;
}, _0x3189ba.prototype.writeBytes = function(_0x5ad6b9) {
for (var _0x17edbf = 0x0; _0x17edbf < _0x5ad6b9.length; _0x17edbf++) this._u8array[this.offset + _0x17edbf] = _0x5ad6b9[_0x17edbf];
this.offset += _0x5ad6b9.length;
}, _0x3189ba.prototype.writeString = function(_0x58c146, _0x23e582) {
_0x23e582 = _0x23e582 || _0x58c146.length;
for (var _0x2c1832 = 0x0; _0x2c1832 < _0x58c146.length && _0x2c1832 < _0x23e582; _0x2c1832++) this._u8array[this.offset + _0x2c1832] = _0x58c146.charCodeAt(_0x2c1832);
for (; _0x2c1832 < _0x23e582; _0x2c1832++) this._u8array[this.offset + _0x2c1832] = 0x0;
this.offset += _0x23e582;
};
var _0x863031 = _0x3189ba,
_0x34e7cf = 0x0;
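// IPS patch support. An IPS file is the ASCII header "PATCH", then records of a 3-byte
// offset followed by a 2-byte length and payload (a zero length marks an RLE record:
// 2-byte run length plus one fill byte), terminated by "EOF" and an optional 3-byte
// truncate size. `_0x288092` parses that layout into `_0x454881` (the record container),
// whose apply() grows the target file as needed and replays each record.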
function _0x454881() {
this.records = [], this.truncate = !0x1;
}
function _0x288092(_0x1ab789) {
var _0x17edbf = new _0x454881();
_0x1ab789.seek(0x5);
for (var _0x2c1832 = 0x0, _0x195f00 = 0x0; !_0x1ab789.isEOF();) {
if (0x454f46 === (_0x2c1832 = _0x1ab789.readU24())) {
if (_0x1ab789.isEOF()) break;
if (_0x1ab789.offset + 0x3 === _0x1ab789.fileSize) {
_0x17edbf.truncate = _0x1ab789.readU24();
break;
}
}(_0x195f00 = _0x1ab789.readU16()) === _0x34e7cf ? _0x17edbf.addRLERecord(_0x2c1832, _0x1ab789.readU16(), _0x1ab789.readU8()) : _0x17edbf.addSimpleRecord(_0x2c1832, _0x1ab789.readBytes(_0x195f00));
}
return _0x17edbf;
}
_0x454881.prototype.addSimpleRecord = function(_0x37166f, _0x2e61e0) {
this.records.push({
'offset': _0x37166f,
'type': 0x1,
'length': _0x2e61e0.length,
'data': _0x2e61e0
});
}, _0x454881.prototype.addRLERecord = function(_0x4ae2fb, _0x507845, _0x92bb02) {
this.records.push({
'offset': _0x4ae2fb,
'type': _0x34e7cf,
'length': _0x507845,
'byte': _0x92bb02
});
}, _0x454881.prototype.toString = function() {
var nSimpleRecords = 0x0, nRLERecords = 0x0; // declare locally instead of leaking globals
for (var _0xa88a13 = 0x0; _0xa88a13 < this.records.length; _0xa88a13++) this.records[_0xa88a13].type === _0x34e7cf ? nRLERecords++ : nSimpleRecords++;
var _0x17edbf = 'Simple records: ' + nSimpleRecords;
return _0x17edbf += '\nRLE records: ' + nRLERecords, _0x17edbf += '\nTotal records: ' + this.records.length, this.truncate && (_0x17edbf += '\nTruncate at: 0x' + this.truncate.toString(0x10)), _0x17edbf;
}, _0x454881.prototype.export = function(_0x4d18dc) {
for (var _0x17edbf = 0x5, _0x2c1832 = 0x0; _0x2c1832 < this.records.length; _0x2c1832++) this.records[_0x2c1832].type === _0x34e7cf ? _0x17edbf += 0x8 : _0x17edbf += 0x5 + this.records[_0x2c1832].data.length;
_0x17edbf += 0x3, this.truncate && (_0x17edbf += 0x3), tempFile = new _0x863031(_0x17edbf), tempFile.fileName = _0x4d18dc + '.ips', tempFile.writeString('PATCH');
for (_0x2c1832 = 0x0; _0x2c1832 < this.records.length; _0x2c1832++) {
var _0x512785 = this.records[_0x2c1832];
tempFile.writeU24(_0x512785.offset), _0x512785.type === _0x34e7cf ? (tempFile.writeU16(0x0), tempFile.writeU16(_0x512785.length), tempFile.writeU8(_0x512785.byte)) : (tempFile.writeU16(_0x512785.data.length), tempFile.writeBytes(_0x512785.data));
}
return tempFile.writeString('EOF'), this.truncate && tempFile.writeU24(this.truncate), tempFile; // the truncate footer belongs to the patch itself, not to the last record
}, _0x454881.prototype.apply = function(_0xefa71d) {
var _0x17edbf;
if (this.truncate) _0x17edbf = _0xefa71d.slice(0x0, this.truncate);
else {
for (var _0x2c1832 = _0xefa71d.fileSize, _0x5a0a71 = 0x0; _0x5a0a71 < this.records.length; _0x5a0a71++) {
var _0x105576 = this.records[_0x5a0a71];
_0x105576.type === _0x34e7cf ? _0x105576.offset + _0x105576.length > _0x2c1832 && (_0x2c1832 = _0x105576.offset + _0x105576.length) : _0x105576.offset + _0x105576.data.length > _0x2c1832 && (_0x2c1832 = _0x105576.offset + _0x105576.data.length);
}
_0x2c1832 === _0xefa71d.fileSize ? _0x17edbf = _0xefa71d.slice(0x0, _0xefa71d.fileSize) : (_0x17edbf = new _0x863031(_0x2c1832), _0xefa71d.copyToFile(_0x17edbf, 0x0));
}
_0xefa71d.seek(0x0);
for (_0x5a0a71 = 0x0; _0x5a0a71 < this.records.length; _0x5a0a71++)
if (_0x17edbf.seek(this.records[_0x5a0a71].offset), this.records[_0x5a0a71].type === _0x34e7cf)
for (var _0x729105 = 0x0; _0x729105 < this.records[_0x5a0a71].length; _0x729105++) _0x17edbf.writeU8(this.records[_0x5a0a71].byte);
else _0x17edbf.writeBytes(this.records[_0x5a0a71].data);
return _0x17edbf;
};
'0123456789abcdef' .split(''); // result is discarded; looks like a leftover hex-digit table from a hashing helper
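// Standard CRC-32 (polynomial 0xEDB88320) lookup table; `_0x2d78e9` runs the byte-wise CRC
// over a wrapper's backing buffer, optionally starting at an offset and optionally ignoring
// the last four bytes (used to checksum a patch minus its own stored CRC).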
var _0x1aa7ba = function() {
for (var _0xa88a13, _0x17edbf = [], _0x2c1832 = 0x0; _0x2c1832 < 0x100; _0x2c1832++) {
_0xa88a13 = _0x2c1832;
for (var _0x32bcf1 = 0x0; _0x32bcf1 < 0x8; _0x32bcf1++) _0xa88a13 = 0x1 & _0xa88a13 ? 0xedb88320 ^ _0xa88a13 >>> 0x1 : _0xa88a13 >>> 0x1;
_0x17edbf[_0x2c1832] = _0xa88a13;
}
return _0x17edbf;
}();
function _0x2d78e9(_0x509ecf, _0x4219e1, _0xd66d2b) {
for (var _0x185f93 = _0x4219e1 ? new Uint8Array(_0x509ecf._u8array.buffer, _0x4219e1) : _0x509ecf._u8array, _0x20e2c1 = -0x1, _0x3d100a = _0xd66d2b ? _0x185f93.length - 0x4 : _0x185f93.length, _0x56a6e6 = 0x0; _0x56a6e6 < _0x3d100a; _0x56a6e6++) _0x20e2c1 = _0x20e2c1 >>> 0x8 ^ _0x1aa7ba[0xff & (_0x20e2c1 ^ _0x185f93[_0x56a6e6])];
return (-0x1 ^ _0x20e2c1) >>> 0x0;
}
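// BPS patch support. A BPS file starts with "BPS1", then variable-length-encoded source
// size, target size and metadata, a stream of actions (type 0 SourceRead, 1 TargetRead,
// 2 SourceCopy, 3 TargetCopy, each carrying a length and, for the copy actions, a signed
// relative offset), and finally three CRC32s: source, target and the patch itself.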
var _0x37eff4 = 0x0,
_0x34d681 = 0x1,
_0x4c50e7 = 0x2,
_0xf9eeea = 0x3;
function _0x5b02d3() {
this.sourceSize = 0x0, this.targetSize = 0x0, this.metaData = '', this.actions = [], this.sourceChecksum = 0x0, this.targetChecksum = 0x0, this.patchChecksum = 0x0;
}
function _0x356089(_0x1bf770) {
_0x1bf770.readVLV = _0x45da2c, _0x1bf770.littleEndian = true;
var _0x17edbf = new _0x5b02d3();
_0x1bf770.seek(0x4), _0x17edbf.sourceSize = _0x1bf770.readVLV(), _0x17edbf.targetSize = _0x1bf770.readVLV();
var _0x2c1832 = _0x1bf770.readVLV();
_0x2c1832 && (_0x17edbf.metaData = _0x1bf770.readString(_0x2c1832));
for (var _0x157bba = _0x1bf770.fileSize - 0xc; _0x1bf770.offset < _0x157bba;) {
var _0x31dcc2 = _0x1bf770.readVLV(),
_0x21bafe = {
'type': 0x3 & _0x31dcc2,
'length': 0x1 + (_0x31dcc2 >> 0x2)
};
if (_0x21bafe.type === _0x34d681) _0x21bafe.bytes = _0x1bf770.readBytes(_0x21bafe.length);
else if (_0x21bafe.type === _0x4c50e7 || _0x21bafe.type === _0xf9eeea) {
var _0x4c6327 = _0x1bf770.readVLV();
_0x21bafe.relativeOffset = (0x1 & _0x4c6327 ? -0x1 : 0x1) * (_0x4c6327 >> 0x1);
}
_0x17edbf.actions.push(_0x21bafe);
}
if (_0x17edbf.sourceChecksum = _0x1bf770.readU32(), _0x17edbf.targetChecksum = _0x1bf770.readU32(), _0x17edbf.patchChecksum = _0x1bf770.readU32(), _0x17edbf.patchChecksum !== _0x2d78e9(_0x1bf770, 0x0, true)) throw new Error('error_crc_patch');
return _0x17edbf;
}
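// readVLV/writeVLV implement the BPS variable-length number encoding: seven data bits per
// byte, the high bit marks the final byte, and each continuation byte implicitly adds the
// next power of 128 so every value has exactly one encoding. For example 0 encodes as
// [0x80], 127 as [0xFF], and 128 as [0x00, 0x80] (0 plus 128 from the continuation step).
// `_0x5a4975` only measures how many bytes a value would take, for sizing exported patches.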
function _0x45da2c() {
for (var _0xa88a13 = 0x0, _0x17edbf = 0x1;;) {
var _0x2c1832 = this.readU8();
if (_0xa88a13 += (0x7f & _0x2c1832) * _0x17edbf, 0x80 & _0x2c1832) break;
_0xa88a13 += _0x17edbf <<= 0x7;
}
return this._lastRead = _0xa88a13, _0xa88a13;
}
function _0x152a53(_0x33f58d) {
for (;;) {
var _0x17edbf = 0x7f & _0x33f58d;
if (0x0 === (_0x33f58d >>= 0x7)) {
this.writeU8(0x80 | _0x17edbf);
break;
}
this.writeU8(_0x17edbf), _0x33f58d--;
}
}
function _0x5a4975(_0x221175) {
for (var _0x17edbf = 0x0;;) {
if (0x0 === (_0x221175 >>= 0x7)) {
_0x17edbf++;
break;
}
_0x17edbf++, _0x221175--;
}
return _0x17edbf;
}
function _0x562e68() {
this.offset = 0x0, this.next = null;
}
_0x5b02d3.prototype.toString = function() {
var _0xa88a13 = 'Source size: ' + this.sourceSize;
return _0xa88a13 += '\nTarget size: ' + this.targetSize, _0xa88a13 += '\nMetadata: ' + this.metaData, _0xa88a13 += '\n#Actions: ' + this.actions.length;
}, _0x5b02d3.prototype.validateSource = function(_0x48d2df, _0x3c15dd) {
return this.sourceChecksum === _0x2d78e9(_0x48d2df, _0x3c15dd);
}, _0x5b02d3.prototype.apply = function(_0x393f39, _0x76e509) {
if (_0x76e509 && !this.validateSource(_0x393f39)) throw new Error('error_crc_input');
for (var _0x2c1832 = new _0x863031(this.targetSize), _0x4df6ae = 0x0, _0xad0fa9 = 0x0, _0x38240a = 0x0; _0x38240a < this.actions.length; _0x38240a++) {
var _0x4b1026 = this.actions[_0x38240a];
if (_0x4b1026.type === _0x37eff4) _0x393f39.copyToFile(_0x2c1832, _0x2c1832.offset, _0x4b1026.length), _0x2c1832.skip(_0x4b1026.length);
else if (_0x4b1026.type === _0x34d681) _0x2c1832.writeBytes(_0x4b1026.bytes);
else if (_0x4b1026.type === _0x4c50e7) {
_0x4df6ae += _0x4b1026.relativeOffset;
for (var _0x5c2263 = _0x4b1026.length; _0x5c2263--;) _0x2c1832.writeU8(_0x393f39._u8array[_0x4df6ae]), _0x4df6ae++;
} else if (_0x4b1026.type === _0xf9eeea) {
_0xad0fa9 += _0x4b1026.relativeOffset;
for (_0x5c2263 = _0x4b1026.length; _0x5c2263--;) _0x2c1832.writeU8(_0x2c1832._u8array[_0xad0fa9]), _0xad0fa9++;
}
}
if (_0x76e509 && this.targetChecksum !== _0x2d78e9(_0x2c1832)) throw new Error('error_crc_output');
return _0x2c1832;
}, _0x5b02d3.prototype.export = function(_0x3e1e58) {
var _0x17edbf = 'BPS1' .length;
_0x17edbf += _0x5a4975(this.sourceSize), _0x17edbf += _0x5a4975(this.targetSize), _0x17edbf += _0x5a4975(this.metaData.length), _0x17edbf += this.metaData.length;
for (var _0x2c1832 = 0x0; _0x2c1832 < this.actions.length; _0x2c1832++) {
_0x17edbf += _0x5a4975(((_0x38dad6 = this.actions[_0x2c1832]).length - 0x1 << 0x2) + _0x38dad6.type), _0x38dad6.type === _0x34d681 ? _0x17edbf += _0x38dad6.length : _0x38dad6.type !== _0x4c50e7 && _0x38dad6.type !== _0xf9eeea || (_0x17edbf += _0x5a4975((Math.abs(_0x38dad6.relativeOffset) << 0x1) + (_0x38dad6.relativeOffset < 0x0 ? 0x1 : 0x0)));
}
var _0x2d1555 = new _0x863031(_0x17edbf += 0xc);
_0x2d1555.fileName = _0x3e1e58 + '.bps', _0x2d1555.littleEndian = true, _0x2d1555.writeVLV = _0x152a53, _0x2d1555.writeString('BPS1'), _0x2d1555.writeVLV(this.sourceSize), _0x2d1555.writeVLV(this.targetSize), _0x2d1555.writeVLV(this.metaData.length), _0x2d1555.writeString(this.metaData, this.metaData.length);
for (_0x2c1832 = 0x0; _0x2c1832 < this.actions.length; _0x2c1832++) {
var _0x38dad6 = this.actions[_0x2c1832];
_0x2d1555.writeVLV((_0x38dad6.length - 0x1 << 0x2) + _0x38dad6.type), _0x38dad6.type === _0x34d681 ? _0x2d1555.writeBytes(_0x38dad6.bytes) : _0x38dad6.type !== _0x4c50e7 && _0x38dad6.type !== _0xf9eeea || _0x2d1555.writeVLV((Math.abs(_0x38dad6.relativeOffset) << 0x1) + (_0x38dad6.relativeOffset < 0x0 ? 0x1 : 0x0));
}
return _0x2d1555.writeU32(this.sourceChecksum), _0x2d1555.writeU32(this.targetChecksum), _0x2d1555.writeU32(this.patchChecksum), _0x2d1555;
}, _0x562e68.prototype.delete = function() {
this.next && delete this.next;
};
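// UPS patch support. A UPS file is "UPS1", VLV-encoded input and output sizes, then records
// of a VLV relative offset followed by XOR data terminated by a 0x00 byte, and three
// little-endian CRC32s (input, output, patch). apply() copies the input and XORs each
// record's bytes in place at the accumulated offsets.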
var _0x7a6485 = 'UPS1';
function _0x42cd9e() {
this.records = [], this.sizeInput = 0x0, this.sizeOutput = 0x0, this.checksumInput = 0x0, this.checksumOutput = 0x0;
}
function _0x10dc1d(_0x5d0c48) {
for (;;) {
var _0x17edbf = 0x7f & _0x5d0c48;
if (0x0 === (_0x5d0c48 >>= 0x7)) {
this.writeU8(0x80 | _0x17edbf);
break;
}
this.writeU8(_0x17edbf), _0x5d0c48 -= 0x1;
}
}
function _0x454043() {
for (var _0xa88a13 = 0x0, _0x17edbf = 0x1;;) {
var _0x2c1832 = this.readU8();
if (-0x1 == _0x2c1832) throw new Error('Can\'t read UPS VLV at 0x' + (this.offset - 0x1).toString(0x10));
if (_0xa88a13 += (0x7f & _0x2c1832) * _0x17edbf, 0x0 != (0x80 & _0x2c1832)) break;
_0xa88a13 += _0x17edbf <<= 0x7;
}
return _0xa88a13;
}
function _0xca245c(_0x459173) {
for (var _0x17edbf = 0x0;;) {
if (_0x17edbf++, 0x0 === (_0x459173 >>= 0x7)) break;
_0x459173 -= 0x1;
}
return _0x17edbf;
}
function _0xd2202f(_0x5abb5e) {
var _0x17edbf = new _0x42cd9e();
_0x5abb5e.readVLV = _0x454043, _0x5abb5e.seek(_0x7a6485.length), _0x17edbf.sizeInput = _0x5abb5e.readVLV(), _0x17edbf.sizeOutput = _0x5abb5e.readVLV();
for (; _0x5abb5e.offset < _0x5abb5e.fileSize - 0xc;) {
for (var _0x2c1832 = _0x5abb5e.readVLV(), _0x3bca86 = []; _0x5abb5e.readU8();) _0x3bca86.push(_0x5abb5e._lastRead);
_0x17edbf.addRecord(_0x2c1832, _0x3bca86);
}
return _0x5abb5e.littleEndian = true, _0x17edbf.checksumInput = _0x5abb5e.readU32(), _0x17edbf.checksumOutput = _0x5abb5e.readU32(), _0x5abb5e.littleEndian = !0x1, _0x17edbf;
}
_0x42cd9e.prototype.addRecord = function(_0x162ca5, _0x39594d) {
this.records.push({
'offset': _0x162ca5,
'XORdata': _0x39594d
});
}, _0x42cd9e.prototype.toString = function() {
var _0xa88a13 = 'Records: ' + this.records.length; // `(void 0x0)` here would always throw; `this` is clearly what was meant
return _0xa88a13 += '\nInput file size: ' + this.sizeInput, _0xa88a13 += '\nOutput file size: ' + this.sizeOutput, _0xa88a13 += '\nInput file checksum: ' + padZeroes(this.checksumInput, 0x4), _0xa88a13 += '\nOutput file checksum: ' + padZeroes(this.checksumOutput, 0x4);
}, _0x42cd9e.prototype.export = function(_0x1a3e8b) {
var _0x17edbf = _0x7a6485.length;
_0x17edbf += _0xca245c(this.sizeInput), _0x17edbf += _0xca245c(this.sizeOutput);
for (var _0x2c1832 = 0x0; _0x2c1832 < this.records.length; _0x2c1832++) _0x17edbf += _0xca245c(this.records[_0x2c1832].offset), _0x17edbf += this.records[_0x2c1832].XORdata.length + 0x1;
_0x17edbf += 0xc, tempFile = new _0x863031(_0x17edbf), tempFile.writeVLV = _0x10dc1d, tempFile.fileName = _0x1a3e8b + '.ups', tempFile.writeString(_0x7a6485), tempFile.writeVLV(this.sizeInput), tempFile.writeVLV(this.sizeOutput);
for (_0x2c1832 = 0x0; _0x2c1832 < this.records.length; _0x2c1832++) tempFile.writeVLV(this.records[_0x2c1832].offset), tempFile.writeBytes(this.records[_0x2c1832].XORdata), tempFile.writeU8(0x0);
return tempFile.littleEndian = true, tempFile.writeU32(this.checksumInput), tempFile.writeU32(this.checksumOutput), tempFile.writeU32(_0x2d78e9(tempFile, 0x0, true)), tempFile;
}, _0x42cd9e.prototype.validateSource = function(_0x25cb26, _0xd0163c) {
return _0x2d78e9(_0x25cb26, _0xd0163c) === this.checksumInput;
}, _0x42cd9e.prototype.apply = function(_0x21d682, _0x41bbac) {
// The source-CRC check had been reduced to a discarded expression; mirroring the BPS path
// above, it presumably should reject a mismatching input.
if (_0x41bbac && !this.validateSource(_0x21d682)) throw new Error('error_crc_input');
tempFile = new _0x863031(this.sizeOutput), _0x21d682.copyToFile(tempFile, 0x0, this.sizeInput), _0x21d682.seek(0x0);
for (var _0x2c1832 = 0x0; _0x2c1832 < this.records.length; _0x2c1832++) {
var _0x5ed6c9 = this.records[_0x2c1832];
tempFile.skip(_0x5ed6c9.offset), _0x21d682.skip(_0x5ed6c9.offset);
for (var _0x3faf05 = 0x0; _0x3faf05 < _0x5ed6c9.XORdata.length; _0x3faf05++) tempFile.writeU8((_0x21d682.isEOF() ? 0x0 : _0x21d682.readU8()) ^ _0x5ed6c9.XORdata[_0x3faf05]);
tempFile.skip(0x1), _0x21d682.skip(0x1);
}
// Likewise, the output CRC was computed but never compared; this mirrors the BPS 'error_crc_output' behaviour.
if (_0x41bbac && _0x2d78e9(tempFile) !== this.checksumOutput) throw new Error('error_crc_output');
return tempFile;
};
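// `_0x4d7024` is the emulator runtime object: it keeps the loaded core files and version,
// the Emscripten FS/Module handles once the core boots, save-file names, the canvas aspect
// ratio, an optional ROM patch, and a coarse mobile-device flag; its create() method below
// builds the loading screen and boots everything.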
var _0xc778fa = _0x2c1832(0xa0),
_0x5a641d = _0x2c1832.n(_0xc778fa),
_0x4d7024 = {
'romdb': null,
'supportBatterySave': !0x1,
'hash': '2b35cacf70ae',
'hash2': 'f5cbb3f38c0bb20e4',
'hash3': '88cc8ad0c350400499a0',
'loading': null,
'gamePatch': null,
'saveFilenames': [],
'FS': null,
'Module': null,
'aspectRatio': 0x4 / 0x3,
'memData': null,
'wasmData': null,
'coreFileData': {},
'coreFileName': '',
'coreFileVersion': '',
'isMobileDevice': !!/Android|webOS|iPhone|iPad|iPod|BB10|BlackBerry|IEMobile|Opera Mini|Mobile|mobile/i .test(navigator.userAgent || ''),
'create': function() {
var _0xa88a13 = this;
window.URL = window.URL || window.webkitURL;
var _0x17edbf, _0x2c1832, _0x4ce206, _0x528f2b = !0x1,
_0x547484 = !0x1,
_0x5e24fa = !0x1,
_0x468801 = 0x0,
_0x31a5b3 = !0x1,
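// Two lookup tables follow: `_0x1e0766` maps each supported core to the ROM file extensions
// it accepts, and `_0x29078e` maps the public system name passed in the player config to
// the internal core id (e.g. 'segaMD' and 'segaGG' both resolve to the 'sega' core).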
_0x1e0766 = {
'psx': ['bin', 'iso', 'cue', 'img', 'mdf', 'pbp', 'toc', 'cbn', 'm3u'],
'nds': ['nds', 'bin'],
'nes': ['fds', 'nes', 'unif', 'unf'],
'snes': ['smc', 'fig', 'sfc', 'gd3', 'gd7', 'dx2', 'bsx', 'swc'],
'snes2002': ['smc', 'fig', 'sfc', 'gd3', 'gd7', 'dx2', 'bsx', 'swc'],
'snes2005': ['smc', 'fig', 'sfc', 'gd3', 'gd7', 'dx2', 'bsx', 'swc'],
'snes2010': ['smc', 'fig', 'sfc', 'gd3', 'gd7', 'dx2', 'bsx', 'swc'],
'gb': ['gb', 'gbc', 'dmg'],
'gba': ['gb', 'gbc', 'gba'],
'vbanext': ['gba'],
'vb': ['vb', 'vboy', 'bin'],
'3do': ['iso', 'bin', 'cue'],
'lynx': ['lnx'],
'jaguar': ['j64', 'jag', 'rom', 'abs', 'cof', 'bin', 'prg'],
'a7800': ['a78', 'bin'],
'a2600': ['a26', 'bin'],
'ngp': ['ngp', 'ngc'],
'n64': ['n64', 'v64', 'z64', 'bin', 'u1', 'ndd'],
'pce': ['pce', 'bin', 'iso', 'cue', 'ccd', 'img', 'chd'],
'sega': ['mdx', 'md', 'smd', 'gen', 'bin', 'iso', 'cue', 'sms', 'gg', 'sg', '68k', 'chd'],
'segacd': ['mdx', 'md', 'smd', 'gen', 'bin', 'iso', 'cue', 'sms', 'gg', 'sg', '68k', 'chd'],
'32x': ['32x', 'bin', 'gen', 'smd', 'md', 'cue', 'iso', 'sms'],
'saturn': ['bin', 'cue', 'iso'],
'msx': ['rom', 'mx1', 'mx2', 'dsk', 'cas'],
'bluemsx': ['rom', 'ri', 'mx1', 'mx2', 'col', 'dsk', 'cas', 'sg', 'sc', 'm3u'],
'ws': ['ws', 'wsc'],
'arcade': ['zip'],
'fba0.2.97.29': ['zip'],
'mame2003': ['zip'],
'mame': ['zip']
},
_0x29078e = {
'psx': 'psx',
'nds': 'nds',
'nes': 'nes',
'snes': 'snes',
'snes2002': 'snes2002',
'snes2005': 'snes2005',
'snes2010': 'snes2010',
'gb': 'gb',
'gbc': 'gb',
'gba': 'gba',
'vbanext': 'vbanext',
'vb': 'vb',
'n64': 'n64',
'3do': '3do',
'segaMS': 'sega',
'segaGG': 'sega',
'segaMD': 'sega',
'segaCD': 'segacd',
'sega32x': '32x',
'segaSaturn': 'saturn',
'ngp': 'ngp',
'pce': 'pce',
'msx': 'bluemsx',
'atari2600': 'a2600',
'atari7800': 'a7800',
'lynx': 'lynx',
'ws': 'ws',
'arcade': 'arcade',
'fba0.2.97.29': 'fba0.2.97.29',
'jaguar': 'jaguar',
'mame2003': 'mame2003'
},
_0xc6823 = this,
_0x1143c5 = _0xc6823.system;
_0x17edbf = (_0xc6823.customPaths && typeof _0xc6823.customPaths['extract7z.js'] == 'string') ? _0xc6823.customPaths['extract7z.js'] : (_0xc6823.dataPath + 'extract7z.js');
_0x2c1832 = (_0xc6823.customPaths && typeof _0xc6823.customPaths['extractzip.js'] == 'string') ? _0xc6823.customPaths['extractzip.js'] : (_0xc6823.dataPath + 'extractzip.js?v=1');
_0x4ce206 = (_0xc6823.customPaths && typeof _0xc6823.customPaths['libunrar.js'] == 'string') ? _0xc6823.customPaths['libunrar.js'] : (_0xc6823.dataPath + 'libunrar.js');
var _0xb2be2a = _0x428003('canvas', {});
this.game.appendChild(_0xb2be2a), _0x4d7024.loading = _0x428003('div', {
'class': this.config.classNames.loading
}), this.elements.container.appendChild(_0x4d7024.loading);
var _0x4fce24 = {
'loading-info': _0x449eac({
'loading-info': true
}),
'ad': _0x449eac({
'ad': true
}),
'close-ad': _0x449eac({
'close-ad': true
}),
'start-game': _0x449eac({
'start-game': true
}),
'on': _0x449eac({
'on': true
}),
'p1': _0x449eac({
'p1': true
}),
'p2': _0x449eac({
'p2': true
}),
'p3': _0x449eac({
'p3': true
}),
'p4': _0x449eac({
'p4': true
})
},
_0x4e198f = '';
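// Loading overlay: when an ad URL is configured the overlay embeds a 300x250 iframe plus a
// close button, otherwise just the progress lines and a "play now" link; in both branches a
// failed WebGL probe hides the start link and prints an error message instead.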
if (typeof _0xc6823.adUrl === 'string') {
_0x4d7024.loading.innerHTML = '\n <div>\n <div class="'+_0x4fce24.ad+'" style="display:block !important;width:300px !important">\n <div class="'+_0x4fce24['close-ad']+'" hidden><a href="#" onclick="return false"></a></div>\n <iframe scrolling="no" frameborder="no" style="width:300px !important;height:250px !important;display: block !important;" src="'+_0xc6823.adUrl+'"></iframe>\n </div>\n <div class="'+_0x4fce24['loading-info']+'" style="position: absolute;bottom: 20px;left: 50%;transform: translateX(-50%);">\n <div class="'+_0x4fce24.p1+'"></div>\n <div class="'+_0x4fce24.p2+'"></div>\n <div class="'+_0x4fce24.p3+'"></div>\n </div>\n <div style="position: absolute;bottom: 20px;left: 50%;transform: translateX(-50%);">\n <a href="#" onclick="return false" class="'+_0x4fce24['start-game']+'">'+(_0xb9b2ff.get('playNow', _0xc6823.config))+'</a>\n </div>\n </div>';
var _0x2e2264 = _0x428003('iframe', {
'src': _0xc6823.adUrl,
'scrolling': 'no',
'frameborder': 'no',
'style': 'width:300px !important;height:250px !important;display: block !important;'
});
(_0x4d7024.loading.querySelector('.' .concat(_0x4fce24.ad)).removeChild(_0x4d7024.loading.querySelector('.' .concat(_0x4fce24.ad, ' iframe'))), _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.ad)).appendChild(_0x2e2264), _0x2d904a.webgl.DETECTED) || (_0x132da7(_0x4d7024.loading.querySelector('.' .concat(_0x4fce24['start-game'])), true), _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.p3)).innerHTML = '<strong style="color:#f00;text-shadow: 0px 0px 3px;">Failed to initialize WebGL.</strong>');
_0x4d7024.loading.querySelector('.' .concat(_0x4fce24.ad, ' iframe')).style.setProperty('display', 'block', 'important'), _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.ad, ' iframe')).style.setProperty('visibility', 'visible', 'important'), _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.ad)).style.setProperty('display', 'block', 'important'), _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.ad)).style.setProperty('visibility', 'visible', 'important')
} else {
_0x4d7024.loading.innerHTML = '\n <div>\n <div class="'.concat(_0x4fce24['loading-info'], '" style="position: absolute;bottom: 20px;left: 50%;transform: translateX(-50%);">\n <div class="').concat(_0x4fce24.p1, '"></div>\n <div class="').concat(_0x4fce24.p2, '"></div>\n <div class="').concat(_0x4fce24.p3, '"></div>\n </div>\n <div style="position: absolute;bottom: 20px;left: 50%;transform: translateX(-50%);">\n <a href="#" onclick="return false" class="').concat(_0x4fce24['start-game'], '">').concat(_0xb9b2ff.get('playNow', _0xc6823.config), '</a>\n </div>\n </div>');
if (! _0x2d904a.webgl.DETECTED) {
_0x132da7(_0x4d7024.loading.querySelector('.' .concat(_0x4fce24['start-game'])), true), _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.p3)).innerHTML = '<strong style="color:#f00;text-shadow: 0px 0px 3px;">Failed to initialize WebGL.</strong>';
}
}
var _0x18a437 = window.URL || window.webkitURL;
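// Every request made through the bundled HTTP client is forced to
// 'application/x-www-form-urlencoded', and any OPTIONS request is downgraded to GET before
// it is sent.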
_0x550f17.a.interceptors.request.use(function(_0x1cd992) {
return _0x1cd992.headers['Content-Type'] = 'application/x-www-form-urlencoded', 'options' === _0x1cd992.method.toLowerCase() && (_0x1cd992.method = 'get'), _0x1cd992;
}, function(_0x5150c8) {
return Promise.reject(_0x5150c8);
});
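// `_0x394c59` sniffs an archive type from its magic bytes: the three PK signatures for zip,
// '7z\xBC\xAF\x27\x1C' for 7-Zip and 'Rar!\x1A\x07\x00' for RAR; anything else is left
// undefined.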
var _0x394c59 = function(_0x571737) {
var _0x17edbf;
return 0x50 === _0x571737[0x0] && 0x4b === _0x571737[0x1] && 0x3 === _0x571737[0x2] && 0x4 === _0x571737[0x3] ? _0x17edbf = 'zip' : 0x50 === _0x571737[0x0] && 0x4b === _0x571737[0x1] && 0x5 === _0x571737[0x2] && 0x6 === _0x571737[0x3] ? _0x17edbf = 'zip' : 0x50 === _0x571737[0x0] && 0x4b === _0x571737[0x1] && 0x7 === _0x571737[0x2] && 0x8 === _0x571737[0x3] ? _0x17edbf = 'zip' : 0x37 === _0x571737[0x0] && 0x7a === _0x571737[0x1] && 0xbc === _0x571737[0x2] && 0xaf === _0x571737[0x3] && 0x27 === _0x571737[0x4] && 0x1c === _0x571737[0x5] ? _0x17edbf = '7z' : 0x52 === _0x571737[0x0] && 0x61 === _0x571737[0x1] && 0x72 === _0x571737[0x2] && 0x21 === _0x571737[0x3] && 0x1a === _0x571737[0x4] && 0x7 === _0x571737[0x5] && 0x0 == _0x571737[0x6] && (_0x17edbf = 'rar'), _0x17edbf;
},
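// Worker factories: `_0xa24f60` wraps an importScripts() one-liner in a Blob URL so the
// zip/7z extractors can run off the main thread, and `_0x41d0bd` inlines a larger bootstrap
// for the libunrar build (wiring its Module callbacks and posting extracted files back as
// messages). Both resolve with the constructed Worker.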
_0xa24f60 = function(_0xbf2b8b) {
var _0x17edbf = 'importScripts("'+_0xbf2b8b+'");',
_0x2c1832 = new Blob([_0x17edbf], {
'type': 'application/javascript'
}),
_0x501a8e = window.URL.createObjectURL(_0x2c1832);
return new Promise(function(_0x2d8147, _0x1b03b2) {
_0x2d8147(new Worker(_0x501a8e));
});
},
_0x41d0bd = function(_0x3a41ff, _0x2b541e) {
var _0x2c1832 = '';
if ('rar' == _0x2b541e) _0x2c1832 = '\nvar dataToPass = [];\nModule = {\n monitorRunDependencies: function(left) {\n if (left == 0) {\n setTimeout(function() {\n unrar(dataToPass, null);\n }, 100);\n }\n },\n onRuntimeInitialized: function() {\n },\n locateFile: function(file) {\n return \'' .concat(_0x3a41ff+'.mem', '\';\n }\n};\nimportScripts("').concat(_0x3a41ff, '");\nvar unrar = function(data, password) {\n var cb = function(fileName, fileSize, progress) {\n postMessage({"t":4,"current":progress,"total":fileSize, "name": fileName});\n };\n\n var rarContent = readRARContent(data.map(function(d) {\n return {\n name: d.name,\n content: new Uint8Array(d.content)\n }\n }), password, cb)\n var rec = function(entry) {\n if (entry.type === \'file\') {\n postMessage({"t":2,"file":entry.fullFileName,"size":entry.fileSize,"data":entry.fileContent});\n } else if (entry.type === \'dir\') {\n Object.keys(entry.ls).forEach(function(k) {\n rec(entry.ls[k]);\n })\n } else {\n throw "Unknown type";\n }\n }\n rec(rarContent);\n postMessage({"t":1});\n return rarContent;\n};\nonmessage = function(data) {\n dataToPass.push({name: \'test.rar\', content: data.data});\n};\n ');
var _0x22326b = new Blob([_0x2c1832], {
'type': 'application/javascript'
}),
_0x3635b1 = window.URL.createObjectURL(_0x22326b);
return new Promise(function(_0x2d169f, _0x341b0d) {
_0x2d169f(new Worker(_0x3635b1));
});
},
_0x24de8d = null,
_0x4e171c = null,
_0xa884ea = null,
_0x1ffb98 = null;
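// Four IndexedDB-backed stores (prefix 'ejs-'): core/system files, ROMs, BIOS images and
// raw ROM data. The _0x528f2b/_0x547484/_0x5e24fa flags are flipped once each store is
// ready (or fails), so later code can fall back gracefully when IndexedDB is unavailable.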
try {
_0x24de8d = new _0x50f370.a({
'dbVersion': 0x1,
'storePrefix': 'ejs-',
'storeName': 'system',
'keyPath': null,
'autoIncrement': !0x1,
'onStoreReady': function() {
_0x528f2b = true;
},
'onError': function() {
_0x528f2b = true;
}
}), _0x4e171c = new _0x50f370.a({
'dbVersion': 0x1,
'storePrefix': 'ejs-',
'storeName': 'roms',
'keyPath': null,
'autoIncrement': !0x1,
'onStoreReady': function() {
_0x547484 = true;
},
'onError': function() {
_0x547484 = true;
}
}), _0xa884ea = new _0x50f370.a({
'dbVersion': 0x1,
'storePrefix': 'ejs-',
'storeName': 'bios',
'keyPath': null,
'autoIncrement': !0x1,
'onStoreReady': function() {
_0x5e24fa = true;
},
'onError': function() {
_0x5e24fa = true;
}
}), _0x1ffb98 = new _0x50f370.a({
'dbVersion': 0x1,
'storePrefix': 'ejs-',
'storeName': 'romsdata',
'keyPath': null,
'autoIncrement': !0x1,
'onStoreReady': function() {},
'onError': function() {}
});
} catch (_0x4fc9b6) {
_0x528f2b = true, _0x547484 = true, _0x5e24fa = true, console.log(_0x4fc9b6);
}
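// 'start-game' click handler: marks the player as started, hides the loading info and the
// start link, briefly flashes the gamepad tooltip, removes the ad after a delay, applies the
// stored core options that affect aspect ratio (N64 16:9, FBA PAR, GBA 3:2), rewires
// pointer/keyboard input straight into the core, patches the audio queue, and finally hashes
// the loaded ROM.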
_0x1093f4.call(_0xc6823, _0xc6823.elements.container, 'start-game', function(_0x3d9f57) {
_0xc6823.started = true, _0xc6823.playing = true, _0x132da7(_0x4d7024.loading.querySelector('.' .concat(_0x4fce24['loading-info'])), true);
if (_0xdcec2a && (typeof _0xdcec2a.virtualGamepadContainer == 'undefined' || _0xdcec2a.virtualGamepadContainer === null)) { // comparing the value itself against the string 'undefined' could never match
_0x5ab74d.toggleControls.call(_0xc6823, true);
}
var _0x2c1832 = _0x449eac({
'ejs__tooltip--visible': true
}),
_0x10efa5 = _0xc6823.elements.buttons.gamepad.querySelector('.' .concat(_0x449eac({
'ejs__tooltip': true
})));
_0x3a8e2f(_0x10efa5, _0x2c1832, true), setTimeout(function() {
_0x3a8e2f(_0x10efa5, _0x2c1832, !0x1);
}, 0x1388)
if (typeof _0xc6823.adUrl === 'string') {
_0x132da7(_0x4d7024.loading.querySelector('.' .concat(_0x4fce24['close-ad'])), !0x1), setTimeout(function() {
_0xc6823.started && _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.ad)) && _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.ad)).parentNode.removeChild(_0x4d7024.loading.querySelector('.' .concat(_0x4fce24.ad)));
}, 0x2710)
}
_0x132da7(_0x4d7024.loading.querySelector('.' .concat(_0x4fce24['start-game'])), true);
var _0x30fd44 = _0x5dc0c0(_0x2593da.defaultCoreOptionsValues.call(_0xa88a13), _0x2593da.storage.get('core-options') || {});
'n64' === _0xc6823.system && _0x30fd44 && Object.keys(_0x30fd44).forEach(function(_0x3015d2) {
'glupen64-aspect' === _0x3015d2 && '16:9' === _0x30fd44[_0x3015d2] && (_0xb2be2a.height = _0xb2be2a.width / 0x10 * 0x9, _0x4d7024.aspectRatio = 0x10 / 0x9);
}), 'arcade' !== _0xa88a13.system && 'fba0.2.97.29' !== _0xa88a13.system || _0x30fd44 && Object.keys(_0x30fd44).forEach(function(_0x490c06) {
'fba-aspect' === _0x490c06 && 'PAR' === _0x30fd44[_0x490c06] && (_0xb2be2a.height = _0xb2be2a.width / 0x10 * 0x9, _0x4d7024.aspectRatio = 0x10 / 0x9);
}), 'gba' === _0xc6823.system && (_0xb2be2a.height = _0xb2be2a.width / 1.5, _0x4d7024.aspectRatio = 1.5), window.setTimeout(function() {}, 0x96),
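// When the core exposes its RetroArch-style input runtime (RI), the default document-level
// handlers are swapped for container-scoped ones: mouse and touch deltas are clamped to the
// canvas and written straight into each input context's HEAP32 state, with button state in
// HEAP8. Without RI, pointer events on the dialog layer are simply re-dispatched to the
// canvas. The MSX branch afterwards does the same for the keyboard matrix (one bit per key).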
function() {
var _0x3d9f57 = _0xa88a13;
if ('undefined' != typeof RI) {
document.removeEventListener('mousemove', RI.eventHandler, !0x1), _0xb2be2a.removeEventListener('mousedown', RI.canvasEventHandler, !0x1), _0xb2be2a.removeEventListener('mouseup', RI.canvasEventHandler, !0x1);
_0x3d9f57.elements.container.addEventListener('mousemove', function(_0x44f409) {
var _0x3d9f57 = 0x0,
_0x2c1832 = 0x0,
_0x2cc8a1 = _0x44f409.offsetX - _0x4d7024.Module.canvas.offsetLeft,
_0x5744ed = _0x44f409.offsetY - _0x4d7024.Module.canvas.offsetTop;
switch (_0x44f409.type) {
case 'mousemove':
_0x2cc8a1 < 0x0 ? (_0x2cc8a1 = 0x0, _0x3d9f57 = -_0x4d7024.Module.canvas.offsetWidth) : _0x2cc8a1 > _0x4d7024.Module.canvas.offsetWidth ? (_0x2cc8a1 = _0x4d7024.Module.canvas.offsetWidth, _0x3d9f57 = _0x4d7024.Module.canvas.offsetWidth) : _0x3d9f57 = _0x2cc8a1 - RI.currentX, _0x5744ed < 0x0 ? (_0x5744ed = 0x0, _0x2c1832 = -_0x4d7024.Module.canvas.offsetHeight) : _0x5744ed > _0x4d7024.Module.canvas.offsetHeight ? (_0x5744ed = _0x4d7024.Module.canvas.offsetHeight, _0x2c1832 = _0x4d7024.Module.canvas.offsetHeight) : _0x2c1832 = _0x5744ed - RI.currentY, RI.currentX = _0x2cc8a1, RI.currentY = _0x5744ed;
for (var _0x3e03cb = 0x0; _0x3e03cb < RI.contexts.length; _0x3e03cb += 0x1) HEAP32[RI.contexts[_0x3e03cb].state + 0x20 >> 0x2] = _0x3d9f57, HEAP32[RI.contexts[_0x3e03cb].state + 0x24 >> 0x2] = _0x2c1832;
}
}, !0x1), _0x3d9f57.elements.container.addEventListener('mousedown', RI.canvasEventHandler, !0x1), _0x3d9f57.elements.container.addEventListener('mouseup', RI.canvasEventHandler, !0x1);
var _0x2c1832 = function(_0x5d0cbb) {
if (_0x5d0cbb.targetTouches[0x0]) {
var _0x3d9f57, _0x2c1832, _0x19a4e0 = 0x0,
_0x2b7b61 = 0x0,
_0x1e0f64 = _0x5d0cbb.target.getBoundingClientRect(),
_0x2c55b8 = _0x5d0cbb.targetTouches[0x0].clientX - _0x1e0f64.left,
_0x3a7025 = _0x5d0cbb.targetTouches[0x0].clientY - _0x1e0f64.top;
_0x2c55b8 < 0x0 ? (_0x2c55b8 = 0x0, _0x19a4e0 = -_0x4d7024.Module.canvas.offsetWidth) : _0x2c55b8 > _0x4d7024.Module.canvas.offsetWidth ? (_0x2c55b8 = _0x4d7024.Module.canvas.offsetWidth, _0x19a4e0 = _0x4d7024.Module.canvas.offsetWidth) : _0x19a4e0 = _0x2c55b8 - RI.currentX, _0x3a7025 < 0x0 ? (_0x3a7025 = 0x0, _0x2b7b61 = -_0x4d7024.Module.canvas.offsetHeight) : _0x3a7025 > _0x4d7024.Module.canvas.offsetHeight ? (_0x3a7025 = _0x4d7024.Module.canvas.offsetHeight, _0x2b7b61 = _0x4d7024.Module.canvas.offsetHeight) : _0x2b7b61 = _0x3a7025 - RI.currentY, RI.currentX = _0x2c55b8, RI.currentY = _0x3a7025;
for (var _0x43b670 = 0x0; _0x43b670 < RI.contexts.length; _0x43b670 += 0x1) HEAP32[RI.contexts[_0x43b670].state + 0x20 >> 0x2] = _0x19a4e0, HEAP32[RI.contexts[_0x43b670].state + 0x24 >> 0x2] = _0x2b7b61;
switch (_0x5d0cbb.type) {
case 'touchend':
case 'touchstart':
if (0x0 === _0x5d0cbb.button) _0x2c1832 = 0x28;
else {
if (0x2 !== _0x5d0cbb.button) break;
_0x2c1832 = 0x29;
}
_0x3d9f57 = 'touchend' === _0x5d0cbb.type ? 0x0 : 0x1;
for (var _0x443e8b = 0x0; _0x443e8b < RI.contexts.length; _0x443e8b += 0x1) HEAP8[RI.contexts[_0x443e8b].state + _0x2c1832 >> 0x0] = _0x3d9f57;
}
}
};
_0x3d9f57.elements.container.addEventListener('touchstart', _0x2c1832, !0x1), _0x3d9f57.elements.container.addEventListener('touchend', _0x2c1832, !0x1);
} else {
var _0x518e5a = function(_0x45d640) {
var _0x3d9f57 = new MouseEvent(_0x45d640.type, _0x45d640);
_0x4d7024.Module.canvas.dispatchEvent(_0x3d9f57);
};
_0x530042.call(_0xa88a13, '.' .concat(_0x449eac({
'ejs__dialogs': true
}))).addEventListener('mousemove', _0x518e5a, !0x1), _0x530042.call(_0xa88a13, '.' .concat(_0x449eac({
'ejs__dialogs': true
}))).addEventListener('mousedown', _0x518e5a, !0x1), _0x530042.call(_0xa88a13, '.' .concat(_0x449eac({
'ejs__dialogs': true
}))).addEventListener('mouseup', _0x518e5a, !0x1);
}
}(), 'msx' === _0xc6823.system && function(_0x3c016a) {
if ('undefined' != typeof RI) {
document.removeEventListener('keydown', RI.eventHandler, !0x1), document.removeEventListener('keyup', RI.eventHandler, !0x1);
var _0x3d9f57 = function(_0x97e67b) {
var _0x3d9f57 = _0x97e67b.keyCode,
_0x2c1832 = _0x3d9f57 >> 0x3,
_0x3340c0 = 0x1 << (0x7 & _0x3d9f57);
switch (_0x97e67b.type) {
case 'keyup':
case 'keydown':
if (_0x2c1832 >= 0x20) throw 'key code error! bad code: ' + _0x3d9f57;
for (var _0x126f58 = 0x0; _0x126f58 < RI.contexts.length; _0x126f58++) {
var _0x4ca77a = HEAP8[RI.contexts[_0x126f58].state + _0x2c1832 >> 0x0];
'keyup' === _0x97e67b.type ? _0x4ca77a &= ~_0x3340c0 : _0x4ca77a |= _0x3340c0, HEAP8[RI.contexts[_0x126f58].state + _0x2c1832 >> 0x0] = _0x4ca77a;
}
}
};
document.addEventListener('keydown', _0x3d9f57, !0x1), document.addEventListener('keyup', _0x3d9f57, !0x1);
}
}(), setTimeout(function() {
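// Audio path: if the plugin has no setVolume hook but the core's RetroArch audio object (RA)
// exists, queueAudio is overridden so each buffer respects the player's volume/mute state
// (routed through a GainNode when needed), with per-browser quirks for older
// Chrome/Firefox/Opera/Edge gain APIs, and the AudioContext is resumed on first touch on iOS.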
var _0xa88a13;
_0xa88a13 = _0x59aa33.info(), null === _0xdcec2a.setVolume && 'undefined' != typeof RA ? (RA.queueAudio = function() {
var _0x3d9f57, _0x2c1832 = _0xc6823.volume,
_0x2cbc4 = RA.bufIndex;
_0x3d9f57 = RA.bufIndex ? RA.buffers[RA.bufIndex - 0x1].endTime : RA.context.currentTime, RA.buffers[_0x2cbc4].endTime = _0x3d9f57 + RA.buffers[_0x2cbc4].duration;
var _0x4e7320 = RA.context.createBufferSource();
if (_0x4e7320.buffer = RA.buffers[_0x2cbc4], _0xc6823.muted && (_0x2c1832 = 0x0), 0x1 === _0x2c1832) _0x4e7320.connect(RA.context.destination);
else {
var _0x3da8fc = RA.context.createGain();
_0x4e7320.connect(_0x3da8fc), _0x3da8fc.connect(RA.context.destination), _0x59aa33.isEdge || 'chrome' === _0xa88a13.name && parseInt(_0xa88a13.version, 0xa) <= 0x3c || 'firefox' === _0xa88a13.name && parseInt(_0xa88a13.version, 0xa) <= 0x38 || 'opera' === _0xa88a13.name && parseInt(_0xa88a13.version, 0xa) <= 0x2c ? _0x3da8fc.gain.value = _0x2c1832 : 'firefox' === _0xa88a13.name ? _0x3da8fc.gain.setValueAtTime(_0x2c1832, RA.context.currentTime) : _0x3da8fc.gain.setValueAtTime(_0x2c1832, RA.context.currentTime, 0x0);
}
_0x4e7320.start(_0x3d9f57), RA.bufIndex += 0x1, RA.bufOffset = 0x0;
}, _0x59aa33.isIos && window.addEventListener('touchstart', function() {
if (RA.context) {
RA.context.resume();
var _0xa88a13 = RA.context.createBufferSource();
_0xa88a13.connect(RA.context.destination), _0xa88a13.start();
}
}, !0x1)) : _0xdcec2a.setVolume();
}, 0x1f4), setTimeout(function() {
_0xc6823.elements.container.focus();
}, 0x1e), void 0x0 === window.addRunDependency && (window.Module = void 0x0);
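// Hash the ROM that was just mounted in the Emscripten FS: files up to 32 MB (and not .cue
// sheets) are read back, the SNES copier header (file size mod 1 KB) is stripped when
// present, and the data is fed to the bundled hasher (likely MD5) in 2 MB chunks via a
// FileReader; the resulting digest is stored on the player and handed to
// window.ejsromshash if the page defines it.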
var _0x36b9cc = _0x4d7024.FS.readFile(_0xc6823.startName);
if (_0x36b9cc.length <= 0x2000000 && '.cue' != _0xc6823.startName.substr(-0x4)) {
var _0x4fa964;
if ('snes' === _0xc6823.system || ['snes2002', 'snes2005', 'snes2010'].includes(_0xc6823.system)) {
var _0x4b2f70 = _0x36b9cc.length % 0x400;
_0x4fa964 = new _0x863031(_0x4b2f70 ? _0x36b9cc.slice ? _0x36b9cc.slice(_0x4b2f70) : _0x36b9cc.subarray(_0x4b2f70) : _0x36b9cc);
} else _0x4fa964 = new _0x863031(_0x36b9cc);
var _0x21e040 = _0x5a641d.a.create(),
_0x2b8478 = new FileReader(),
_0x15068c = 0x0,
_0x1646ca = _0x4fa964._u8array.length,
_0x4c84da = function() {
if (_0x15068c < _0x1646ca) {
var _0xa88a13 = Math.min(_0x15068c + 0x200000, _0x1646ca);
_0x2b8478.readAsArrayBuffer(new Blob([_0x4fa964._u8array.slice(_0x15068c, _0xa88a13)])), _0x15068c = _0xa88a13;
} else console.log(_0x21e040.hex()), _0xc6823.hash = _0x21e040.hex(), 'function' == typeof window.ejsromshash && window.ejsromshash(_0xc6823.hash);
};
_0x2b8478.onload = function(_0x26a751) {
try {
_0x21e040.update(_0x26a751.target.result), _0x4c84da();
} catch (_0x5c9e53) {
console.log(_0x5c9e53);
}
}, _0x4c84da();
}
})
if (typeof _0xc6823.adUrl === 'string') {
_0x1093f4.call(_0xc6823, _0x4d7024.loading.querySelector('.' .concat(_0x4fce24['close-ad'])), 'click', function(_0x96b13f) {
_0x96b13f instanceof MouseEvent && _0xc6823.started && _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.ad)) && _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.ad)).parentNode.removeChild(_0x4d7024.loading.querySelector('.' .concat(_0x4fce24.ad))), _0xc6823.elements.container.focus();
});
}
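// Prune the cached ROM list: entries beyond the ~10 most recently accessed (and not saved this session) are removed from both stores; kept entries have their bulky data field stripped from the metadata store.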
var _0x16cf3e = function() {
_0x4e171c.db && _0x4e171c.getAll(function(_0x1a4eaf) {
Array.isArray(_0x1a4eaf) && _0x1a4eaf.length > 0xa && _0x1a4eaf.sort(function(_0x155e15, _0x4a9e7f) {
return (_0x4a9e7f.lastaccess ? _0x4a9e7f.lastaccess : 0x0) - (_0x155e15.lastaccess ? _0x155e15.lastaccess : 0x0);
}).forEach(function(_0x43d1af, _0x2db4e2) {
_0x2db4e2 > 0xa && !_0x4d7024.saveFilenames.includes(_0x43d1af.key) ? (_0x4e171c.remove(_0x43d1af.key), _0x1ffb98.db && _0x1ffb98.remove(_0x43d1af.key)) : (delete _0x43d1af.data, _0x4e171c.put(_0x43d1af.key, _0x43d1af));
});
}, function() {
_0x4e171c.clear(), _0x1ffb98.db && _0x1ffb98.clear();
});
},
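// Apply the downloaded IPS/UPS/BPS patch (_0x4d7024.gamePatch) to the ROM inside the Emscripten FS, renaming the ROM to match the patch and adding/removing the 512-byte SNES copier header where the checksums call for it.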
_0x5f3757 = function(_0x2bd107) {
if (_0x4d7024.gamePatch) {
var _0x17edbf = _0x2bd107.startName.split('/'),
_0x2c1832 = _0x17edbf.pop().split('.').pop(),
_0x11dc72 = (_0x4d7024.gamePatch.split('.').pop(), _0x4d7024.gamePatch.split('/').pop().split('.').shift()),
_0x453ec0 = '' .concat(_0x17edbf.join('/'), '/').concat(_0x11dc72, '.').concat(_0x2c1832);
_0x453ec0 = _0x453ec0.replace(/\#/g, '');
var _0x5438db = _0x4d7024.gamePatch;
if (_0x4d7024.FS.rename(_0x2bd107.startName, _0x453ec0), _0x2bd107.startName = _0x453ec0, 'snes' === _0x2bd107.system || ['snes2002', 'snes2005', 'snes2010'].includes(_0x2bd107.system)) {
var _0x4758bc, _0x4b4000, _0x4aa31e, _0x2d9175 = new _0x863031(_0x4d7024.FS.readFile(_0x2bd107.startName)),
_0x567355 = _0x2d78e9(_0x2d9175),
_0x3d2ad0 = new _0x863031(_0x4d7024.FS.readFile(_0x5438db)),
_0x4a229b = _0x3d2ad0.readString(0x6),
_0x20786d = _0x2d9175._u8array.length % 0x400;
_0x4a229b.startsWith('PATCH') ? (_0x4758bc = _0x288092(_0x3d2ad0), 0x0 === _0x20786d && 'B19ED489' === _0x567355.toString(0x10).toUpperCase() && ((_0x4b4000 = new Uint8Array(_0x2d9175._u8array.length + 0x200)).set(new Uint8Array(0x200), 0x0), _0x4b4000.set(_0x2d9175._u8array, 0x200), _0x2d9175 = new _0x863031(_0x4b4000))) : _0x4a229b.startsWith(_0x7a6485) ? _0x4758bc = _0xd2202f(_0x3d2ad0) : _0x4a229b.startsWith('BPS1') && (_0x4758bc = _0x356089(_0x3d2ad0), _0x20786d > 0x0 && 'A31BEAD4' === _0x567355.toString(0x10).toUpperCase() && (_0x4b4000 = _0x2d9175._u8array.slice ? _0x2d9175._u8array.slice(_0x20786d) : _0x2d9175._u8array.subarray(_0x20786d), _0x2d9175 = new _0x863031(_0x4b4000)));
try {
if (_0x4aa31e = _0x4758bc.apply(_0x2d9175, !0x1)) {
var _0x1ffde3 = _0x4aa31e._u8array.length % 0x400;
_0x1ffde3 > 0x0 && (_0x4b4000 = _0x4aa31e._u8array.slice ? _0x4aa31e._u8array.slice(_0x1ffde3) : _0x4aa31e._u8array.subarray(_0x1ffde3), _0x4aa31e = new _0x863031(_0x4b4000)), _0x4d7024.FS.unlink(_0x5438db), _0x4d7024.FS.unlink(_0x2bd107.startName), _0x4d7024.FS.writeFile(_0x2bd107.startName, _0x4aa31e._u8array);
}
} catch (_0x2ef20f) {}
} else {
var _0x4108d8, _0x5e086b, _0x454a20 = new _0x863031(_0x4d7024.FS.readFile(_0x2bd107.startName)),
_0x4991ea = new _0x863031(_0x4d7024.FS.readFile(_0x5438db)),
_0x130f1f = _0x4991ea.readString(0x6);
_0x130f1f.startsWith('PATCH') ? _0x4108d8 = _0x288092(_0x4991ea) : _0x130f1f.startsWith(_0x7a6485) ? _0x4108d8 = _0xd2202f(_0x4991ea) : _0x130f1f.startsWith('BPS1') && (_0x4108d8 = _0x356089(_0x4991ea));
try {
(_0x5e086b = _0x4108d8.apply(_0x454a20, !0x1)) && (_0x4d7024.FS.unlink(_0x5438db), _0x4d7024.FS.unlink(_0x2bd107.startName), _0x4d7024.FS.writeFile(_0x2bd107.startName, _0x5e086b._u8array));
} catch (_0x3ed519) {}
}
}
},
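// Choose window.Module.TOTAL_MEMORY per system, depending on whether the wasm build is in use (_0x31a5b3); _0x468801 is the accumulated size of files written to the FS, and an EJS_MEMORY global overrides everything.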
_0x4bd781 = function() {
_0x31a5b3 ? (window.Module.TOTAL_MEMORY = 0x10000000, ['jaguar'].includes(_0xa88a13.system) && (window.Module.TOTAL_MEMORY = 0x10000000), ['nds', 'nes', 'snes', 'arcade', 'mame'].includes(_0xa88a13.system) && (window.Module.TOTAL_MEMORY = 0x0), ['snes2002', 'snes2005', 'snes2010'].includes(_0xa88a13.system) && (window.Module.TOTAL_MEMORY = 0x0), 'gba' == _0xa88a13.system && (window.Module.TOTAL_MEMORY = 0x0), '3do' == _0xa88a13.system && (window.Module.TOTAL_MEMORY = 0x0), 'n64' == _0xa88a13.system && (window.Module.TOTAL_MEMORY = 0x0)) : ('nes' === _0xa88a13.system && (window.Module.TOTAL_MEMORY = 0x8000000), window.Module.TOTAL_MEMORY = _0x468801 + 0x4000000, ['arcade', 'mame', 'mame2003', 'fba0.2.97.29'].includes(_0xa88a13.system) && (window.Module.TOTAL_MEMORY = _0x468801 + 0x10000000, _0x4d7024.isMobileDevice && (window.Module.TOTAL_MEMORY = 0x12c00000)), ['n64', 'nds', 'jaguar'].includes(_0xa88a13.system) && (window.Module.TOTAL_MEMORY = _0x468801 + 0x10000000, _0x4d7024.isMobileDevice && (window.Module.TOTAL_MEMORY = 0x12c00000)), 'gba' == _0xa88a13.system && (window.Module.TOTAL_MEMORY = 0x10000000)), 'undefined' != typeof EJS_MEMORY && (window.Module.TOTAL_MEMORY = EJS_MEMORY);
},
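// Take the game data: if it is a 7z/zip/rar archive, decompress it in a worker and write the extracted files into the FS; otherwise write it directly. Then start the core via Module.callMain and, if configured, fetch loadStateOnStart.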
_0x3512e9 = function(_0x1bc287, _0x1844e3) {
var _0x55627a = _0xa88a13,
_0x1cfda7 = _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.p3)),
_0x429d6b = (_0x1bc287.split('.').pop(), _0x29078e[_0x1143c5]),
_0xb0c5d8 = -0x1,
_0x567713 = '',
_0x567717 = function() {
if (typeof _0x55627a.loadStateOnStart == 'string') {
fetch(_0x55627a.loadStateOnStart).then(function(response) {
response.arrayBuffer().then(function(ab) {
var a = new Uint8Array(ab)
EJS_loadState(a)
})
})
}
},
_0x99321 = function(_0x52e6f3) {
if (_0x52e6f3.data) {
if (0x1 === _0x52e6f3.data.t) {
_0x1cfda7.innerHTML = 'Game Data ready', _0x52e6f3.target.terminate();
try {
_0x4bd781(), _0x5f3757(_0x55627a), _0x16cf3e();
var _0x17edbf = 'callMain';
!_0x55627a.startName && _0x567713 && (_0x55627a.startName = _0x567713);
var _0x2c1832 = [_0x55627a.startName,
[_0x4d7024.hash, _0x4d7024.hash2, _0x4d7024.hash3].join('')
];
'undefined' != typeof EJS_DEBUG_XX && true === EJS_DEBUG_XX && _0x2c1832.unshift('-v'), _0x4d7024.Module[_0x17edbf](_0x2c1832), _0xbae705.call(_0x55627a, _0x55627a.elements.container, 'start-game');
} catch (_0x4a1471) {
console.log(_0x4a1471), _0x1cfda7.innerHTML = '<strong style="color:#f00;text-shadow: 0px 0px 3px;">Failed to start game</strong>';
}
try { _0x567717(); } catch (e) {}
}
if (0x2 === _0x52e6f3.data.t) {
var _0x25aae3 = _0x52e6f3.data.file.split('/'),
_0x1bc287 = _0x25aae3.pop(),
_0x1844e3 = _0x1bc287.split('.').pop();
_0x1bc287 = _0x1bc287.replace(/\#/g, ''), _0x4d7024.FS.createPath('/', _0x25aae3.join('/'), true, true);
for (var _0x36bace = _0x1e0766[_0x429d6b].length - 0x1; _0x36bace >= 0x0; _0x36bace -= 0x1) _0x1e0766[_0x429d6b][_0x36bace] === _0x1844e3.toLowerCase() && _0x36bace > _0xb0c5d8 && (_0xb0c5d8 = _0x36bace, _0x55627a.startName = ['', _0x25aae3.join('/'), _0x1bc287].join('/'), _0x55627a.startName = _0x55627a.startName.replace(/\/+/, '/'));
_0x1bc287 && (_0x4d7024.FS.createDataFile('/' .concat(_0x25aae3.join('/')), _0x1bc287, _0x52e6f3.data.data, true, !0x1), _0x468801 += _0x52e6f3.data.data.length), _0x567713 = '/' .concat(_0x25aae3.join('/'), '/').concat(_0x1bc287);
}
if (0x4 === _0x52e6f3.data.t && _0x52e6f3.data.total > 0x0) {
var _0x163fc4 = Math.floor(_0x52e6f3.data.current / _0x52e6f3.data.total * 0x64),
_0x5448a3 = _0x52e6f3.data.name || '';
_0x163fc4 <= 0x64 && _0x163fc4 > 0x0 && (_0x1cfda7.innerHTML = 'Decompress Game Data ' .concat(_0x5448a3, ' ').concat(_0x163fc4, '%'));
}
}
},
_0x1e67c4 = !0x1;
if (!['arcade', 'mame', 'mame2003', 'fba0.2.97.29'].includes(_0x55627a.system)) {
var _0x20a78c = _0x1844e3.slice ? _0x1844e3.slice(0x0, 0xa) : _0x1844e3.subarray(0x0, 0xa),
_0x1761cf = _0x394c59(_0x20a78c);
'7z' === _0x1761cf ? (_0xa24f60(_0x17edbf).then(function(_0xea623e) {
_0xea623e.onmessage = _0x99321, _0xea623e.postMessage(_0x1844e3);
}), _0x1e67c4 = true) : 'zip' === _0x1761cf ? (_0xa24f60(_0x2c1832).then(function(_0x5a81e4) {
_0x5a81e4.onmessage = _0x99321, _0x5a81e4.postMessage(_0x1844e3);
}), _0x1e67c4 = true) : 'rar' == _0x1761cf && (_0x41d0bd(_0x4ce206, 'rar').then(function(_0x5f204f) {
_0x5f204f.onmessage = _0x99321, _0x5f204f.postMessage(_0x1844e3);
}), _0x1e67c4 = true);
}
if (!_0x1e67c4) {
var _0x1c661d = _0x1bc287.replace(/\#/g, '');
try {
_0x468801 += _0x1844e3.length, _0x4d7024.FS.createDataFile('/', _0x1c661d, _0x1844e3, true, !0x1);
} catch (_0x210043) {
console.log(_0x210043);
}
_0x4bd781(), _0x55627a.startName = _0x1c661d, _0x5f3757(_0x55627a), _0x16cf3e();
try {
var _0x165f73 = 'callMain',
_0x4dd87f = [_0x55627a.startName,
[_0x4d7024.hash, _0x4d7024.hash2, _0x4d7024.hash3].join('')
];
'undefined' != typeof EJS_DEBUG_XX && true === EJS_DEBUG_XX && _0x4dd87f.unshift('-v'), _0x4d7024.Module[_0x165f73](_0x4dd87f), _0xbae705.call(_0x55627a, _0x55627a.elements.container, 'start-game');
} catch (_0x42970c) {
console.log(_0x42970c), _0x1cfda7.innerHTML = '<strong style="color:#f00;text-shadow: 0px 0px 3px;">Failed to start game</strong>';
}
try { _0x567717(); } catch (e) {}
}
},
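// Download the game ROM (gameUrl): HEAD it first, reuse the IndexedDB copy when filename/size/type match (reassembling 100 MB chunks), otherwise fetch it, cache it chunked, and pass the bytes to _0x3512e9.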
_0x5048db = function() {
var _0x17edbf = _0xa88a13,
_0x2c1832 = _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.p3)),
_0x50eb85 = _0xa88a13.config,
_0x3cf3cf = (_0x50eb85.gameId, _0x50eb85.gameUrl),
_0x594488 = _0x50eb85.system;
'vbanext' == _0x594488 && (_0x594488 = 'gba');
var _0x154dc5 = _0x3cf3cf.split('/'),
_0x139f1c = _0x154dc5[_0x154dc5.length - 0x1];
_0x139f1c.indexOf('?') > -0x1 && (_0x139f1c = _0x139f1c.substr(0x0, _0x139f1c.indexOf('?')));
_0x139f1c.split('.').pop();
_0x2c1832.innerHTML = 'Download Game Data', ['arcade', 'mame', 'mame2003', 'fba0.2.97.29'].includes(_0x17edbf.system) && _0x17edbf.config.gameParentUrl && (_0x2c1832.innerHTML += ' (2/2)'), _0x17edbf.config.gamePatchUrl && (_0x2c1832.innerHTML += ' (2/2)');
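// HEAD the game URL for its size and type; blob: URLs cannot be HEAD-requested, so the blob is fetched and equivalent headers are synthesized.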
new Promise(async function(resolve, reject) {
if (!_0x3cf3cf.startsWith('blob:')) {
resolve(await _0x550f17.a.head(_0x3cf3cf, {}));
} else {
var a = await fetch(_0x3cf3cf);
a = await a.blob();
resolve({ headers: { 'content-length': a.size, 'content-type': 'text/plain' } });
}
}).then(function(_0x3d703e) {
var _0x21b526 = _0x3d703e.headers['content-length'],
_0x7e8d4c = _0x3d703e.headers['content-type'],
_0x107e8a = (_0x3d703e.headers['last-modified'], '' .concat(_0x594488, '-').concat(_0x139f1c)),
_0x43a689 = function() {
_0x550f17.a.get(_0x3cf3cf, {
'onDownloadProgress': function(_0x4e7133) {
var _0x13b999 = _0x4e7133.total ? '' .concat(Math.floor(_0x4e7133.loaded / _0x4e7133.total * 0x64), '%') : '';
['arcade', 'mame', 'mame2003', 'fba0.2.97.29'].includes(_0x17edbf.system) && _0x17edbf.config.gameParentUrl ? _0x2c1832.innerHTML = 'Download Game Data (2/2) ' .concat(_0x13b999) : _0x17edbf.config.gamePatchUrl ? _0x2c1832.innerHTML = 'Download Game Data (2/2) ' .concat(_0x13b999) : _0x2c1832.innerHTML = 'Download Game Data ' .concat(_0x13b999);
},
'responseType': 'arraybuffer'
}).then(function(_0x22a1f4) {
var _0x17edbf = new Uint8Array(_0x22a1f4.data);
if (_0xa88a13.config.gameUrl.startsWith('blob:') || _0xa88a13.config.gameUrl.startsWith('file:') || _0xa88a13.config.gameUrl.startsWith('chrome-extension:') || ((window.location.protocol == 'file:' || window.location.protocol == 'chrome-extension:') && _0xa88a13.config.gameUrl.startsWith('/') && _0xa88a13.config.gameUrl.split(':').length == 1)) {_0x3512e9(_0x139f1c, _0x17edbf);return;};
if (_0x107e8a && _0x4e171c.db)
if (_0x17edbf.length > 0x6400000)
for (var _0x2c1832 = Math.ceil(_0x17edbf.length / 0x6400000), _0x257c4d = 0x0; _0x257c4d < _0x2c1832; _0x257c4d += 0x1) {
var _0x47d2ae = _0x17edbf.slice ? _0x17edbf.slice(0x6400000 * _0x257c4d, 0x6400000 * (_0x257c4d + 0x1)) : _0x17edbf.subarray(0x6400000 * _0x257c4d, 0x6400000 * (_0x257c4d + 0x1)),
_0x6ce9f3 = '' .concat(_0x107e8a);
_0x257c4d > 0x0 && (_0x6ce9f3 = '' .concat(_0x107e8a, '-part-').concat(_0x257c4d)), _0x4d7024.saveFilenames.push(_0x6ce9f3), _0x4e171c.put(_0x6ce9f3, {
'filename': _0x139f1c,
'filesize': _0x17edbf.length,
'filetype': _0x7e8d4c,
'key': _0x6ce9f3,
'lastaccess': new Date().valueOf(),
'next': '' .concat(_0x107e8a, '-part-').concat(_0x257c4d + 0x1)
}), _0x1ffb98.db && _0x1ffb98.put(_0x6ce9f3, {
'filename': _0x139f1c,
'filesize': _0x17edbf.length,
'filetype': _0x7e8d4c,
'key': _0x6ce9f3,
'data': _0x47d2ae
});
} else _0x4d7024.saveFilenames.push(_0x107e8a), _0x4e171c.put(_0x107e8a, {
'filename': _0x139f1c,
'filesize': _0x17edbf.length,
'filetype': _0x7e8d4c,
'key': _0x107e8a,
'lastaccess': new Date().valueOf()
}), _0x1ffb98.db && _0x1ffb98.put(_0x107e8a, {
'filename': _0x139f1c,
'filesize': _0x17edbf.length,
'filetype': _0x7e8d4c,
'key': _0x107e8a,
'data': _0x17edbf
});
_0x3512e9(_0x139f1c, _0x17edbf);
}).catch(function(_0x2e4d62) {
console.log(_0x2e4d62), _0x2c1832.innerHTML = '<strong style="color:#f00;text-shadow: 0px 0px 3px;">Network Error</strong>';
});
};
_0x1ffb98.db ? _0x1ffb98.get(_0x107e8a, function(_0x5afc4a) {
if (_0x5afc4a && _0x5afc4a.filename === _0x139f1c && _0x5afc4a.filesize - _0x21b526 == 0x0 && _0x5afc4a.filetype === _0x7e8d4c && _0x5afc4a.key)
if (_0x4e171c.get(_0x107e8a, function(_0x2585bd) {
_0x2585bd.lastaccess = new Date().valueOf(), _0x4e171c.put(_0x2585bd.key, _0x2585bd);
}), _0x5afc4a.filesize > 0x6400000) {
var _0x17edbf = new Uint8Array(_0x5afc4a.filesize);
_0x17edbf.set(_0x5afc4a.data);
for (var _0x2c1832 = Math.ceil(_0x5afc4a.filesize / 0x6400000), _0x36180f = [], _0x3c1e3a = 0x1; _0x3c1e3a < _0x2c1832; _0x3c1e3a += 0x1) _0x36180f.push('' .concat(_0x107e8a, '-part-').concat(_0x3c1e3a)), _0x4e171c.get('' .concat(_0x107e8a, '-part-').concat(_0x3c1e3a), function(_0x4f1393) {
_0x4f1393.lastaccess = new Date().valueOf(), _0x4e171c.put(_0x4f1393.key, _0x4f1393);
});
_0x1ffb98.getBatch(_0x36180f, function(_0x47ffae) {
_0x47ffae.filter(function(_0x3d76ae) {
return _0x3d76ae;
}).length !== _0x36180f.length ? _0x43a689() : (_0x47ffae.forEach(function(_0x15960c, _0x92d423) {
_0x17edbf.set(_0x15960c.data, 0x6400000 * (_0x92d423 + 0x1));
}), _0x3512e9(_0x5afc4a.filename, _0x17edbf));
}, function() {}, 'sparse');
} else _0x3512e9(_0x5afc4a.filename, _0x5afc4a.data);
else _0x43a689();
}) : _0x43a689();
}).catch(function(_0x5ec6be) {
_0x2c1832.innerHTML = '<strong style="color:#f00;text-shadow: 0px 0px 3px;">Network Error</strong>';
});
},
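// Download the optional patch (gamePatchUrl): unpack archives in a worker, remember any ips/bps/ups file as _0x4d7024.gamePatch, then continue with the ROM download.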
_0x139f68 = function() {
var _0x2591d9 = _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.p3)),
_0x5c896f = _0xa88a13.config.gamePatchUrl;
if (_0x5c896f) {
var _0x227b23 = _0x5c896f.split('/'),
_0x179b43 = _0x227b23[_0x227b23.length - 0x1];
_0x179b43.indexOf('?') > -0x1 && (_0x179b43 = _0x179b43.substr(0x0, _0x179b43.indexOf('?')));
_0x179b43.split('.').pop();
_0x2591d9.innerHTML = 'Download Game Data (1/2)', _0x550f17.a.get(_0x5c896f, {
'onDownloadProgress': function(_0x3da9ab) {
_0x2591d9.innerHTML = 'Download Game Data (1/2) ';
var _0x17edbf = _0x3da9ab.total ? '' .concat(Math.floor(_0x3da9ab.loaded / _0x3da9ab.total * 0x64), '%') : '';
_0x2591d9.innerHTML += _0x17edbf;
},
'responseType': 'arraybuffer'
}).then(function(_0x159ae9) {
var _0x39a81f = new Uint8Array(_0x159ae9.data);
! function(_0xe531f0, _0x1097ca) {
var _0xf6806b = _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.p3)),
_0x59c7c1 = (_0xe531f0.split('.').pop(), function(_0x36004e) {
if (_0x36004e.data) {
if (0x1 === _0x36004e.data.t && (_0x36004e.target.terminate(), _0x5048db()), 0x2 === _0x36004e.data.t) {
var _0x17edbf = _0x36004e.data.file.split('/'),
_0x2c1832 = _0x17edbf.pop(),
_0x2b320e = _0x2c1832.split('.').pop();
_0x4d7024.FS.createPath('/', _0x17edbf.join('/'), true, true), _0x4d7024.FS.createDataFile('/' + _0x17edbf.join('/'), _0x2c1832, _0x36004e.data.data, true, !0x1), _0x468801 += _0x36004e.data.data.length, ['ips', 'bps', 'ups'].includes(_0x2b320e) && !_0x4d7024.gamePatch && (_0x4d7024.gamePatch = '/' + _0x17edbf.join('/') + '/' + _0x2c1832);
}
if (0x4 === _0x36004e.data.t && _0x36004e.data.total > 0x0) {
var _0x1097ca = Math.floor(_0x36004e.data.current / _0x36004e.data.total * 0x64),
_0x95ac83 = _0x36004e.data.name || '';
_0x1097ca <= 0x64 && _0x1097ca > 0x0 && (_0xf6806b.innerHTML = 'Decompress Game Patch ' .concat(_0x95ac83, ' ').concat(_0x1097ca, '%'));
}
}
}),
_0x4c332a = !0x1,
_0x3de3e4 = _0x1097ca.slice ? _0x1097ca.slice(0x0, 0xa) : _0x1097ca.subarray(0x0, 0xa),
_0x17f736 = _0x394c59(_0x3de3e4);
'7z' === _0x17f736 ? (_0xa24f60(_0x17edbf).then(function(_0x20601d) {
_0x20601d.onmessage = _0x59c7c1, _0x20601d.postMessage(_0x1097ca);
}), _0x4c332a = true) : 'zip' === _0x17f736 ? (_0xa24f60(_0x2c1832).then(function(_0x46622d) {
_0x46622d.onmessage = _0x59c7c1, _0x46622d.postMessage(_0x1097ca);
}), _0x4c332a = true) : 'rar' == _0x17f736 && (_0x41d0bd(_0x4ce206, 'rar').then(function(_0x2ecd24) {
_0x2ecd24.onmessage = _0x59c7c1, _0x2ecd24.postMessage(_0x1097ca);
}), _0x4c332a = true), _0x4c332a || (_0x468801 += _0x1097ca.length, _0x4d7024.gamePatch = '/' .concat(_0xe531f0), _0x4d7024.FS.createDataFile('/', _0xe531f0, _0x1097ca, true, !0x1), _0x5048db());
}(_0x179b43, _0x39a81f);
}).catch(function(_0x314293) {
console.log(_0x314293), _0x2591d9.innerHTML = '<strong style="color:#f00;text-shadow: 0px 0px 3px;">Network Error</strong>';
});
} else _0x5048db();
},
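// Arcade/MAME systems with a gameParentUrl get their parent ROM set downloaded (or restored from the cache) into the FS before the patch and game data are fetched.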
_0x452592 = function() {
var _0x17edbf = _0xa88a13,
_0x2c1832 = _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.p3)),
_0x5a21f7 = _0xa88a13.config.gameParentUrl;
if (_0x5a21f7 && ['arcade', 'mame', 'mame2003', 'fba0.2.97.29'].includes(_0x17edbf.system)) {
var _0x71bb4d = _0x5a21f7.split('/'),
_0xa1889f = _0x71bb4d[_0x71bb4d.length - 0x1];
_0xa1889f.indexOf('?') > -0x1 && (_0xa1889f = _0xa1889f.substr(0x0, _0xa1889f.indexOf('?')));
_0xa1889f.split('.').pop();
_0x2c1832.innerHTML = 'Download Game Data (1/2)', _0x17edbf.config.gamePatchUrl && (_0x2c1832.innerHTML = 'Download Game Data (1/3)'), _0x550f17.a.head(_0x5a21f7, {}).then(function(_0x35c23a) {
var _0x17edbf = _0x35c23a.headers['content-length'],
_0x500ba1 = _0x35c23a.headers['content-type'],
_0x316472 = (_0x35c23a.headers['last-modified'], '' .concat(_0x1143c5, '-').concat(_0xa1889f)),
_0x31f098 = function() {
_0x550f17.a.get(_0x5a21f7, {
'onDownloadProgress': function(_0x5e449f) {
var _0x17edbf = _0x5e449f.total ? '' .concat(Math.floor(_0x5e449f.loaded / _0x5e449f.total * 0x64), '%') : '';
_0x2c1832.innerHTML = 'Download Game Data (1/2) ' .concat(_0x17edbf);
},
'responseType': 'arraybuffer'
}).then(function(_0x191007) {
var _0x17edbf = new Uint8Array(_0x191007.data);
if (_0x316472 && _0x4e171c.db)
if (_0x17edbf.length > 0x6400000)
for (var _0x2c1832 = Math.ceil(_0x17edbf.length / 0x6400000), _0x3f44d7 = 0x0; _0x3f44d7 < _0x2c1832; _0x3f44d7 += 0x1) {
var _0x15a529 = _0x17edbf.slice ? _0x17edbf.slice(0x6400000 * _0x3f44d7, 0x6400000 * (_0x3f44d7 + 0x1)) : _0x17edbf.subarray(0x6400000 * _0x3f44d7, 0x6400000 * (_0x3f44d7 + 0x1)),
_0x4a61f1 = '' .concat(_0x316472);
_0x3f44d7 > 0x0 && (_0x4a61f1 = '' .concat(_0x316472, '-part-').concat(_0x3f44d7)), _0x4d7024.saveFilenames.push(_0x4a61f1), _0x4e171c.put(_0x4a61f1, {
'filename': _0xa1889f,
'filesize': _0x17edbf.length,
'filetype': _0x500ba1,
'key': _0x4a61f1,
'lastaccess': new Date().valueOf(),
'next': '' .concat(_0x316472, '-part-').concat(_0x3f44d7 + 0x1)
}), _0x1ffb98.db && _0x1ffb98.put(_0x4a61f1, {
'filename': _0xa1889f,
'filesize': _0x17edbf.length,
'filetype': _0x500ba1,
'key': _0x4a61f1,
'data': _0x15a529
});
} else _0x4d7024.saveFilenames.push(_0x316472), _0x4e171c.put(_0x316472, {
'filename': _0xa1889f,
'filesize': _0x17edbf.length,
'filetype': _0x500ba1,
'key': _0x316472,
'lastaccess': new Date().valueOf()
}), _0x1ffb98.db && _0x1ffb98.put(_0x316472, {
'filename': _0xa1889f,
'filesize': _0x17edbf.length,
'filetype': _0x500ba1,
'key': _0x316472,
'data': _0x17edbf
});
var _0x54d79 = _0xa1889f.replace(/\#/g, '');
try {
_0x468801 += _0x17edbf.length, _0x4d7024.FS.createDataFile('/', _0x54d79, _0x17edbf, true, !0x1), _0x139f68();
} catch (_0x125736) {
console.log(_0x125736);
}
}).catch(function(_0x516cc3) {
console.log(_0x516cc3), _0x2c1832.innerHTML = '<strong style="color:#f00;text-shadow: 0px 0px 3px;">Network Error</strong>';
});
};
_0x1ffb98.db ? _0x1ffb98.get(_0x316472, function(_0x3e612b) {
if (_0x3e612b && _0x3e612b.filename === _0xa1889f && _0x3e612b.filesize - _0x17edbf == 0x0 && _0x3e612b.filetype === _0x500ba1 && _0x3e612b.key)
if (_0x3e612b.filesize > 0x6400000) {
var _0x2c1832 = new Uint8Array(_0x3e612b.filesize);
_0x2c1832.set(_0x3e612b.data);
for (var _0xa2197 = Math.ceil(_0x3e612b.filesize / 0x6400000), _0x6f30ae = [], _0x32aa3c = 0x1; _0x32aa3c < _0xa2197; _0x32aa3c += 0x1) _0x6f30ae.push('' .concat(_0x316472, '-part-').concat(_0x32aa3c));
_0x1ffb98.getBatch(_0x6f30ae, function(_0x198a02) {
_0x198a02.filter(function(_0x1c8a88) {
return _0x1c8a88;
}).length !== _0x6f30ae.length ? _0x31f098() : (_0x198a02.forEach(function(_0x22b824, _0x16eecf) {
_0x2c1832.set(_0x22b824.data, 0x6400000 * (_0x16eecf + 0x1));
}), _0x4d7024.FS.createDataFile('/', _0x3e612b.filename, _0x2c1832, true, !0x1), _0x139f68());
}, function() {}, 'sparse');
} else _0x4d7024.FS.createDataFile('/', _0x3e612b.filename, _0x3e612b.data, true, !0x1), _0x139f68();
else _0x31f098();
}) : _0x31f098();
}).catch(function(_0x20f2e3) {
_0x2c1832.innerHTML = '<strong style="color:#f00;text-shadow: 0px 0px 3px;">Network Error</strong>';
});
} else _0x139f68();
},
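// Handle the downloaded BIOS: 7z/rar archives (and zip, except on arcade systems whose zips are kept intact) are unpacked in a worker, the files written to the FS, and the parent-ROM step runs next.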
_0x57d85d = function(_0x5e7c8b, _0x3cff36) {
var _0x3ea97 = _0xa88a13,
_0x50119 = _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.p2)),
_0x19a43e = (_0x5e7c8b.split('.').pop(), function(_0x1e8643) {
if (_0x1e8643.data) {
if (0x1 === _0x1e8643.data.t && (_0x50119.innerHTML = 'BIOS ready', _0x1e8643.target.terminate(), _0x452592()), 0x2 === _0x1e8643.data.t) {
var _0x17edbf = _0x1e8643.data.file.split('/'),
_0x2c1832 = _0x17edbf.pop();
_0x2c1832.split('.').pop();
_0x4d7024.FS.createPath('/', _0x17edbf.join('/'), true, true), _0x4d7024.FS.createDataFile('/' + _0x17edbf.join('/'), _0x2c1832, _0x1e8643.data.data, true, !0x1), _0x468801 += _0x1e8643.data.data.length;
}
if (0x4 === _0x1e8643.data.t && _0x1e8643.data.total > 0x0) {
var _0x149f09 = Math.floor(_0x1e8643.data.current / _0x1e8643.data.total * 0x64),
_0x5e7c8b = _0x1e8643.data.name || '';
_0x149f09 <= 0x64 && _0x149f09 > 0x0 && (_0x50119.innerHTML = 'Decompress BIOS ' .concat(_0x5e7c8b, ' ').concat(_0x149f09, '%'));
}
}
}),
_0x3a6560 = !0x1,
_0x12e229 = _0x3cff36.slice ? _0x3cff36.slice(0x0, 0xa) : _0x3cff36.subarray(0x0, 0xa),
_0x80852b = _0x394c59(_0x12e229);
'7z' === _0x80852b ? (_0xa24f60(_0x17edbf).then(function(_0x1b6a80) {
_0x1b6a80.onmessage = _0x19a43e, _0x1b6a80.postMessage(_0x3cff36);
}), _0x3a6560 = true) : 'zip' !== _0x80852b || ['arcade', 'mame', 'mame2003', 'fba0.2.97.29'].includes(_0x3ea97.system) ? 'rar' === _0x80852b && (_0x41d0bd(_0x4ce206, 'rar').then(function(_0x259be0) {
_0x259be0.onmessage = _0x19a43e, _0x259be0.postMessage(_0x3cff36);
}), _0x3a6560 = true) : (_0xa24f60(_0x2c1832).then(function(_0x31c70d) {
_0x31c70d.onmessage = _0x19a43e, _0x31c70d.postMessage(_0x3cff36);
}), _0x3a6560 = true), _0x3a6560 || (_0x468801 += _0x3cff36.length, _0x50119.innerHTML = 'BIOS ready', _0x4d7024.FS.createDataFile('/', _0x5e7c8b, _0x3cff36, true, !0x1), _0x452592());
},
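// Runs once the core script is ready: wire up dialogs, keyboard/gamepad and listeners, mount IDBFS at /data/saves, write retroarch.cfg plus the core-options file, then fetch the BIOS (biosUrl) if one is configured.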
_0x10b71a = function() {
_0x4d7024.loading.querySelector('.' .concat(_0x4fce24.p1)).innerHTML = 'Game Core ready';
var _0x17edbf = _0xa88a13.config.biosUrl,
_0x2c1832 = _0xa88a13;
_0xdcec2a.bindFunction.call(_0xa88a13), _0xdcec2a.setGamepadDialog.call(_0xa88a13), _0xdcec2a.setCacheDialog.call(_0xa88a13), _0xdcec2a.setLoadingDialog.call(_0xa88a13), _0xdcec2a.setNetplayDialog.call(_0xa88a13), _0xdcec2a.setCheatDialog.call(_0xa88a13), _0xdcec2a.initGamepad.call(_0xa88a13), _0xdcec2a.initKeyboard.call(_0xa88a13), _0xdcec2a.listeners.call(_0xa88a13), _0x4d7024.FS.createFolder('/', 'etc', true, true), _0x4d7024.FS.mkdir('/data'), _0x4d7024.FS.mkdir('/data/saves'), 'undefined' != typeof IDBFS ? _0x4d7024.FS.mount(IDBFS, {}, '/data/saves') : _0x4d7024.FS.mount(_0x4d7024.FS.filesystems.IDBFS, {}, '/data/saves'), _0x4d7024.FS.mkdir('/shader'), _0x4d7024.FS.syncfs(true, function(_0x1a8f12) {}), _0xdcec2a.initShaders();
var _0x14d39f = '';
_0x14d39f += 'savefile_directory = /data/saves\n', _0x14d39f += 'video_vsync = true\n', _0x14d39f += 'screenshot_directory = /\n', _0x14d39f += 'video_shader = /shader/shader.glslp\n', _0x14d39f += 'video_shader_enable = true\n', _0x14d39f += 'video_font_enable = false\n', _0x14d39f += 'video_scale = 1.0\n', _0x14d39f += 'video_gpu_screenshot = false\n', _0x14d39f += 'audio_latency = 96\n';
var _0x36edb7 = _0x2593da.defaultCoreOptionsValues.call(_0xa88a13);
_0x2c1832.lightgun && ('nes' === _0xa88a13.system && (_0x14d39f += 'input_libretro_device_p2 = "258"\n'), 'segaMS' !== _0xa88a13.system && 'segaMD' !== _0xa88a13.system && 'segaCD' !== _0xa88a13.system || (_0x36edb7.genesis_plus_gx_gun_cursor = 'yes', _0x36edb7.genesis_plus_gx_invert_mouse = 'yes', _0x36edb7.genesis_plus_gx_bram = 'per game', _0x14d39f += 'input_libretro_device_p2 = "260"\n'), 'snes' === _0xa88a13.system && (_0x14d39f += 'input_libretro_device_p2 = "260"\n'), ['snes2002', 'snes2005', 'snes2010', 'snesnext'].includes(_0xa88a13.system) && (_0x14d39f += 'input_libretro_device_p2 = "260"\n')), _0x2c1832.mouse && ('snes' === _0xa88a13.system && (_0x14d39f += 'input_libretro_device_p1 = 2\n'), ['snes2002', 'snes2005', 'snes2010', 'snesnext'].includes(_0xa88a13.system) && (_0x14d39f += 'input_libretro_device_p1 = 2\n')), _0x2c1832.multitap && ('snes' === _0xa88a13.system && (_0x14d39f += 'input_libretro_device_p2 = 257\n'), ['snes2002', 'snes2005', 'snes2010', 'snesnext'].includes(_0xa88a13.system) && (_0x14d39f += 'input_libretro_device_p2 = 257\n')), 'n64' === _0xa88a13.system && (_0x14d39f += 'input_libretro_device_p1 = 5\n'), _0x2c1832.system, _0x14d39f += 'fastforward_ratio = 1.0\n', _0x14d39f += 'video_smooth = false\n';
var _0x55117c = _0x5dc0c0(_0x36edb7, _0x2593da.storage.get('core-options') || {}),
_0x32dd27 = '';
if (_0x55117c && Object.keys(_0x55117c).forEach(function(_0x2950d3) {
if (_0x2c1832.lightgun && 'nes' === _0x2c1832.system && 'system_type' === _0x2950d3) return !0x1;
_0x32dd27 += '' .concat(_0x2950d3, ' = "').concat(_0x55117c[_0x2950d3], '"\n'), _0x1093f4.call(_0x2c1832, _0x2c1832.elements.container, 'start-game', function(_0x362c5c) {
_0xdcec2a.setVariable(_0x2950d3, _0x55117c[_0x2950d3]);
});
}), _0x4d7024.FS.createDataFile('/etc', 'retroarch.cfg', _0x14d39f, true, true), _0x4d7024.FS.createFolder('/home/web_user', '.config', true, true), _0x4d7024.FS.createFolder('/home/web_user/.config', 'retroarch', true, true), _0x4d7024.FS.createDataFile('/home/web_user/.config/retroarch', 'retroarch-core-options.cfg', _0x32dd27, true, true), _0x17edbf) {
var _0x12e137 = _0x17edbf.split('/'),
_0x103241 = _0x12e137[_0x12e137.length - 0x1];
_0x103241.indexOf('?') > -0x1 && (_0x103241 = _0x103241.substr(0x0, _0x103241.indexOf('?')));
_0x103241.split('.').pop();
var _0x1f8424 = _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.p2));
_0x550f17.a.head(_0x17edbf, {}).then(function(_0x4bd3bd) {
var _0x8e30bf = _0x4bd3bd.headers['content-length'],
_0x14309b = _0x4bd3bd.headers['content-type'],
_0x51bfef = (_0x4bd3bd.headers['last-modified'], _0x2c1832.system),
_0x574747 = function() {
_0x550f17.a.get(_0x17edbf, {
'onDownloadProgress': function(_0x107517) {
var _0x17edbf = _0x107517.total ? '' .concat(Math.floor(_0x107517.loaded / _0x107517.total * 0x64), '%') : '';
_0x1f8424.innerHTML = 'Download BIOS ' .concat(_0x17edbf);
},
'responseType': 'arraybuffer'
}).then(function(_0x4b0c1d) {
var _0x17edbf = new Uint8Array(_0x4b0c1d.data);
_0xa884ea.db && _0xa884ea.put(_0x1143c5, {
'filename': _0x103241,
'filesize': _0x17edbf.length,
'filetype': _0x14309b,
'data': _0x17edbf
}), _0x57d85d(_0x103241, _0x17edbf);
}).catch(function(_0x58e374) {
_0x1f8424.innerHTML = '<strong style="color:#f00;text-shadow: 0px 0px 3px;">Network Error</strong>';
});
};
_0xa884ea.db ? _0xa884ea.get(_0x51bfef, function(_0x5cac21) {
_0x5cac21 && _0x5cac21.filename === _0x103241 && _0x5cac21.filesize - _0x8e30bf == 0x0 && _0x5cac21.filetype === _0x14309b ? _0x57d85d(_0x5cac21.filename, _0x5cac21.data) : _0x574747();
}) : _0x574747();
}).catch(function(_0x4f39c1) {
_0x1f8424.innerHTML = '<strong style="color:#f00;text-shadow: 0px 0px 3px;">Network Error</strong>';
});
} else _0x452592();
},
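// Copy the .mem initialization image into the Emscripten heap at GLOBAL_BASE (falling back to Runtime.GLOBAL_BASE, or offset 8).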
_0x29b999 = function(_0x172278) {
var _0x17edbf;
_0x17edbf = _0x172278.byteLength ? new Uint8Array(_0x172278) : _0x172278, 'undefined' != typeof GLOBAL_BASE ? _0x4d7024.Module.HEAPU8.set(_0x17edbf, GLOBAL_BASE) : 'undefined' != typeof Runtime && Runtime.GLOBAL_BASE ? _0x4d7024.Module.HEAPU8.set(_0x17edbf, Runtime.GLOBAL_BASE) : _0x4d7024.Module.HEAPU8.set(_0x17edbf, 0x8);
},
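// Decompress the core bundle in a worker, capture its js/mem/wasm parts, then run the loader script (via a Blob <script> on iOS wasm builds, window.eval otherwise) and hook up _0x4d7024.Module and the FS.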
_0x4f0fcc = function(_0x18cb26) {
var _0x2c1832 = _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.p1)),
_0x247459 = '',
_0x54d695 = function() {
null !== _0x4d7024.wasmData ? (window.Module.wasmBinary = _0x4d7024.wasmData, _0x4bd781(), window.Module.monitorRunDependencies = function(_0x59580a) {
0x0 === _0x59580a && _0x10b71a();
}, window.Module.onRuntimeInitialized = function() {}) : (_0x4bd781(), window.Module.monitorRunDependencies = function(_0x55a24a) {
console.log('monitorRunDependencies, ', _0x55a24a);
}, window.Module.onRuntimeInitialized = function() {
_0x4d7024.Module || (_0x4d7024.Module = window.Module), _0x4d7024.Module && _0x4d7024.Module.FS ? _0x4d7024.FS = _0x4d7024.Module.FS : _0x4d7024.FS = window.FS, console.log('onRuntimeInitialized'), _0x10b71a();
});
var _0x18cb26 = window.Module;
if (null !== _0x4d7024.wasmData && (_0x18cb26.wasmBinary = _0x4d7024.wasmData), _0x59aa33.isIos && null !== _0x4d7024.wasmData) {
var _0x17edbf = new Blob([_0x247459], {
'type': 'text/javascript'
}),
_0x2c1832 = _0x428003('script', {
'src': _0x18a437.createObjectURL(_0x17edbf)
});
document.body.appendChild(_0x2c1832), _0x2c1832.onload = function() {
void 0x0 === window.addRunDependency && 'undefined' != typeof EmulatorJS && (_0x4d7024.Module = EmulatorJS(_0x18cb26), EmulatorJS = void 0x0), null !== _0x4d7024.memData && _0x29b999(_0x4d7024.memData), _0x4d7024.Module || (_0x4d7024.Module = window.Module), _0x4d7024.Module && _0x4d7024.Module.FS ? _0x4d7024.FS = _0x4d7024.Module.FS : _0x4d7024.FS = window.FS;
};
} else _0x247459 = function(_0x3df03e) {
for (var _0x17edbf = [], _0x2c1832 = 0x0; _0x2c1832 < _0x3df03e.length; _0x2c1832 += 0x1) {
var _0xcf0289 = _0x3df03e[_0x2c1832];
_0xcf0289 > 0xff && (_0xcf0289 &= 0xff), _0x17edbf.push(String.fromCharCode(_0xcf0289));
}
return _0x17edbf.join('');
}(_0x247459), window.eval(_0x247459), void 0x0 === window.addRunDependency && 'undefined' != typeof EmulatorJS && (_0x4d7024.Module = EmulatorJS(_0x18cb26), EmulatorJS = void 0x0), null !== _0x4d7024.memData && _0x29b999(_0x4d7024.memData), _0x4d7024.Module || (_0x4d7024.Module = window.Module), _0x4d7024.Module && _0x4d7024.Module.FS ? _0x4d7024.FS = _0x4d7024.Module.FS : _0x4d7024.FS = window.FS, window.EJS_MODULE = _0x4d7024.Module, window.ejs_loader = _0x4d7024, window.ejs_media = _0xdcec2a;
_0x247459 = null;
};
_0x1093f4.call(_0xc6823, _0xc6823.elements.container, 'script-loaded', function(_0x467eee) {
_0x54d695();
});
var _0x21d6a1 = function(_0x3f49ec) {
if (_0x3f49ec.data) {
if (0x1 === _0x3f49ec.data.t && (_0xbae705.call(_0xc6823, _0xc6823.elements.container, 'script-loaded'), _0x3f49ec.target.terminate()), 0x2 === _0x3f49ec.data.t) {
var _0x17edbf = _0x3f49ec.data.file.split('/').pop(),
_0x58b5a7 = _0x17edbf.split('.').pop();
_0x4d7024.coreFileData[_0x17edbf] = _0x3f49ec.data.data, 'js' !== _0x58b5a7 || _0x17edbf.includes('.worker.js') || (_0x247459 = _0x3f49ec.data.data), 'mem' === _0x58b5a7 && (_0x4d7024.memData = _0x3f49ec.data.data), 'wasm' === _0x58b5a7 && (_0x4d7024.wasmData = _0x3f49ec.data.data);
}
0x4 === _0x3f49ec.data.t && _0x3f49ec.data.total > 0x0 && (_0x2c1832.innerHTML = 'Decompress Game Core ' .concat(Math.floor(_0x3f49ec.data.current / _0x3f49ec.data.total * 0x64), '%'));
}
};
_0xa24f60(_0x17edbf).then(function(_0x48c633) {
_0x48c633.onmessage = _0x21d6a1, _0x48c633.postMessage(_0x18cb26);
});
},
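// "Start game" click handler: poll until the readiness flags are set, build window.Module (print, locateFile, readAsync, ...), fetch v.json to learn the core's capabilities (wasm/asm.js, netplay, save states), then load the matching core file from cache or the network.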
_0x26d6a1 = function(_0x3787ba) {
return _0x132da7(_0x4d7024.loading.querySelector('.' .concat(_0x4fce24['start-game'])), true),
function() {
var _0x3787ba = _0x4d7024.loading.querySelector('.' .concat(_0x4fce24.p1)),
_0x2c1832 = _0xa88a13;
_0x3787ba.innerHTML = 'Loading...';
var _0x1c9c57 = setInterval(function() {
if (true === _0x528f2b && true === _0x547484 && true === _0x5e24fa) {
window.Module = {
'TOTAL_MEMORY': 0x10000000,
'noInitialRun': true,
'arguments': [],
'preRun': [],
'postRun': [],
'canvas': _0xb2be2a,
'print': function(_0x4a8983) {
if (window.EJS_DEBUG_XX === true) {
console.log(_0x4a8983);
}
},
'printErr': function(_0x367bee) {
if (window.EJS_DEBUG_XX === true) {
console.log(_0x367bee);
}
},
'totalDependencies': 0x0,
'monitorRunDependencies': function(_0x5d4b07) {},
'locateFile': function(_0x3fe7ca) {
var _0x3787ba = null;
if (_0x3fe7ca.includes('.worker.js')) {
Object.keys(_0x4d7024.coreFileData).includes(_0x3fe7ca) && (_0x3787ba = _0x4d7024.coreFileData[_0x3fe7ca]);
var _0x2c1832 = new Blob([''], {
'type': 'application/javascript'
});
return window.URL.createObjectURL(_0x2c1832);
}
return _0x3fe7ca.includes('.js.mem') && (Object.keys(_0x4d7024.coreFileData).includes(_0x3fe7ca) && (_0x3787ba = _0x4d7024.coreFileData[_0x3fe7ca]), _0x3787ba) ? (_0x4d7024.memData = null, _0x3787ba.buffer) : _0x3fe7ca;
},
'readAsync': function(_0x20d016, _0x9d2de4, _0x1425ee) {
if (_0x20d016 instanceof ArrayBuffer) setTimeout(function() {
_0x9d2de4(_0x20d016);
}, 0x1f4);
else {
var _0x164012 = new XMLHttpRequest();
_0x164012.open('GET', _0x20d016, true), _0x164012.responseType = 'arraybuffer', _0x164012.onload = function() {
0xc8 == _0x164012.status || 0x0 == _0x164012.status && _0x164012.response ? _0x9d2de4(_0x164012.response) : _0x1425ee();
}, _0x164012.onerror = _0x1425ee, _0x164012.send(null);
}
}
};
var _0xa88a13, _0x1dedcd = Math.ceil(new Date().valueOf() / 0x3e8);
var _0xa88a13 = (_0xc6823.customPaths && typeof _0xc6823.customPaths['v.json'] == 'string') ? _0xc6823.customPaths['v.json'] : (_0xc6823.dataPath + 'v.json?t='+_0x1dedcd);
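// Fetch a core data file, cache it with its version, then drop its 12-byte header and stamp the 7-Zip signature onto the remainder before handing it to _0x4f0fcc.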
var _0x3641d6 = function(_0x16049b, _0x55a075) {
var path = (_0xc6823.customPaths && typeof _0xc6823.customPaths[_0x16049b] == 'string') ? _0xc6823.customPaths[_0x16049b] : (_0xc6823.dataPath+_0x16049b+'?v='+_0x55a075);
_0x550f17.a.get(path, {
'onDownloadProgress': function(_0x117e6b) {
var _0x55a075 = _0x117e6b.total ? '' .concat(Math.floor(_0x117e6b.loaded / _0x117e6b.total * 0x64), '%') : '';
_0x3787ba.innerHTML = 'Download Game Core ' .concat(_0x55a075);
},
'responseType': 'arraybuffer'
}).then(function(_0x5eb80b) {
var _0x519406 = new Uint8Array(_0x5eb80b.data);
_0x24de8d.db && _0x24de8d.put(_0x16049b, {
'version': _0x55a075,
'data': _0x519406
});
var _0xea4c63 = _0x519406.slice ? _0x519406.slice(0xc) : _0x519406.subarray(0xc);
_0xea4c63.set([0x37, 0x7a, 0xbc, 0xaf, 0x27, 0x1c, 0x0, 0x3], 0x0), _0x519406 = null, _0x4f0fcc(_0xea4c63);
}).catch(function(_0x2e06c8) {
_0x3787ba.innerHTML = '<strong style="color:#f00;text-shadow: 0px 0px 3px;">Network Error</strong>';
});
};
_0x550f17.a.get(_0xa88a13, {}).then(function(_0x578a2b) {
var _0x2458d5, _0x5d075f = _0x29078e[_0x2c1832.system],
_0x28ee7f = _0x578a2b.data[_0x2c1832.system],
_0x124167 = _0x28ee7f.version ? _0x28ee7f.version : 0x1,
_0x254bc2 = Boolean(_0x28ee7f.netplay),
_0x5b1dcd = Boolean(_0x28ee7f.state),
_0x9a1f1 = Boolean(_0x28ee7f.asmjs),
_0x3e4345 = Boolean(_0x28ee7f.wasm);
if ('nds' == _0x2c1832.system && !_0x2d904a.wasm) {
_0x5b1dcd = !0x1;
}
// _0x5b1dcd: whether save/load state is supported for this core
if (_0xc6823.statesSupported === false) {
_0x5b1dcd = false;
}
_0xc6823.setStatesSupported(_0x5b1dcd);
_0x4d7024.coreFileVersion = _0x124167;
_0x4d7024.coreFileName = _0x5d075f;
var _0x2eb141 = !0x1;
['webgl2', 'experimental-webgl2'].includes(_0x2d904a.webgl.WEBGL_CONTEXT) && (_0x2eb141 = true);
'undefined' != typeof EJS_N64_LEGACY && 0x1 == EJS_N64_LEGACY && (_0x2eb141 = !0x1);
if ('mame' == _0x2c1832.system) {
var _0x4921ff = _0x2c1832.config,
_0x38239e = (_0x4921ff.gameId, _0x4921ff.gameUrl.split('/')),
_0x29349f = _0x38239e[_0x38239e.length - 0x1];
_0x29349f.indexOf('?') > -0x1 && (_0x29349f = _0x29349f.substr(0x0, _0x29349f.indexOf('?')));
var _0x36b5a0;
_0x29349f.split('.').pop();
_0x36b5a0 = 'https://www.emulatorjs.com/api/mame.php?name='.concat(_0x29349f), _0x550f17.a.get(_0x36b5a0, {
'responseType': 'text'
}).then(function(_0xf30170) {
var _0x3787ba = _0xf30170.data.split('|');
_0x4d7024.coreFileName = '' .concat(_0x2c1832.system, '-').concat(_0x3787ba[0x0]), _0x5b1dcd = '1' === _0x3787ba[0x1], _0x1e2c68.element(_0x2c1832.elements.buttons.saveState) && _0x132da7(_0x2c1832.elements.buttons.saveState, !_0x5b1dcd), _0x1e2c68.element(_0x2c1832.elements.buttons.loadState) && _0x132da7(_0x2c1832.elements.buttons.loadState, !_0x5b1dcd), _0x132da7(_0x2593da.contextMenu.querySelectorAll('ul li').item(0x2), !_0x5b1dcd), _0x132da7(_0x2593da.contextMenu.querySelectorAll('ul li').item(0x3), !_0x5b1dcd), _0x3787ba[0x0] ? (_0x2d904a.wasm && _0x3e4345 ? (_0x2458d5 = '' .concat(_0x2c1832.system, '-').concat(_0x3787ba[0x0], '-wasm.data'), _0x31a5b3 = true) : (_0x3787ba.innerHTML = '<strong style="color:#f00;text-shadow: 0px 0px 3px;">Webassembly support is not detected in this browser</strong>', _0x2458d5 = ''), _0x2458d5 && (_0x24de8d.db ? _0x24de8d.get(_0x2458d5, function(_0x655c87) {
if (_0x655c87 && _0x655c87.version === _0x124167) {
var _0x3787ba = _0x655c87.data.slice ? _0x655c87.data.slice(0xc) : _0x655c87.data.subarray(0xc);
_0x3787ba.set([0x37, 0x7a, 0xbc, 0xaf, 0x27, 0x1c, 0x0, 0x3], 0x0), _0x4f0fcc(_0x3787ba);
} else _0x3641d6(_0x2458d5, _0x124167);
}) : _0x3641d6(_0x2458d5, _0x124167))) : _0x3787ba.innerHTML = '<strong style="color:#f00;text-shadow: 0px 0px 3px;">Unsupported Game</strong>';
}).catch(function(_0x941dfc) {
console.log(_0x941dfc), _0x3787ba.innerHTML = '<strong style="color:#f00;text-shadow: 0px 0px 3px;">Network Error</strong>';
});
} else _0x2d904a.wasm && _0x3e4345 ? (_0x2458d5 = '' .concat(_0x5d075f, '-wasm.data'), 'n64' !== _0x2c1832.system || _0x2eb141 || (_0x2458d5 = '' .concat(_0x5d075f, '-legacy-wasm.data')), _0x31a5b3 = true) : _0x9a1f1 && (_0x2458d5 = '' .concat(_0x5d075f, '-asmjs.data'), 'n64' !== _0x2c1832.system || _0x2eb141 || (_0x2458d5 = '' .concat(_0x5d075f, '-legacy-asmjs.data'))), 'undefined' != typeof EJS_CUSTOM_COREFILE && (_0x2458d5 = EJS_CUSTOM_COREFILE), _0x2458d5 ? _0x24de8d.db ? _0x24de8d.get(_0x2458d5, function(_0x47a6fd) {
if (_0x47a6fd && _0x47a6fd.version === _0x124167) {
var _0x3787ba = _0x47a6fd.data.slice ? _0x47a6fd.data.slice(0xc) : _0x47a6fd.data.subarray(0xc);
_0x3787ba.set([0x37, 0x7a, 0xbc, 0xaf, 0x27, 0x1c, 0x0, 0x3], 0x0), _0x4f0fcc(_0x3787ba);
} else _0x3641d6(_0x2458d5, _0x124167);
}) : _0x3641d6(_0x2458d5, _0x124167) : _0x3787ba.innerHTML = '<strong style="color:#f00;text-shadow: 0px 0px 3px;">Please upgrade your browser to the latest version</strong>';
_0x254bc2 || _0x1e2c68.element(_0x2c1832.elements.buttons.netplay) && _0x132da7(_0x2c1832.elements.buttons.netplay, true), _0x1e2c68.element(_0x2c1832.elements.buttons.saveState) && _0x132da7(_0x2c1832.elements.buttons.saveState, !_0x5b1dcd), _0x1e2c68.element(_0x2c1832.elements.buttons.loadState) && _0x132da7(_0x2c1832.elements.buttons.loadState, !_0x5b1dcd), _0x132da7(_0x2593da.contextMenu.querySelectorAll('ul li').item(0x2), !_0x5b1dcd), _0x132da7(_0x2593da.contextMenu.querySelectorAll('ul li').item(0x3), !_0x5b1dcd);
}).catch(function(_0x2d06a9) {
_0x3787ba.innerHTML = '<strong style="color:#f00;text-shadow: 0px 0px 3px;">Network Error</strong>';
}), clearInterval(_0x1c9c57), _0x4d7024.romdb = _0x4e171c;
}
}, 0x1f4);
}(), !0x1;
};
_0x455c85(_0x4d7024.loading.querySelector('.' .concat(_0x4fce24['start-game'])), 'click touchstart', _0x26d6a1);
if (_0xc6823.startOnLoad === true) {
_0x26d6a1()
}
}
},
_0x27f4c4 = _0x4d7024,
_0x4a2390 = _0x2c1832(0xa1);
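// Transpiler helpers: _0x3c0613 turns an array-like/iterable into a real array (spread helper); _0xa08ac0 defines prototype members (createClass-style helper).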
function _0x3c0613(_0x3eef01) {
return function(_0x3d9b1e) {
if (Array.isArray(_0x3d9b1e)) {
for (var _0x17edbf = 0x0, _0x2c1832 = new Array(_0x3d9b1e.length); _0x17edbf < _0x3d9b1e.length; _0x17edbf++) _0x2c1832[_0x17edbf] = _0x3d9b1e[_0x17edbf];
return _0x2c1832;
}
}(_0x3eef01) || function(_0x425c1a) {
if (Symbol.iterator in Object(_0x425c1a) || '[object Arguments]' === Object.prototype.toString.call(_0x425c1a)) return Array.from(_0x425c1a);
}(_0x3eef01) || function() {
throw new TypeError('Invalid attempt to spread non-iterable instance');
}();
}
function _0xa08ac0(_0x514af5, _0x4089be) {
for (var _0x2c1832 = 0x0; _0x2c1832 < _0x4089be.length; _0x2c1832++) {
var _0x3347c8 = _0x4089be[_0x2c1832];
_0x3347c8.enumerable = _0x3347c8.enumerable || !0x1, _0x3347c8.configurable = true, 'value' in _0x3347c8 && (_0x3347c8.writable = true), Object.defineProperty(_0x514af5, _0x3347c8.key, _0x3347c8);
}
}
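// Bundled MicroModal-style dialog helper: _0x4fc27f is the modal class (focus trapping, Esc/Tab handling), and the exported object offers init/show/close keyed on data-micromodal-trigger attributes.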
var _0x1eda08, _0x4fc27f, _0xf43a92, _0x214f42, _0x3a9bfc, _0x4d8495 = (_0x1eda08 = ['a[href]', 'area[href]', 'input:not([disabled]):not([type="hidden"]):not([aria-hidden])', 'select:not([disabled]):not([aria-hidden])', 'textarea:not([disabled]):not([aria-hidden])', 'button:not([disabled]):not([aria-hidden])', 'iframe', 'object', 'embed', '[contenteditable]', '[tabindex]:not([tabindex^="-"])'], _0x4fc27f = function() {
function _0x5566a2(_0x1fcb21) {
var _0x2c1832 = _0x1fcb21.targetModal,
_0x29e6ab = _0x1fcb21.triggers,
_0x114128 = void 0x0 === _0x29e6ab ? [] : _0x29e6ab,
_0x5211ae = _0x1fcb21.onShow,
_0x402faa = void 0x0 === _0x5211ae ? function() {} : _0x5211ae,
_0x120cc3 = _0x1fcb21.onClose,
_0x48ad1c = void 0x0 === _0x120cc3 ? function() {} : _0x120cc3,
_0x6a1522 = _0x1fcb21.openTrigger,
_0x306932 = void 0x0 === _0x6a1522 ? 'data-micromodal-trigger' : _0x6a1522,
_0x446302 = _0x1fcb21.closeTrigger,
_0x2f0676 = void 0x0 === _0x446302 ? 'data-micromodal-close' : _0x446302,
_0x2a4c0e = _0x1fcb21.disableScroll,
_0x2297fa = void 0x0 !== _0x2a4c0e && _0x2a4c0e,
_0x5a3f47 = _0x1fcb21.disableFocus,
_0x4a8131 = void 0x0 !== _0x5a3f47 && _0x5a3f47,
_0x42179f = _0x1fcb21.awaitCloseAnimation,
_0x27da4d = void 0x0 !== _0x42179f && _0x42179f,
_0x1b535e = _0x1fcb21.debugMode,
_0x33d8ca = void 0x0 !== _0x1b535e && _0x1b535e,
_0xee6cb0 = _0x1fcb21.closeByEsckey,
_0x54de6a = void 0x0 === _0xee6cb0 || _0xee6cb0;
! function(_0x27e736, _0x56010c) {
if (!(_0x27e736 instanceof _0x56010c)) throw new TypeError('Cannot call a class as a function');
}(this, _0x5566a2), this.modal = document.getElementById(_0x2c1832), this.config = {
'debugMode': _0x33d8ca,
'disableScroll': _0x2297fa,
'openTrigger': _0x306932,
'closeTrigger': _0x2f0676,
'onShow': _0x402faa,
'onClose': _0x48ad1c,
'awaitCloseAnimation': _0x27da4d,
'disableFocus': _0x4a8131,
'closeByEsckey': _0x54de6a
}, _0x114128.length > 0x0 && this.registerTriggers.apply(this, _0x3c0613(_0x114128)), this.onClick = this.onClick.bind(this), this.onKeydown = this.onKeydown.bind(this);
}
var _0x17edbf, _0x2c1832, _0x5021cc;
return _0x17edbf = _0x5566a2, (_0x2c1832 = [{
'key': 'registerTriggers',
'value': function() {
for (var _0x5566a2 = this, _0x17edbf = arguments.length, _0x2c1832 = new Array(_0x17edbf), _0x4e24fa = 0x0; _0x4e24fa < _0x17edbf; _0x4e24fa++) _0x2c1832[_0x4e24fa] = arguments[_0x4e24fa];
_0x2c1832.forEach(function(_0x511ded) {
_0x511ded.addEventListener('click', function() {
return _0x5566a2.showModal();
});
});
}
}, {
'key': 'showModal',
'value': function() {
this.activeElement = document.activeElement, this.modal.setAttribute('aria-hidden', 'false'), this.modal.classList.add(_0x449eac({
'is-open': true
})), this.setFocusToFirstNode(), this.addEventListeners(), this.config.onShow(this.modal);
}
}, {
'key': 'closeModal',
'value': function() {
var _0x5566a2 = this.modal;
this.modal.setAttribute('aria-hidden', 'true'), this.removeEventListeners(), this.config.onClose(this.modal), this.config.awaitCloseAnimation ? this.modal.addEventListener('animationend', function _0x17edbf() {
_0x5566a2.classList.remove(_0x449eac({
'is-open': true
})), _0x5566a2.removeEventListener('animationend', _0x17edbf, !0x1);
}, !0x1) : _0x5566a2.classList.remove(_0x449eac({
'is-open': true
}));
}
}, {
'key': 'addEventListeners',
'value': function() {
this.modal.addEventListener('touchstart', this.onClick), this.modal.addEventListener('click', this.onClick), document.addEventListener('keydown', this.onKeydown);
}
}, {
'key': 'removeEventListeners',
'value': function() {
this.modal.removeEventListener('touchstart', this.onClick), this.modal.removeEventListener('click', this.onClick), document.removeEventListener('keydown', this.onKeydown);
}
}, {
'key': 'onClick',
'value': function(_0x6c9df3) {
_0x6c9df3.target.hasAttribute(this.config.closeTrigger) && (this.closeModal(), _0x6c9df3.preventDefault());
}
}, {
'key': 'onKeydown',
'value': function(_0xe2d057) {
this.config.closeByEsckey && 0x1b === _0xe2d057.keyCode && this.closeModal(_0xe2d057), 0x9 === _0xe2d057.keyCode && this.maintainFocus(_0xe2d057);
}
}, {
'key': 'getFocusableNodes',
'value': function() {
var _0x5566a2 = this.modal.querySelectorAll(_0x1eda08);
return Object.keys(_0x5566a2).map(function(_0x1bd160) {
return _0x5566a2[_0x1bd160];
});
}
}, {
'key': 'setFocusToFirstNode',
'value': function() {
if (!this.config.disableFocus) {
var _0x5566a2 = this.getFocusableNodes();
_0x5566a2.length && _0x5566a2[0x0].focus();
}
}
}, {
'key': 'maintainFocus',
'value': function(_0x15092b) {
var _0x17edbf = this.getFocusableNodes();
if (this.modal.contains(document.activeElement)) {
var _0x2c1832 = _0x17edbf.indexOf(document.activeElement);
_0x15092b.shiftKey && 0x0 === _0x2c1832 && (_0x17edbf[_0x17edbf.length - 0x1].focus(), _0x15092b.preventDefault()), _0x15092b.shiftKey || _0x2c1832 !== _0x17edbf.length - 0x1 || (_0x17edbf[0x0].focus(), _0x15092b.preventDefault());
} else _0x17edbf[0x0].focus();
}
}]) && _0xa08ac0(_0x17edbf.prototype, _0x2c1832), _0x5021cc && _0xa08ac0(_0x17edbf, _0x5021cc), _0x5566a2;
}(), _0xf43a92 = null, _0x214f42 = function(_0x31caa5) {
if (!document.getElementById(_0x31caa5)) return !0x1;
}, _0x3a9bfc = function(_0x48c135, _0x61240d) {
if (function(_0x28b4fb) {
_0x28b4fb.length <= 0x0 && (console.warn('MicroModal v' .concat(version, ': ❗Please specify at least one %c\'micromodal-trigger\''), 'background-color: #f8f9fa;color: #50596c;font-weight: bold;', 'data attribute.'), console.warn('%cExample:', 'background-color: #f8f9fa;color: #50596c;font-weight: bold;', '<a href="#" data-micromodal-trigger="my-modal"></a>'));
}(_0x48c135), !_0x61240d) return true;
for (var _0x2c1832 in _0x61240d) _0x214f42(_0x2c1832);
return true;
}, {
'init': function(_0x3f1b32) {
var _0x17edbf = Object.assign({}, {
'openTrigger': 'data-micromodal-trigger'
}, _0x3f1b32),
_0x2c1832 = _0x3c0613(document.querySelectorAll('[' .concat(_0x17edbf.openTrigger, ']'))),
_0x537e55 = function(_0x5e363a, _0x9390ce) {
var _0x2c1832 = [];
return _0x5e363a.forEach(function(_0x305145) {
var _0x13d441 = _0x305145.attributes[_0x9390ce].value;
void 0x0 === _0x2c1832[_0x13d441] && (_0x2c1832[_0x13d441] = []), _0x2c1832[_0x13d441].push(_0x305145);
}), _0x2c1832;
}(_0x2c1832, _0x17edbf.openTrigger);
if (true !== _0x17edbf.debugMode || !0x1 !== _0x3a9bfc(_0x2c1832, _0x537e55))
for (var _0x383dff in _0x537e55) {
var _0x34a8e0 = _0x537e55[_0x383dff];
_0x17edbf.targetModal = _0x383dff, _0x17edbf.triggers = _0x3c0613(_0x34a8e0), new _0x4fc27f(_0x17edbf);
}
},
'show': function(_0x2a3fcf, _0x472cd6) {
var _0x2c1832 = _0x472cd6 || {};
return _0x2c1832.targetModal = _0x2a3fcf, true === _0x2c1832.debugMode && !0x1 === _0x214f42(_0x2a3fcf) ? _0xf43a92 = null : (_0xf43a92 = new _0x4fc27f(_0x2c1832)).showModal(), _0xf43a92;
},
'close': function() {
_0xf43a92.closeModal();
}
}),
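// _0x378b5c holds the runtime control and netplay state: default per-player key/gamepad mappings, a keyCode-to-name map, netplay bookkeeping, dialog class names, and hooks (saveState, loadState, setVolume, ...) filled in by the core later.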
_0x378b5c = {
'defaultControllers': {
0: {
0: {
'value': '88',
'value2': '1'
},
1: {
'value': '83',
'value2': '3'
},
2: {
'value': '16',
'value2': '8'
},
3: {
'value': '13',
'value2': '9'
},
4: {
'value': '38',
'value2': '12'
},
5: {
'value': '40',
'value2': '13'
},
6: {
'value': '37',
'value2': '14'
},
7: {
'value': '39',
'value2': '15'
},
8: {
'value': '90',
'value2': '0'
},
9: {
'value': '65',
'value2': '2'
},
10: {
'value': '81',
'value2': '4'
},
11: {
'value': '69',
'value2': '5'
},
12: {
'value': '82',
'value2': '6'
},
13: {
'value': '87',
'value2': '7'
},
14: {},
15: {},
16: {
'value': '72'
},
17: {
'value': '70'
},
18: {
'value': '71'
},
19: {
'value': '84'
},
20: {'value': '76'},
21: {'value': '74'},
22: {'value': '75'},
23: {'value': '73'},
24: {},
25: {},
26: {}
},
1: {},
2: {},
3: {}
},
'controllers': {},
'virtualGamepadContainer': null,
'keyMap': {
8: 'backspace',
9: 'tab',
13: 'enter',
16: 'shift',
17: 'ctrl',
18: 'alt',
19: 'pause/break',
20: 'caps lock',
27: 'escape',
32: 'space',
33: 'page up',
34: 'page down',
35: 'end',
36: 'home',
37: 'left arrow',
38: 'up arrow',
39: 'right arrow',
40: 'down arrow',
45: 'insert',
46: 'delete',
48: '0',
49: '1',
50: '2',
51: '3',
52: '4',
53: '5',
54: '6',
55: '7',
56: '8',
57: '9',
65: 'a',
66: 'b',
67: 'c',
68: 'd',
69: 'e',
70: 'f',
71: 'g',
72: 'h',
73: 'i',
74: 'j',
75: 'k',
76: 'l',
77: 'm',
78: 'n',
79: 'o',
80: 'p',
81: 'q',
82: 'r',
83: 's',
84: 't',
85: 'u',
86: 'v',
87: 'w',
88: 'x',
89: 'y',
90: 'z',
91: 'left window key',
92: 'right window key',
93: 'select key',
96: 'numpad 0',
97: 'numpad 1',
98: 'numpad 2',
99: 'numpad 3',
100: 'numpad 4',
101: 'numpad 5',
102: 'numpad 6',
103: 'numpad 7',
104: 'numpad 8',
105: 'numpad 9',
106: 'multiply',
107: 'add',
109: 'subtract',
110: 'decimal point',
111: 'divide',
112: 'f1',
113: 'f2',
114: 'f3',
115: 'f4',
116: 'f5',
117: 'f6',
118: 'f7',
119: 'f8',
120: 'f9',
121: 'f10',
122: 'f11',
123: 'f12',
144: 'num lock',
145: 'scroll lock',
186: 'semi-colon',
187: 'equal sign',
188: 'comma',
189: 'dash',
190: 'period',
191: 'forward slash',
192: 'grave accent',
219: 'open bracket',
220: 'back slash',
221: 'close bracket',
222: 'single quote'
},
'gamepad': null,
'allowCheat': true,
'connection': null,
'connected': !0x1,
'playerName': '',
'players': [],
'waitingList': {},
'roomMaster': null,
'inputsData': {},
'coreOptionData': {},
'currentFrame': 0x0,
'wait': !0x1,
'progressHelper': {},
'icons': {
'lock': '<svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="lock" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512" class="svg-inline--fa fa-lock fa-w-14 fa-3x"><path fill="currentColor" d="M400 224h-24v-72C376 68.2 307.8 0 224 0S72 68.2 72 152v72H48c-26.5 0-48 21.5-48 48v192c0 26.5 21.5 48 48 48h352c26.5 0 48-21.5 48-48V272c0-26.5-21.5-48-48-48zm-104 0H152v-72c0-39.7 32.3-72 72-72s72 32.3 72 72v72z" class=""></path></svg>'
},
'disableInput': !0x1,
'disableControl': function(_0x5459be) {
if (_0x5459be) {
for (var _0x17edbf = 0x0; _0x17edbf < 0x18; _0x17edbf += 0x1) _0x378b5c.simulateInputFn(0x0, _0x17edbf, 0x0), _0x378b5c.simulateInputFn(0x1, _0x17edbf, 0x0), _0x378b5c.simulateInputFn(0x2, _0x17edbf, 0x0), _0x378b5c.simulateInputFn(0x3, _0x17edbf, 0x0);
_0x378b5c.disableInput = true;
} else _0x378b5c.disableInput = !0x1;
},
'loadRoomsListTimer': null,
'loadRoomsList': function() {},
'stopLoadRooms': function() {
_0x378b5c.loadRoomsListTimer && clearTimeout(_0x378b5c.loadRoomsListTimer);
},
'openRoom': function() {},
'isPaused': null,
'setVolume': null,
'setVariable': null,
'simulateInput': null,
'simulateInputFn': null,
'toggleShader': null,
'saveState': null,
'screenRecord': null,
'loadState': null,
'quickSaveState': null,
'quickLoadState': null,
'changeStateSlot': null,
'getScreenData': null,
'getStateInfo': null,
'setCheat': null,
'resetCheat': null,
'getGameCoreOptions': null,
'classNames': {
'dialog-container': _0x449eac({
'dialog-container': true
}),
'tabs': _0x449eac({
'tabs': true
}),
'dialog-title': _0x449eac({
'dialog-title': true
}),
'dialog-content': _0x449eac({
'dialog-content': true
}),
'dialog-buttons': _0x449eac({
'dialog-buttons': true
}),
'overlay': _0x449eac({
'overlay': true
}),
'key-setting-popup': _0x449eac({
'key-setting-popup': true
}),
'tabs-panel': _0x449eac({
'tabs-panel': true
}),
'tabs-content': _0x449eac({
'tabs-content': true
}),
'button-container': _0x449eac({
'button-container': true
}),
'set': _0x449eac({
'set': true
}),
'active': _0x449eac({
'active': true
}),
'btn-cancel': _0x449eac({
'btn-cancel': true
}),
'btn-reset': _0x449eac({
'btn-reset': true
}),
'btn-clear': _0x449eac({
'btn-clear': true
}),
'btn-create': _0x449eac({
'btn-create': true
}),
'btn-submit': _0x449eac({
'btn-submit': true
}),
'btn-join-room': _0x449eac({
'btn-join-room': true
}),
'btn-quit': _0x449eac({
'btn-quit': true
}),
'btn-create-room': _0x449eac({
'btn-create-room': true
}),
'netplay-player-name': _0x449eac({
'netplay-player-name': true
}),
'netplay-roomlist': _0x449eac({
'netplay-roomlist': true
}),
'netplay-room-name-input': _0x449eac({
'netplay-room-name-input': true
}),
'netplay-room-password-input': _0x449eac({
'netplay-room-password-input': true
}),
'netplay-create-room-set': _0x449eac({
'netplay-create-room-set': true
}),
'netplay-room': _0x449eac({
'netplay-room': true
}),
'modal': _0x449eac({
'modal': true
}),
'modal__overlay': _0x449eac({
'modal__overlay': true
}),
'modal__container': _0x449eac({
'modal__container': true
}),
'modal__header': _0x449eac({
'modal__header': true
}),
'modal__title': _0x449eac({
'modal__title': true
}),
'modal__close': _0x449eac({
'modal__close': true
}),
'modal__content': _0x449eac({
'modal__content': true
}),
'modal__btn': _0x449eac({
'modal__btn': true
}),
'modal__btn-primary': _0x449eac({
'modal__btn-primary': true
}),
'micromodal-slide': _0x449eac({
'micromodal-slide': true
}),
'modal__errmsg': _0x449eac({
'modal__errmsg': true
}),
'modal__footer': _0x449eac({
'modal__footer': true
}),
'cheats-add': _0x449eac({
'cheats-add': true
}),
'cheats-list': _0x449eac({
'cheats-list': true
}),
'cheat-code-input': _0x449eac({
'cheat-code-input': true
}),
'cheat-name-input': _0x449eac({
'cheat-name-input': true
}),
'ejs-switch': _0x449eac({
'ejs-switch': true
}),
'ejs-delete-cheat': _0x449eac({
'ejs-delete-cheat': true
})
},
'initShaders': function() {
for (var _0xa88a13 = _0x5032e6, _0x17edbf = Object.keys(_0xa88a13), _0x2c1832 = '', _0x26fb6a = 0x0; _0x26fb6a < _0x17edbf.length; _0x26fb6a += 0x1) _0x2c1832 = _0xa88a13[_0x17edbf[_0x26fb6a]], _0x27f4c4.FS.writeFile('/shader/' .concat(_0x17edbf[_0x26fb6a]), _0x2c1832), _0x2593da.storage.get('shader') === _0x17edbf[_0x26fb6a] && _0x27f4c4.FS.writeFile('/shader/shader.glslp', _0x2c1832);
},
'setGamepadDialog': function() {
var _0xa88a13 = this;
_0xa88a13.elements.dialogs.gamepad.innerHTML = '\n <div class="' + _0x378b5c.classNames['dialog-container'] + '">\n <div class="' + _0x378b5c.classNames['dialog-title'] + '">\n <h4>Control Settings</h4>\n </div>\n \n <ul class="' + _0x378b5c.classNames.tabs + '" id="controls-tabs">\n <li class="tabs-title" role="presentation"><a href="#" onclick="return false" role="tab" aria-controls="controls-0" aria-selected="false" id="controls-0-label">Player 1</a></li>\n <li class="tabs-title" role="presentation"><a href="#" onclick="return false" role="tab" aria-controls="controls-1" aria-selected="false" id="controls-1-label">Player 2</a></li>\n <li class="tabs-title" role="presentation"><a href="#" onclick="return false" role="tab" aria-controls="controls-2" aria-selected="false" id="controls-2-label">Player 3</a></li>\n <li class="tabs-title" role="presentation"><a href="#" onclick="return false" role="tab" aria-controls="controls-3" aria-selected="false" id="controls-3-label">Player 4</a></li>\n </ul>\n <div class="' + _0x378b5c.classNames['dialog-content'] + '">\n <div class="' + _0x378b5c.classNames['tabs-content'] + ' controls">\n \n </div>\n </div>\n <div class="' + _0x378b5c.classNames['dialog-buttons'] + '">\n <a href="#" onclick="return false" class="' + _0x378b5c.classNames['btn-submit'] + '">Update</a>\n <a href="#" onclick="return false" class="' + _0x378b5c.classNames['btn-reset'] + '">Reset</a>\n <a href="#" onclick="return false" class="' + _0x378b5c.classNames['btn-clear'] + '">Clear</a>\n <a href="#" onclick="return false" class="' + _0x378b5c.classNames['btn-cancel'] + '">Cancel</a>\n </div>\n <div class="' + _0x378b5c.classNames.overlay + '" hidden>\n <div class="' + _0x378b5c.classNames['key-setting-popup'] + '">\n <span>[Select]</span><br />\n <div style="color:#fff !important">Press keyboard or gamepad</div>\n </div>\n </div>\n </div>\n '
var _0x17edbf = '\n <div class="' .concat(_0x378b5c.classNames['tabs-panel'], '" id="controls-{index}" hidden>\n <div>\n <div style="">\n <div class="gamepad" style="font-size:12px">Connected gamepad: <span class="gamepad-name">n/a</span></div>\n </div>\n <div style="width:25%;float:left"> </div>\n <div style="font-size:12px;width:50%;float:left">\n <div class="row">\n <div style="text-align:center;width:50%;float:left">Gamepad</div>\n <div style="text-align:center;width:50%;float:left">Keyboard</div>\n </div>\n </div>\n <div style="clear:both"></div>\n </div>\n </div>'),
_0x2c1832 = '\n <div class="' .concat(_0x378b5c.classNames['button-container'], '" data-id="{id}" data-index="{index}" data-label="{label}" style="margin-bottom:10px">\n <div style="width:25%;float:left;font-size:12px">\n <label>{label}:</label>\n </div>\n <div style="width:50%;float:left">\n <div>\n <div style="width:50%;float:left;padding: 0 5px;">\n <input style="text-align:center;height:25px;width: 100%;" type="text" data-id="{id}" data-value="" data-type="2" data-index="{index}" readonly="" placeholder="">\n </div>\n <div style="width:50%;float:left;padding: 0 5px;">\n <input style="text-align:center;height:25px;width: 100%;" type="text" data-id="{id}" data-value="" data-type="1" data-index="{index}" readonly="" placeholder="">\n </div>\n <div style="clear:both"></div>\n </div>\n </div>\n <div style="width:25%;float:left">\n <a class="').concat(_0x378b5c.classNames.set, '" href="#" onclick="return false">Set</a>\n </div>\n <div style="clear:both"></div>\n </div>')
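            // The maps below appear to translate internal button ids into per-system display labels;
            // ids 24-26 are the quick save / quick load / state-slot shortcuts and, where present,
            // ids 16-23 are the analog-stick directions.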
            var _0x48ee51;
            if ('nes' === _0xa88a13.system) {
                _0x48ee51 = {
0: 'B',
2: 'SELECT',
3: 'START',
4: 'UP',
5: 'DOWN',
6: 'LEFT',
7: 'RIGHT',
8: 'A',
24: 'QUICK SAVE STATE',
25: 'QUICK LOAD STATE',
26: 'CHANGE STATE SLOT'
}
} else if ('snes' === _0xa88a13.system) {
                _0x48ee51 = {
0: 'B',
1: 'Y',
2: 'SELECT',
3: 'START',
4: 'UP',
5: 'DOWN',
6: 'LEFT',
7: 'RIGHT',
8: 'A',
9: 'X',
10: 'L',
11: 'R',
24: 'QUICK SAVE STATE',
25: 'QUICK LOAD STATE',
26: 'CHANGE STATE SLOT'
};
} else if ('n64' === _0xa88a13.system) {
                _0x48ee51 = {
0: 'B',
3: 'START',
4: 'UP',
5: 'DOWN',
6: 'LEFT',
7: 'RIGHT',
8: 'A',
10: 'L',
11: 'R',
12: 'Z',
19: 'L STICK UP',
18: 'L STICK DOWN',
17: 'L STICK LEFT',
16: 'L STICK RIGHT',
23: 'R STICK UP',
22: 'R STICK DOWN',
21: 'R STICK LEFT',
20: 'R STICK RIGHT',
24: 'QUICK SAVE STATE',
25: 'QUICK LOAD STATE',
26: 'CHANGE STATE SLOT'
};
} else if ('nds' === _0xa88a13.system) {
                _0x48ee51 = {
0: 'B',
1: 'Y',
2: 'SELECT',
3: 'START',
4: 'UP',
5: 'DOWN',
6: 'LEFT',
7: 'RIGHT',
8: 'A',
9: 'X',
10: 'L',
11: 'R',
14: 'Microphone',
24: 'QUICK SAVE STATE',
25: 'QUICK LOAD STATE',
26: 'CHANGE STATE SLOT'
};
} else {
                _0x48ee51 = {
0: 'B',
1: 'Y',
2: 'SELECT',
3: 'START',
4: 'UP',
5: 'DOWN',
6: 'LEFT',
7: 'RIGHT',
8: 'A',
9: 'X',
10: 'L',
11: 'R',
12: 'L2',
13: 'R2',
14: 'L3',
15: 'R3',
19: 'L STICK UP',
18: 'L STICK DOWN',
17: 'L STICK LEFT',
16: 'L STICK RIGHT',
23: 'R STICK UP',
22: 'R STICK DOWN',
21: 'R STICK LEFT',
20: 'R STICK RIGHT',
24: 'QUICK SAVE STATE',
25: 'QUICK LOAD STATE',
26: 'CHANGE STATE SLOT'
};
}
if ('arcade' === _0xa88a13.system) {
_0x48ee51[2] = 'INSERT COIN'
}
if (_0xa88a13.statesSupported === false) {
delete _0x48ee51[24]
delete _0x48ee51[25]
delete _0x48ee51[26]
}
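            // Presumably renders one tab panel per player (0-3) and one row per mapped button in the
            // order listed below; the gamepad column of the analog-stick rows (ids 16-23) is grayed out.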
for (var _0x501790 = [0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x0, 0x9, 0x1, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0x13, 0x12, 0x11, 0x10, 0x17, 0x16, 0x15, 0x14, 0x18, 0x19, 26], _0x50fd12 = function(_0x88827f) {
_0xa88a13.elements.dialogs.gamepad.querySelector('.' .concat(_0x378b5c.classNames['tabs-content'])).innerHTML += _0x17edbf.replace(/{index}/g, _0x88827f), _0x501790.forEach(function(_0x1bf162) {
_0x48ee51[_0x1bf162] && (_0xa88a13.elements.dialogs.gamepad.querySelector('#controls-' .concat(_0x88827f)).innerHTML += _0x2c1832.replace(/{index}/g, _0x88827f).replace(/{id}/g, _0x1bf162).replace(/{label}/g, _0x48ee51[_0x1bf162]));
}), _0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="16"][data-index="' .concat(_0x88827f, '"][data-type="2"]')) && (_0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="16"][data-index="' .concat(_0x88827f, '"][data-type="2"]')).style.backgroundColor = '#ccc'), _0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="17"][data-index="' .concat(_0x88827f, '"][data-type="2"]')) && (_0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="17"][data-index="' .concat(_0x88827f, '"][data-type="2"]')).style.backgroundColor = '#ccc'), _0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="18"][data-index="' .concat(_0x88827f, '"][data-type="2"]')) && (_0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="18"][data-index="' .concat(_0x88827f, '"][data-type="2"]')).style.backgroundColor = '#ccc'), _0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="19"][data-index="' .concat(_0x88827f, '"][data-type="2"]')) && (_0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="19"][data-index="' .concat(_0x88827f, '"][data-type="2"]')).style.backgroundColor = '#ccc'), _0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="20"][data-index="' .concat(_0x88827f, '"][data-type="2"]')) && (_0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="20"][data-index="' .concat(_0x88827f, '"][data-type="2"]')).style.backgroundColor = '#ccc'), _0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="21"][data-index="' .concat(_0x88827f, '"][data-type="2"]')) && (_0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="21"][data-index="' .concat(_0x88827f, '"][data-type="2"]')).style.backgroundColor = '#ccc'), _0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="22"][data-index="' .concat(_0x88827f, '"][data-type="2"]')) && (_0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="22"][data-index="' .concat(_0x88827f, '"][data-type="2"]')).style.backgroundColor = '#ccc'), _0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="23"][data-index="' .concat(_0x88827f, '"][data-type="2"]')) && (_0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="23"][data-index="' .concat(_0x88827f, '"][data-type="2"]')).style.backgroundColor = '#ccc');
}, _0x3f0824 = 0x0; _0x3f0824 <= 0x3; _0x3f0824 += 0x1) _0x50fd12(_0x3f0824);
Object.keys(_0x378b5c.controllers).forEach(function(_0x23d419) {
_0x378b5c.controllers[_0x23d419] && Object.keys(_0x378b5c.controllers[_0x23d419]).forEach(function(_0x21e62d) {
var _0x294e35 = _0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="' .concat(_0x21e62d, '"][data-index="').concat(_0x23d419, '"][data-type="1"]')),
_0x1849bc = _0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="' .concat(_0x21e62d, '"][data-index="').concat(_0x23d419, '"][data-type="2"]'));
if (!_0x294e35 || !_0x1849bc) return !0x1;
var _0x19d43b = _0x378b5c.controllers[_0x23d419][_0x21e62d];
_0x19d43b && (_0x19d43b.type && 0x1 !== parseInt(_0x19d43b.type, 0xa) ? _0x1849bc.value = '' : _0x19d43b.value && (_0x294e35.value = _0x378b5c.keyMap[_0x19d43b.value] || '', _0x294e35.setAttribute('data-value', _0x19d43b.value)), _0x19d43b.value2 && (_0x1849bc.value = isNaN(_0x19d43b.value2) ? (_0x19d43b.value2) : ('button ' .concat(parseInt(_0x19d43b.value2, 0xa) + 0x1)), _0x1849bc.setAttribute('data-value', _0x19d43b.value2)));
});
}), _0x1093f4.call(this, _0xa88a13.elements.dialogs.gamepad.querySelector('.' .concat(_0x378b5c.classNames['btn-submit'])), 'click', function(_0x5f19bd) {
var _0x2c1832 = _0xa88a13.elements.dialogs.gamepad.querySelectorAll('input'),
_0x2844f7 = {};
return Array.from(_0x2c1832).forEach(function(_0x33c024) {
var _0x5f19bd, _0x2c1832 = _0x33c024.getAttribute('data-index'),
_0x124d0a = _0x33c024.getAttribute('data-value');
0x2 === parseInt(_0x33c024.getAttribute('data-type'), 0xa) && (_0x5f19bd = _0x33c024.getAttribute('data-value'));
var _0x54b9af = _0x33c024.getAttribute('data-id');
_0x2844f7[_0x2c1832] || (_0x2844f7[_0x2c1832] = {}), _0x2844f7[_0x2c1832][_0x54b9af] || (_0x2844f7[_0x2c1832][_0x54b9af] = {}), _0x124d0a && (_0x2844f7[_0x2c1832][_0x54b9af].value = _0x124d0a), _0x5f19bd && (_0x2844f7[_0x2c1832][_0x54b9af].value2 = _0x5f19bd);
}), _0x2593da.storage.set({
'controllers': _0x2844f7
}), _0x378b5c.controllers = _0x2844f7, _0x132da7(_0xa88a13.elements.dialogs.gamepad, true), _0xa88a13.elements.container.focus(), _0x5f19bd.stopPropagation(), !0x1;
}), _0x1093f4.call(this, _0xa88a13.elements.dialogs.gamepad.querySelector('.' .concat(_0x378b5c.classNames['btn-reset'])), 'click', function(_0x5f19bd) {
_0x378b5c.controllers = JSON.parse(JSON.stringify(_0x378b5c.defaultControllers))
localStorage.removeItem(_0x2593da.storage.key)
var _0x2c1832 = _0xa88a13.elements.dialogs.gamepad.querySelectorAll('input')
for (var i=0; i<_0x2c1832.length; i++) {
var player = _0x2c1832[i].getAttribute('data-index')
var controlOrGamepad = _0x2c1832[i].getAttribute('data-type')
var buttonId = _0x2c1832[i].getAttribute('data-id')
var a = (controlOrGamepad == 1) ? 'value' : 'value2'
if (_0x378b5c.defaultControllers[player] && _0x378b5c.defaultControllers[player][buttonId] && _0x378b5c.defaultControllers[player][buttonId][a]) {
if (controlOrGamepad == 1) {
_0x2c1832[i].value = _0x378b5c.keyMap[_0x378b5c.defaultControllers[player][buttonId][a]]
} else {
_0x2c1832[i].value = 'button ' + (parseInt(_0x378b5c.defaultControllers[player][buttonId][a])+1)
}
_0x2c1832[i].setAttribute('data-value', _0x378b5c.defaultControllers[player][buttonId][a])
} else {
_0x2c1832[i].value = ''
_0x2c1832[i].setAttribute('data-value', '')
}
}
}), _0x1093f4.call(this, _0xa88a13.elements.dialogs.gamepad.querySelector('.' .concat(_0x378b5c.classNames['btn-clear'])), 'click', function(_0x5f19bd) {
var c = {0:{}, 1:{}, 2:{}, 3:{}}
for (var k in _0x48ee51) {
c[0][k] = {}
c[1][k] = {}
c[2][k] = {}
c[3][k] = {}
}
_0x378b5c.controllers = c
localStorage.setItem(_0x2593da.storage.key, JSON.stringify({controllers: _0x378b5c.controllers}))
var _0x2c1832 = _0xa88a13.elements.dialogs.gamepad.querySelectorAll('input')
for (var i=0; i<_0x2c1832.length; i++) {
_0x2c1832[i].value = ''
_0x2c1832[i].setAttribute('data-value', '')
}
}), _0x1093f4.call(this, _0xa88a13.elements.dialogs.gamepad.querySelector('.' .concat(_0x378b5c.classNames['btn-cancel'])), 'click', function(_0x16d598) {
return _0x132da7(_0xa88a13.elements.dialogs.gamepad, true), _0xa88a13.elements.container.focus(), _0x16d598.stopPropagation(), !0x1;
}), Array.from(_0xa88a13.elements.dialogs.gamepad.querySelectorAll('.' .concat(_0x378b5c.classNames.tabs, ' li'))).forEach(function(_0x523f7b) {
_0x1093f4.call(_0xa88a13, _0x523f7b, 'mousedown', function(_0xa99a78) {
_0x3a8e2f(_0xa88a13.elements.dialogs.gamepad.querySelectorAll('.' .concat(_0x378b5c.classNames.tabs, ' li')), _0x378b5c.classNames.active, !0x1), _0x3a8e2f(_0x523f7b, _0x378b5c.classNames.active, true);
var _0xb8a227 = _0x523f7b.querySelector('a').getAttribute('aria-controls');
return Array.from(_0xa88a13.elements.dialogs.gamepad.querySelectorAll('.' .concat(_0x378b5c.classNames['tabs-content'], ' .').concat(_0x378b5c.classNames['tabs-panel']))).forEach(function(_0x1c29a4) {
_0x132da7(_0x1c29a4, true);
}), _0x132da7(_0xa88a13.elements.dialogs.gamepad.querySelector('.' .concat(_0x378b5c.classNames['tabs-content'], ' #').concat(_0xb8a227)), !0x1), _0xa99a78.stopPropagation(), !0x1;
});
}), _0xbae705.call(_0xa88a13, _0xa88a13.elements.dialogs.gamepad.querySelector('[aria-controls="controls-0"]').parentNode, 'mousedown');
var _0x11b11a = _0xa88a13.elements.dialogs.gamepad.querySelector('.' .concat(_0x378b5c.classNames.overlay));
_0x1093f4.call(_0xa88a13, _0x11b11a, 'mousedown', function(_0x15c7de) {
_0x132da7(_0x11b11a, true), _0x15c7de.stopPropagation();
}), Array.from(_0xa88a13.elements.dialogs.gamepad.querySelectorAll('.' .concat(_0x378b5c.classNames['button-container']))).forEach(function(_0x16f092) {
_0x1093f4.call(_0xa88a13, _0x16f092, 'mousedown', function(_0xdb1c93) {
_0xbae705.call(_0xa88a13, _0x16f092.querySelector('a.' .concat(_0x378b5c.classNames.set)), _0xdb1c93.type, true, {});
});
}), Array.from(_0xa88a13.elements.dialogs.gamepad.querySelectorAll('.' .concat(_0x378b5c.classNames['tabs-panel'], ' a.').concat(_0x378b5c.classNames.set))).forEach(function(_0x397818) {
_0x1093f4.call(_0xa88a13, _0x397818, 'mousedown', function(_0x1e1d79) {
var _0x2c1832 = _0x397818.parentNode.parentNode,
_0x4e649c = _0x2c1832.getAttribute('data-id'),
_0x431719 = _0x2c1832.getAttribute('data-index'),
_0x4a6f6f = _0x2c1832.getAttribute('data-label');
_0x132da7(_0x11b11a, !0x1), _0x11b11a.setAttribute('data-id', _0x4e649c), _0x11b11a.setAttribute('data-index', _0x431719);
var _0x179623 = '[ ' .concat(_0x4a6f6f, ' ]');
_0x378b5c.gamepad.gamepads[_0x431719] && _0x4e649c < 0x10 ? _0x179623 += '<br /><span style="font-size:12px">Gamepad:' .concat(_0x378b5c.gamepad.gamepads[_0x431719].id, '</span><br />Press keyboard or gamepad<br/>Press escape (esc) to clear') : _0x179623 += '<br />Press keyboard<br/>Press escape (esc) to clear', _0x11b11a.querySelector('.' .concat(_0x378b5c.classNames['key-setting-popup'])).innerHTML = _0x179623, _0x1e1d79.stopPropagation();
});
});
},
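        // Appears to build the "Cache Manager" dialog with a Close button and to wire a handler that
        // clears the ROM cache (romdb) when the title link is clicked.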
'setCacheDialog': function() {
var _0xa88a13 = this;
_0xa88a13.elements.dialogs.cache.innerHTML = '\n <div class="' .concat(_0x378b5c.classNames['dialog-container'], '">\n <div class="').concat(_0x378b5c.classNames['dialog-title'], '">\n <h4>Cache Manager</h4>\n </div>\n <div class="').concat(_0x378b5c.classNames['dialog-content'], '">\n\n </div>\n <div class="').concat(_0x378b5c.classNames['dialog-buttons'], '">\n <a href="#" onclick="return false" class="').concat(_0x378b5c.classNames['btn-cancel'], '">Close</a>\n </div>\n </div>\n '), _0x1093f4.call(this, _0xa88a13.elements.dialogs.cache.querySelector('.' .concat(_0x378b5c.classNames['btn-cancel'])), 'click', function(_0x5d6721) {
return _0x132da7(_0xa88a13.elements.dialogs.cache, true), _0xa88a13.elements.container.focus(), _0x5d6721.stopPropagation(), !0x1;
}), _0x1093f4.call(this, _0xa88a13.elements.dialogs.cache.querySelector('.' .concat(_0x378b5c.classNames['dialog-title'], ' a')), 'click', function(_0x46992d) {
return _0x27f4c4.romdb && (_0x27f4c4.romdb.clear(), _0xa88a13.elements.dialogs.cache.querySelector('.' .concat(_0x378b5c.classNames['dialog-content'])).innerHTML = '<div style="text-align:center">Empty</div>'), _0x46992d.stopPropagation(), !0x1;
});
},
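        // showLoading / hideLoading appear to toggle a micromodal-based "Loading..." overlay
        // (modal-6ed698f3d04061f5) together with the loading dialog element.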
'showLoading': function(_0x108b55) {
_0x4d8495.show('modal-6ed698f3d04061f5', {
'closeByEsckey': !0x1
            }), _0x132da7(this.elements.dialogs.loading, !0x1);
},
'hideLoading': function() {
_0x4d8495.close(), _0x132da7(this.elements.dialogs.loading, true);
},
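        // Presumably injects the markup for the loading overlay used by showLoading above.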
'setLoadingDialog': function() {
this.elements.dialogs.loading && (this.elements.dialogs.loading.innerHTML = '\n <div class="' .concat(_0x378b5c.classNames.modal, ' ').concat(_0x378b5c.classNames['micromodal-slide'], '" id="modal-6ed698f3d04061f5" aria-hidden="true">\n <div class="').concat(_0x378b5c.classNames.modal__overlay, '" tabindex="-1">\n <div class="').concat(_0x378b5c.classNames.modal__container, '" role="dialog" aria-modal="true" aria-labelledby="modal-6ed698f3d04061f5-title">\n Loading...\n </div>\n </div>\n </div>'));
},
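        // Appears to build the cheats dialog: an add-cheat modal (code plus description), a toggle list
        // persisted to localStorage under "<system>-<startName>-cheats", and submit handlers that apply
        // the checked entries via setCheat / resetCheat. Cheats appear to be disabled for arcade/MAME
        // cores or when the core exposes no setCheat hook.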
'setCheatDialog': function() {
var _0xa88a13 = this;
if (_0xa88a13.elements.dialogs.cheat) {
_0xa88a13.elements.dialogs.cheat.innerHTML = '\n <div class="' .concat(_0x378b5c.classNames['dialog-container'], '">\n <div class="').concat(_0x378b5c.classNames['dialog-title'], '">\n <h4>Cheats</h4>\n </div>\n <div class="').concat(_0x378b5c.classNames['dialog-content'], '">\n <div class="').concat(_0x378b5c.classNames['cheats-list'], '">\n \n </div>\n </div>\n <div class="').concat(_0x378b5c.classNames['dialog-buttons'], '">\n <a href="#" onclick="return false" class="').concat(_0x378b5c.classNames['cheats-add'], '">Add Cheat</a>\n <a href="#" onclick="return false" class="').concat(_0x378b5c.classNames['btn-submit'], '">OK</a>\n <a href="#" onclick="return false" class="').concat(_0x378b5c.classNames['btn-cancel'], '">Close</a>\n </div>\n </div>\n\n \n\n <div class="').concat(_0x378b5c.classNames.modal, ' ').concat(_0x378b5c.classNames['micromodal-slide'], '" id="modal-85cd7a1c543a484b" aria-hidden="true">\n <div class="').concat(_0x378b5c.classNames.modal__overlay, '" tabindex="-1" data-modal-close>\n <div class="').concat(_0x378b5c.classNames.modal__container, '" style="width:100%" role="dialog" aria-modal="true" aria-labelledby="modal-85cd7a1c543a484b-title">\n <div class="').concat(_0x378b5c.classNames.modal__header, '">\n <h2 class="').concat(_0x378b5c.classNames.modal__title, '" id="modal-85cd7a1c543a484b-title">\n Add Cheat Code\n </h2>\n <button class="').concat(_0x378b5c.classNames.modal__close, '" aria-label="Close modal" data-modal-close></button>\n </div>\n <main class="').concat(_0x378b5c.classNames.modal__content, '" id="modal-85cd7a1c543a484b-content">\n\n <div class="').concat(_0x378b5c.classNames.modal__errmsg, '"></div>\n <strong>Code</strong><br />\n <textarea style="width:100%;height:80px;" class="').concat(_0x378b5c.classNames['cheat-code-input'], '"></textarea><br />\n <strong>Description</strong><br />\n <input type="text" class="').concat(_0x378b5c.classNames['cheat-name-input'], '" /><br />\n\n </main>\n <footer class="').concat(_0x378b5c.classNames.modal__footer, '">\n <button class="').concat(_0x378b5c.classNames.modal__btn, ' ').concat(_0x378b5c.classNames['modal__btn-primary'], '">Submit</button>\n <button class="').concat(_0x378b5c.classNames.modal__btn, '" data-modal-close aria-label="Close">Close</button>\n </footer>\n </div>\n </div>\n </div>\n ');
var _0x17edbf = _0xa88a13.elements.dialogs.cheat.querySelector('#modal-85cd7a1c543a484b');
_0x1093f4.call(this, _0xa88a13.elements.dialogs.cheat.querySelector('.' .concat(_0x378b5c.classNames['cheats-add'])), 'click', function(_0x2a74b5) {
return _0x4d8495.show('modal-85cd7a1c543a484b', {
'closeTrigger': 'data-modal-close'
}), !0x1;
});
var _0x2c1832 = _0xa88a13.elements.dialogs.cheat.querySelector('.' .concat(_0x378b5c.classNames['cheats-list']));
_0x1093f4.call(this, _0x17edbf.querySelector('.' .concat(_0x378b5c.classNames['modal__btn-primary'])), 'click', function(_0x1c01ce) {
var _0x184e2c = _0x17edbf.querySelector('.' .concat(_0x378b5c.classNames['cheat-name-input'])).value,
_0x15ccbb = _0x17edbf.querySelector('.' .concat(_0x378b5c.classNames['cheat-code-input'])).value;
if (_0x184e2c && _0x15ccbb) {
var _0x2c3108 = _0x2c1832.querySelectorAll('input:checked');
_0x17edbf.querySelector('.' .concat(_0x378b5c.classNames['cheat-name-input'])).value = '', _0x17edbf.querySelector('.' .concat(_0x378b5c.classNames['cheat-code-input'])).value = '', _0x4d8495.close();
var _0x5b2cff = _0xa88a13.cheats.length;
_0xa88a13.cheats.push([_0x184e2c, _0x15ccbb]), _0x2c1832.innerHTML += '<div class="' .concat(_0x378b5c.classNames['ejs-switch'], '">\n <input type="checkbox" value="').concat(_0x5b2cff, '" id="ejs-cheat-switch-').concat(_0x5b2cff, '">\n <label for="ejs-cheat-switch-').concat(_0x5b2cff, '">').concat(_0x184e2c, '</label>\n <a href="#" data-idx="').concat(_0x5b2cff, '" onclick="return false" class="').concat(_0x378b5c.classNames['ejs-delete-cheat'], '">×</a>\n </div>'), _0x2c3108.forEach(function(_0x1fb248, _0x7bb84b) {
_0x2c1832.querySelector('#ejs-cheat-switch-' .concat(_0x7bb84b)).setAttribute('checked', 'checked');
});
}
return 'localStorage' in window && localStorage.setItem('' .concat(_0xa88a13.system, '-').concat(_0xa88a13.startName, '-cheats'), JSON.stringify(_0xa88a13.cheats)), _0x1c01ce.stopPropagation(), !0x1;
});
var _0x257001 = function() {
if ('localStorage' in window) {
var _0x17edbf = localStorage.getItem('' .concat(_0xa88a13.system, '-').concat(_0xa88a13.startName, '-cheats'));
try {
_0x17edbf = JSON.parse(_0x17edbf);
} catch (_0x101b3e) {}
_0x17edbf && (_0xa88a13.cheats = _0x17edbf);
}
_0x2c1832.innerHTML = '', _0xa88a13.cheats && Array.isArray(_0xa88a13.cheats) && _0xa88a13.cheats.forEach(function(_0x245035, _0x116996) {
var _0x4ac35a = _0x245035[0x0];
_0x245035[0x1];
_0x2c1832.innerHTML += '<div class="' .concat(_0x378b5c.classNames['ejs-switch'], '">\n <input type="checkbox" value="').concat(_0x116996, '" id="ejs-cheat-switch-').concat(_0x116996, '">\n <label for="ejs-cheat-switch-').concat(_0x116996, '">').concat(_0x4ac35a, '</label>\n <a href="#" data-idx="').concat(_0x116996, '" onclick="return false" class="').concat(_0x378b5c.classNames['ejs-delete-cheat'], '">×</a>\n </div>');
});
};
_0x1093f4.call(_0xa88a13, _0x2c1832, 'click', function(_0x576733) {
if (_0x576733.target && _0x576733.target.classList.contains(_0x378b5c.classNames['ejs-delete-cheat']) && _0x576733.target.getAttribute('data-idx')) {
var _0x5b10b7 = _0x576733.target.getAttribute('data-idx');
_0xa88a13.cheats.splice(_0x5b10b7, 0x1), 'localStorage' in window && localStorage.setItem('' .concat(_0xa88a13.system, '-').concat(_0xa88a13.startName, '-cheats'), JSON.stringify(_0xa88a13.cheats));
var _0x557895 = [];
_0x2c1832.querySelectorAll('input').forEach(function(_0xab0ba8, _0x1f3597) {
_0x557895[_0x1f3597] = !!_0x2c1832.querySelector('#ejs-cheat-switch-' .concat(_0x1f3597, ':checked'));
}), _0x557895.splice(_0x5b10b7, 0x1), _0x257001(), _0x557895.forEach(function(_0x2e22be, _0x2247c4) {
_0x2e22be && _0x2c1832.querySelector('#ejs-cheat-switch-' .concat(_0x2247c4)).setAttribute('checked', 'checked');
});
}
return _0x576733.stopPropagation(), !0x1;
}), _0x1093f4.call(_0xa88a13, _0xa88a13.elements.container, 'start-game', function(_0x4c266e) {
_0x257001();
}), _0x1093f4.call(this, _0xa88a13.elements.dialogs.cheat.querySelector('.' .concat(_0x378b5c.classNames['btn-cancel'])), 'click', function(_0x4adb2c) {
return _0x132da7(_0xa88a13.elements.dialogs.cheat, true), _0x4adb2c.stopPropagation(), _0xa88a13.elements.container.focus(), !0x1;
}), _0x1093f4.call(this, _0xa88a13.elements.dialogs.cheat.querySelector('.' .concat(_0x378b5c.classNames['btn-submit'])), 'click', function(_0x5785f6) {
return _0x378b5c.resetCheat(), _0x2c1832.querySelectorAll('input:checked').forEach(function(_0xeb7426, _0x45b02d) {
var _0x27e2c1 = _0xa88a13.cheats[_0xeb7426.value];
_0x27e2c1 && _0x378b5c.setCheat(_0x45b02d, 0x1, _0x27e2c1[0x1]);
}), _0x132da7(_0xa88a13.elements.dialogs.cheat, true), _0xa88a13.elements.container.focus(), !0x1;
}), 'arcade' !== _0xa88a13.system && 'mame' !== _0xa88a13.system && 'mame2003' !== _0xa88a13.system && 'mame2010' !== _0xa88a13.system || (_0x1e2c68.element(_0xa88a13.elements.buttons.cheat) && _0x132da7(_0xa88a13.elements.buttons.cheat, true), _0x378b5c.allowCheat = !0x1), _0x378b5c.setCheat || (_0x132da7(_0xa88a13.elements.buttons.cheat, true), _0x378b5c.allowCheat = !0x1);
}
},
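        // Appears to build the netplay dialog (room list, room view, player-name / create-room /
        // password modals) and to set up the peer connection, lockstep input synchronisation and room
        // management used for online multiplayer.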
'setNetplayDialog': function() {
var _0xa88a13 = this,
_0x17edbf = this;
if (_0x17edbf.elements.dialogs.netplay) {
_0x17edbf.elements.dialogs.netplay.innerHTML = '\n <div class="' .concat(_0x378b5c.classNames['dialog-container'], '">\n <div class="').concat(_0x378b5c.classNames['dialog-title'], '">\n <h4>Netplay</h4>\n </div>\n <div class="').concat(_0x378b5c.classNames['dialog-content'], '">\n <div class="').concat(_0x378b5c.classNames['netplay-roomlist'], '" hidden>\n <strong>Rooms</strong>\n <table style="width:100%" cellspacing=0>\n <thead>\n <tr>\n <td>Room Name</td>\n <td>Players</td>\n <td></td>\n </tr>\n </thead>\n <tbody>\n <tr>\n <td></td>\n <td></td>\n <td></td>\n </tr>\n </tbody>\n </table>\n </div>\n\n <div class="').concat(_0x378b5c.classNames['netplay-room'], '" hidden>\n <strong>Room Name</strong>\n <div data-room-password>Password: <span></span></div>\n <table style="width:100%" cellspacing="0">\n <thead>\n <tr>\n <td>Player</td>\n <td>Name</td>\n <td></td>\n </tr>\n </thead>\n <tbody>\n <tr>\n <td>1</td>\n <td>Name 1</td>\n <td></td>\n </tr>\n <tr>\n <td>2</td>\n <td>Name 2</td>\n <td></td>\n </tr>\n <tr>\n <td>3</td>\n <td>Name 3</td>\n <td></td>\n </tr>\n <tr>\n <td>4</td>\n <td>Name 4</td>\n <td></td>\n </tr>\n </tbody>\n </table>\n </div>\n </div>\n <div class="').concat(_0x378b5c.classNames['dialog-buttons'], '">\n <a href="#" onclick="return false" class="').concat(_0x378b5c.classNames['btn-quit'], '">Quit Room</a>\n <a href="#" onclick="return false" class="').concat(_0x378b5c.classNames['btn-create-room'], '">Create a Room</a>\n <a href="#" onclick="return false" class="').concat(_0x378b5c.classNames['btn-cancel'], '">Close</a>\n </div>\n </div>\n\n \n <div class="').concat(_0x378b5c.classNames.modal, ' ').concat(_0x378b5c.classNames['micromodal-slide'], '" id="modal-9de6c4e9ce2b9361" aria-hidden="true">\n <div class="').concat(_0x378b5c.classNames.modal__overlay, '" tabindex="-1">\n <div class="').concat(_0x378b5c.classNames.modal__container, '" role="dialog" aria-modal="true" aria-labelledby="modal-9de6c4e9ce2b9361-title">\n <div class="').concat(_0x378b5c.classNames.modal__header, '">\n <h2 class="').concat(_0x378b5c.classNames.modal__title, '" id="modal-9de6c4e9ce2b9361-title">\n Set Player Name\n </h2>\n </div>\n <main class="').concat(_0x378b5c.classNames.modal__content, '" id="modal-9de6c4e9ce2b9361-content">\n\n <div class="').concat(_0x378b5c.classNames.modal__errmsg, '"></div>\n <strong>Player Name</strong><br />\n <input type="text" maxlength="10" class="').concat(_0x378b5c.classNames['netplay-player-name'], '" /><br />\n\n </main>\n <footer class="').concat(_0x378b5c.classNames.modal__footer, '">\n <button class="').concat(_0x378b5c.classNames.modal__btn, ' ').concat(_0x378b5c.classNames['modal__btn-primary'], '">Submit</button>\n </footer>\n </div>\n </div>\n </div>\n \n <div class="').concat(_0x378b5c.classNames.modal, ' ').concat(_0x378b5c.classNames['micromodal-slide'], '" id="modal-85cd7a1c543a484a" aria-hidden="true">\n <div class="').concat(_0x378b5c.classNames.modal__overlay, '" tabindex="-1" data-modal-close>\n <div class="').concat(_0x378b5c.classNames.modal__container, '" role="dialog" aria-modal="true" aria-labelledby="modal-85cd7a1c543a484a-title">\n <div class="').concat(_0x378b5c.classNames.modal__header, '">\n <h2 class="').concat(_0x378b5c.classNames.modal__title, '" id="modal-85cd7a1c543a484a-title">\n Create a Room\n </h2>\n <button class="').concat(_0x378b5c.classNames.modal__close, '" aria-label="Close modal" data-modal-close></button>\n </div>\n <main class="').concat(_0x378b5c.classNames.modal__content, '" 
id="modal-85cd7a1c543a484a-content">\n\n <div class="').concat(_0x378b5c.classNames.modal__errmsg, '"></div>\n <strong>Room Name</strong><br />\n <input type="text" maxlength="10" class="').concat(_0x378b5c.classNames['netplay-room-name-input'], '" /><br />\n <strong>Max Players</strong><br />\n <select data-max-players>\n <option value="2">2</option>\n <option value="3">3</option>\n <option value="4">4</option>\n </select>\n <br />\n <strong>Password (Optional)</strong><br />\n <input type="text" maxlength="10" class="').concat(_0x378b5c.classNames['netplay-room-password-input'], '" /><br />\n\n </main>\n <footer class="').concat(_0x378b5c.classNames.modal__footer, '">\n <button class="').concat(_0x378b5c.classNames.modal__btn, ' ').concat(_0x378b5c.classNames['modal__btn-primary'], '">Submit</button>\n <button class="').concat(_0x378b5c.classNames.modal__btn, '" data-modal-close aria-label="Close">Close</button>\n </footer>\n </div>\n </div>\n </div>\n \n <div class="').concat(_0x378b5c.classNames.modal, ' ').concat(_0x378b5c.classNames['micromodal-slide'], '" id="modal-5aa765d61d8327de" aria-hidden="true">\n <div class="').concat(_0x378b5c.classNames.modal__overlay, '" tabindex="-1" data-modal-close>\n <div class="').concat(_0x378b5c.classNames.modal__container, '" role="dialog" aria-modal="true" aria-labelledby="modal-5aa765d61d8327de-title">\n <div class="').concat(_0x378b5c.classNames.modal__header, '">\n <h2 class="').concat(_0x378b5c.classNames.modal__title, '" id="modal-5aa765d61d8327de-title">\n Password\n </h2>\n <button class="').concat(_0x378b5c.classNames.modal__close, '" aria-label="Close modal" data-modal-close></button>\n </div>\n <main class="').concat(_0x378b5c.classNames.modal__content, '" id="modal-5aa765d61d8327de-content">\n\n <div class="').concat(_0x378b5c.classNames.modal__errmsg, '"></div>\n <input type="text" maxlength="10" class="').concat(_0x378b5c.classNames['netplay-room-password-input'], '" /><br />\n\n </main>\n <footer class="').concat(_0x378b5c.classNames.modal__footer, '">\n <button class="').concat(_0x378b5c.classNames.modal__btn, ' ').concat(_0x378b5c.classNames['modal__btn-primary'], '">Submit</button>\n <button class="').concat(_0x378b5c.classNames.modal__btn, '" data-modal-close aria-label="Close">Close</button>\n </footer>\n </div>\n </div>\n </div>\n \n \n <div class="').concat(_0x378b5c.classNames.modal, ' ').concat(_0x378b5c.classNames['micromodal-slide'], '" id="modal-7d8fd50ed642340b" aria-hidden="true">\n <div class="').concat(_0x378b5c.classNames.modal__overlay, '" tabindex="-1" data-modal-close>\n <div class="').concat(_0x378b5c.classNames.modal__container, '" role="dialog" aria-modal="true" aria-labelledby="modal-7d8fd50ed642340b-title">\n <div class="').concat(_0x378b5c.classNames.modal__header, '">\n <h2 class="').concat(_0x378b5c.classNames.modal__title, '" id="modal-7d8fd50ed642340b-title"></h2>\n <button class="').concat(_0x378b5c.classNames.modal__close, '" aria-label="Close modal" data-modal-close></button>\n </div>\n <main class="').concat(_0x378b5c.classNames.modal__content, '" id="modal-7d8fd50ed642340b-content">\n \n </main>\n <footer class="').concat(_0x378b5c.classNames.modal__footer, '">\n <button class="').concat(_0x378b5c.classNames.modal__btn, ' ').concat(_0x378b5c.classNames['modal__btn-primary'], '" data-modal-close>OK</button>\n </footer>\n </div>\n </div>\n </div>\n \n '), _0x1093f4.call(this, _0x17edbf.elements.dialogs.netplay.querySelector('.' 
.concat(_0x378b5c.classNames['btn-cancel'])), 'click', function(_0x3d4554) {
return _0x132da7(_0x17edbf.elements.dialogs.netplay, true), _0x378b5c.disableControl(!0x1), _0x378b5c.stopLoadRooms(), _0x3d4554.stopPropagation(), _0x17edbf.elements.container.focus(), !0x1;
}), window.setInterval(function() {
var _0xa88a13 = '<ul>';
_0x378b5c.players.forEach(function(_0x20c45e, _0x14d0b2) {
var _0x57ec9c = null;
_0x20c45e ? (_0x378b5c.connection.peers[_0x20c45e] ? _0x57ec9c = _0x378b5c.connection.peers[_0x20c45e].extra : _0x378b5c.connection.userid === _0x20c45e && (_0x57ec9c = _0x378b5c.connection.extra), _0xa88a13 += '<li>' .concat(_0x14d0b2 + 0x1, '. ').concat(_0x57ec9c.name, '</li>')) : _0xa88a13 += '<li>' .concat(_0x14d0b2 + 0x1, '. </li>');
}), _0xa88a13 += '</ul>', _0x17edbf.elements.widgets.netplay.innerHTML = _0xa88a13;
}, 0x1f4);
var _0x2c1832 = _0x17edbf.elements.dialogs.netplay.querySelector('#modal-9de6c4e9ce2b9361'),
_0x36bde0 = _0x17edbf.elements.dialogs.netplay.querySelector('#modal-85cd7a1c543a484a'),
_0x5289c1 = _0x17edbf.elements.dialogs.netplay.querySelector('#modal-7d8fd50ed642340b'),
_0x325651 = _0x17edbf.elements.dialogs.netplay.querySelector('#modal-5aa765d61d8327de'),
_0x3d0d28 = {};
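            // Presumably an RTCMultiConnection-style wrapper: data-only sessions, file sharing for
            // savestate transfer, a signalling server taken from socketUrl, TURN/STUN servers for NAT
            // traversal, and at most four participants per room.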
_0x378b5c.connection = new _0x4a2390.a();
var _0x10d51e = _0xa88a13.socketUrl;
            if (!_0x10d51e.endsWith('/')) {
                _0x10d51e += '/';
            }
            _0x378b5c.connection.socketURL = _0x10d51e, _0x378b5c.connection.socketMessageEvent = 'emulatorjs-message', _0x378b5c.connection.socketCustomEvent = 'emulatorjs-custom-message';
var _0xda20e9 = _0x59aa33.info();
_0x378b5c.connection.extra = {
'name': _0x378b5c.playerName,
'game_id': _0x17edbf.config.gameId,
'browser_name': _0xda20e9.name,
'room_name': ' ',
'domain': document.domain
}, _0x378b5c.connection.socketMessageEvent = 'netplay', _0x378b5c.connection.enableFileSharing = true, _0x378b5c.connection.enableLogs = !0x1, _0x378b5c.connection.session = {
'data': true
}, _0x378b5c.connection.sdpConstraints.mandatory = {
'OfferToReceiveAudio': !0x1,
'OfferToReceiveVideo': !0x1
}, _0x378b5c.connection.onclose = function(_0x4fec07) {}, _0x378b5c.connection.iceServers = [{
'urls': 'turn:turn.emulatorjs.com:3478',
'credential': 'dSnmrtehverella',
'username': 'ejs'
}, {
'urls': ['stun:stun.l.google.com:19302', 'stun:stun1.l.google.com:19302', 'stun:stun2.l.google.com:19302', 'stun:stun.l.google.com:19302?transport=udp']
}], _0x378b5c.connection.maxParticipantsAllowed = 0x4, _0x378b5c.netPlayInitFrame = 0x0;
var _0x1eb137 = 0x0,
_0x52eb7d = !0x1;
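            // Lockstep frame synchronisation, as far as can be inferred: the room master broadcasts the
            // inputs recorded for each frame as "sync-control" packets, while clients pause the main
            // loop until the inputs for their current frame arrive and fall back to a "sync-mem"
            // request (full savestate transfer) if they appear to have missed packets.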
Module.postMainLoop = function() {
var _0xa88a13;
if (_0x378b5c.currentFrame = parseInt(_0x378b5c.currentFrameNum(), 0xa), _0x378b5c.connected)
if (_0x1eb137 += 0x1, _0x378b5c.connection.isInitiator) _0x1eb137 >= 0x1 && function() {
for (var _0xa88a13 = [], _0x17edbf = _0x378b5c.currentFrame - 0x1; _0x17edbf < _0x378b5c.currentFrame; _0x17edbf += 0x1) _0x378b5c.inputsData[_0x17edbf] ? _0x378b5c.inputsData[_0x17edbf].forEach(function(_0x2a02da, _0x1b2066) {
_0xa88a13.push('' .concat(_0x2a02da.index, '|').concat(_0x2a02da.key, '|').concat(_0x2a02da.status, '|').concat(_0x2a02da.frame));
}) : _0xa88a13.push('-1|||' .concat(_0x17edbf));
_0x378b5c.connection.send(JSON.stringify({
'act': 'sync-control',
'value': _0xa88a13
})), _0x1eb137 = 0x0, _0x378b5c.currentFrame % 0x64 == 0x0 && Object.keys(_0x378b5c.inputsData).forEach(function(_0x3125e4, _0x31ea67) {
_0x3125e4 < _0x378b5c.currentFrame - 0x32 && (_0x378b5c.inputsData[_0x3125e4] = null, delete _0x378b5c.inputsData[_0x3125e4]);
});
}();
else {
if (_0x378b5c.coreOptionData[_0x378b5c.currentFrame]) {
var _0x2c1832 = _0x378b5c.coreOptionData[_0x378b5c.currentFrame].key,
_0x4fd0cc = _0x378b5c.coreOptionData[_0x378b5c.currentFrame].value;
_0x2593da.updateCoreOptions.call(_0x17edbf, _0x2c1832, _0x4fd0cc), delete _0x378b5c.coreOptionData[_0x378b5c.currentFrame];
}
if (_0x378b5c.currentFrame <= 0x0 || _0x378b5c.inputsData[_0x378b5c.currentFrame]) _0x378b5c.wait = !0x1, _0x378b5c.systemPause(0x0), _0x27f4c4.Module.resumeMainLoop(), _0xa88a13 = _0x378b5c.inputsData[_0x378b5c.currentFrame], _0x378b5c.inputsData[_0x378b5c.currentFrame] = null, delete _0x378b5c.inputsData[_0x378b5c.currentFrame], _0xa88a13 || (_0xa88a13 = []), _0xa88a13.forEach(function(_0x4bb42f, _0x28928a) {
_0x4bb42f.index >= 0x0 && _0x378b5c.simulateInputFn(_0x4bb42f.index, _0x4bb42f.key, _0x4bb42f.status);
});
else {
_0x378b5c.wait = true, _0x378b5c.systemPause(0x1), _0x27f4c4.Module.pauseMainLoop();
var _0x3a10d1 = !0x1,
_0x42763c = Object.keys(_0x378b5c.inputsData);
0x0 == _0x42763c.length && (_0x3a10d1 = true);
for (var _0x58af15 = 0x0; _0x58af15 < _0x42763c.length; _0x58af15 += 0x1) {
if (_0x42763c[_0x58af15] > _0x378b5c.currentFrame) {
console.log('lost', _0x378b5c.currentFrame), _0x3a10d1 = true;
break;
}
}
_0x3a10d1 ? _0x378b5c.wait || (!_0x52eb7d || _0x52eb7d < new Date().valueOf() - 0xbb8) && (_0x378b5c.inputsData = {}, _0x52eb7d = new Date().valueOf(), _0x378b5c.connection.send(JSON.stringify({
'act': 'sync-mem',
'value': _0x378b5c.connection.userid
}), _0x378b5c.roomMaster)) : (_0x378b5c.wait = true, _0x378b5c.systemPause(0x1), _0x27f4c4.Module.pauseMainLoop());
}
_0x378b5c.currentFrame % 0x64 == 0x0 && Object.keys(_0x378b5c.inputsData).forEach(function(_0x3aa393, _0x3a7829) {
_0x3aa393 < _0x378b5c.currentFrame - 0x32 && (_0x378b5c.inputsData[_0x3aa393] = null, delete _0x378b5c.inputsData[_0x3aa393]);
});
}
}, _0x378b5c.connection.onopen = function(_0x4b10d6) {
_0x378b5c.connected = true, _0x378b5c.connection.isInitiator && (-0x1 === _0x378b5c.players.indexOf(_0x4b10d6.userid) && (_0x378b5c.players.filter(function(_0x3eafb3, _0xabd7b7) {
return null == _0x3eafb3;
}).length > 0x0 ? _0x378b5c.players.forEach(function(_0xbf0f19, _0xa9d540) {
null != _0xbf0f19 || _0x378b5c.players.includes(_0x4b10d6.userid) || (_0x378b5c.players[_0xa9d540] = _0x4b10d6.userid);
}) : _0x378b5c.players.push(_0x4b10d6.userid)), _0x378b5c.connection.send(JSON.stringify({
'act': 'get-players-result',
'value': _0x378b5c.players,
'coreOptions': _0x2593da.coreOptionsValues,
'newuser': _0x4b10d6.userid,
'master': _0x378b5c.connection.userid
}))), _0x2593da.updateCoreOptionMenuItems.call(_0x17edbf);
}, _0x378b5c.connection.onFileStart = function(_0x50eb3a) {
var _0x2c1832 = _0x50eb3a.name.split('-');
'reset' !== _0x2c1832[0x0] && 0x0 != _0x2c1832[0x0] || (_0x378b5c.inputsData = {}, _0x378b5c.disableControl(true)), _0x378b5c.progressHelper[_0x50eb3a.remoteUserId] = {}, _0x378b5c.progressHelper[_0x50eb3a.remoteUserId].max = _0x50eb3a.maxChunks, _0x378b5c.showLoading.call(_0x17edbf), _0x378b5c.connection.isInitiator ? (_0x378b5c.wait = true, _0x378b5c.systemPause(0x1), _0x27f4c4.Module.pauseMainLoop()) : _0x378b5c.inputsData = {};
}, _0x378b5c.connection.onFileProgress = function(_0x44a04b) {
var _0x17edbf = _0x378b5c.progressHelper[_0x44a04b.remoteUserId];
_0x17edbf.current = _0x44a04b.currentPosition || _0x44a04b.maxChunks || _0x17edbf.max;
}, _0x378b5c.connection.onFileEnd = function(_0x45d454) {
_0x378b5c.hideLoading.call(_0x17edbf);
var _0x2c1832 = _0x45d454.name.split('-'),
_0x3795d9 = _0x2c1832[0x0] >> 0x0;
'reset' === _0x2c1832[0x0] ? (_0x3795d9 = 0x0, _0x378b5c.connection.isInitiator ? (_0x378b5c.systemPause(0x0), _0x378b5c.disableControl(!0x1), _0x17edbf.playing = true, _0x27f4c4.Module.resumeMainLoop()) : _0x550f17.a.get(_0x45d454.url, {
'responseType': 'arraybuffer'
}).then(function(_0x37db4c) {
_0x378b5c.inputsData = {};
var _0x2c1832 = new Uint8Array(_0x37db4c.data);
_0x378b5c.loadState(_0x2c1832, _0x3795d9), _0x378b5c.wait = !0x1, _0x378b5c.systemPause(0x0), _0x378b5c.disableInput = !0x1, _0x17edbf.playing = true, _0x27f4c4.Module.resumeMainLoop();
})) : 'game.state' === _0x2c1832[0x1] && (_0x378b5c.connection.isInitiator ? (_0x378b5c.openRoom(_0x17edbf), _0x378b5c.wait = !0x1, _0x378b5c.systemPause(0x0), _0x27f4c4.Module.resumeMainLoop()) : (_0x378b5c.netPlayInitFrame = _0x3795d9, _0x550f17.a.get(_0x45d454.url, {
'responseType': 'arraybuffer'
}).then(function(_0x4d241b) {
var _0x2c1832 = new Uint8Array(_0x4d241b.data);
_0x378b5c.loadState(_0x2c1832, _0x3795d9), _0x378b5c.wait = !0x1, _0x378b5c.disableInput = !0x1, _0x378b5c.connection.send(JSON.stringify({
'act': 'wait-end',
'value': _0x378b5c.connection.userid
})), _0x378b5c.hideLoading.call(_0x17edbf), _0x17edbf.playing = true, _0x27f4c4.Module.resumeMainLoop();
}))), _0x378b5c.connection.isInitiator || console.log('recv mem end', _0x378b5c.currentFrame, _0x45d454.name);
};
var _0x3bdb35 = !0x1;
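            // Data-channel message handler; the "act" field appears to select the operation:
            // room-master, short-pause, sync-control (per-frame inputs), update-core-option,
            // restart-game, pause / resume, wait / wait-end, sync-mem (savestate request) and
            // get-players-result (join handshake).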
_0x378b5c.connection.onmessage = function(_0x32e81a) {
var _0x2c1832, _0x2dca58 = _0x32e81a.data || _0x32e81a;
try {
_0x2c1832 = JSON.parse(_0x2dca58);
} catch (_0x3fb9cc) {
_0x2c1832 = null;
}
if (_0x2c1832) {
if ('room-master' === _0x2c1832.act && (_0x378b5c.roomMaster = _0x2c1832.value), _0x2c1832.act, 'short-pause' === _0x2c1832.act && (_0x3bdb35 || (_0x378b5c.systemPause(0x1), _0x27f4c4.Module.pauseMainLoop(), _0x3bdb35 = true, _0x378b5c.wait = true, setTimeout(function() {
_0x378b5c.systemPause(0x0), _0x27f4c4.Module.resumeMainLoop(), _0x378b5c.wait = !0x1, _0x3bdb35 = !0x1;
}, 0x30))), 'sync-control' === _0x2c1832.act) {
var _0xb051c1 = _0x2c1832.value;
                var _0x2ef644 = _0x378b5c.connection.userid;
                _0xb051c1.forEach(function(_0x308105, _0x383237) {
var _0x2c1832 = _0x308105.split('|'),
_0x25a5c4 = _0x2c1832[0x0] >> 0x0,
_0x2b3afd = _0x2c1832[0x1] >> 0x0,
_0x4f5761 = _0x2c1832[0x2] >> 0x0,
_0x249303 = _0x2c1832[0x3] >> 0x0,
_0x54e0fd = _0x378b5c.currentFrame;
_0x378b5c.inputsData[_0x249303] || (_0x378b5c.inputsData[_0x249303] = []), -0x1 !== _0x25a5c4 && _0x249303 === _0x54e0fd && _0x378b5c.simulateInputFn(_0x25a5c4, _0x2b3afd, _0x4f5761), _0x378b5c.connection.isInitiator ? _0x25a5c4 >= 0x0 && (_0x378b5c.inputsData[_0x54e0fd] || (_0x378b5c.inputsData[_0x54e0fd] = []), -0x2 != _0x2b3afd && (_0x378b5c.inputsData[_0x54e0fd].push({
'index': _0x25a5c4,
'key': _0x2b3afd,
'status': _0x4f5761,
'frame': _0x54e0fd
}), _0x378b5c.simulateInputFn(_0x25a5c4, _0x2b3afd, _0x4f5761)), _0x3bdb35 || _0x54e0fd - 0xa >= _0x249303 && (_0x378b5c.systemPause(0x1), _0x27f4c4.Module.pauseMainLoop(), _0x3bdb35 = true, _0x378b5c.wait = true, setTimeout(function() {
_0x378b5c.systemPause(0x0), _0x27f4c4.Module.resumeMainLoop(), _0x378b5c.wait = !0x1, _0x3bdb35 = !0x1;
}, 0x30))) : ('' !== _0x25a5c4 && _0x378b5c.inputsData[_0x249303].push({
'index': _0x25a5c4,
'key': _0x2b3afd,
'status': _0x4f5761,
'frame': _0x249303
}), _0x378b5c.inputsData[_0x378b5c.currentFrame] && _0x27f4c4.Module.resumeMainLoop(), _0x54e0fd + 0xa <= _0x249303 && _0x249303 > _0x378b5c.netPlayInitFrame + 0x64 && _0x378b5c.connection.send(JSON.stringify({
'act': 'short-pause',
'value': _0x2ef644
}), _0x378b5c.roomMaster));
});
}
if (_0x2c1832.act, _0x2c1832.act, 'update-core-option' === _0x2c1832.act && (_0x378b5c.connection.isInitiator || (_0x378b5c.coreOptionData[_0x2c1832.frame] = {
'key': _0x2c1832.key,
'value': _0x2c1832.value
})), 'restart-game' === _0x2c1832.act && (_0x378b5c.inputsData = {}, _0x378b5c.connection.send(JSON.stringify({
'act': 'sync-mem',
'value': _0x378b5c.connection.userid
}), _0x378b5c.roomMaster)), 'pause' === _0x2c1832.act && _0x27f4c4.Module.pauseMainLoop(), 'resume' === _0x2c1832.act && _0x27f4c4.Module.resumeMainLoop(), 'wait' === _0x2c1832.act && (_0x378b5c.wait = true, _0x378b5c.systemPause(0x1), _0x2c1832.value !== _0x378b5c.connection.userid && (_0x378b5c.waitingList[_0x2c1832.value] = 0x1), _0x378b5c.disableControl(true)), 'wait-end' === _0x2c1832.act && (_0x378b5c.waitingList[_0x2c1832.value] && delete _0x378b5c.waitingList[_0x2c1832.value], 0x0 === Object.keys(_0x378b5c.waitingList).length && (_0x378b5c.wait = !0x1, _0x378b5c.systemPause(0x0), _0x378b5c.disableInput = !0x1)), 'sync-mem' === _0x2c1832.act) {
_0x378b5c.wait = true, _0x378b5c.systemPause(0x1);
var _0x2a5bce = _0x2c1832.value;
_0x378b5c.disableControl(true), _0x378b5c.connection.send(JSON.stringify({
'act': 'wait',
'value': _0x2a5bce
}));
for (var _0x8624ae = _0x378b5c.getStateInfo().split('|'), _0x99810a = _0x8624ae[0x0] >> 0x0, _0x96975a = new Uint8Array(_0x99810a), _0x4aea80 = _0x8624ae[0x1] >> 0x0, _0x2d2ba0 = 0x0; _0x2d2ba0 < _0x99810a; _0x2d2ba0 += 0x1) _0x96975a[_0x2d2ba0] = _0x27f4c4.Module.getValue(_0x4aea80 + _0x2d2ba0);
_0x27f4c4.FS.createDataFile('/', 'game.state', _0x96975a, true, true), _0x27f4c4.Module.cwrap('load_state', 'number', ['string', 'number'])('game.state', 0x0), _0x27f4c4.FS.unlink('game.state'), _0x378b5c.inputsData = {};
var _0x49054f = new File([_0x96975a], '' .concat(0x0, '-game.state'), {
'type': '',
'lastModified': new Date()
});
_0x378b5c.connection.shareFile(_0x49054f, _0x2a5bce);
}
if (_0x2c1832.act, 'get-players-result' === _0x2c1832.act && (_0x378b5c.players = _0x2c1832.value, _0x378b5c.roomMaster = _0x2c1832.master, _0x2c1832.newuser === _0x378b5c.connection.userid)) {
var _0x418ccb = _0x2593da.coreOptionsValues;
_0x2c1832.coreOptions && Object.keys(_0x2c1832.coreOptions).forEach(function(_0x3f4c8f) {
_0x418ccb[_0x3f4c8f] !== _0x2c1832.coreOptions[_0x3f4c8f] && _0x2593da.updateCoreOptions.call(_0x17edbf, _0x3f4c8f, _0x2c1832.coreOptions[_0x3f4c8f]);
}), _0x378b5c.showLoading.call(_0x17edbf), _0x378b5c.disableControl(true), setTimeout(function() {
_0x378b5c.connection.send(JSON.stringify({
'act': 'sync-mem',
'value': _0x378b5c.connection.userid
}), _0x378b5c.roomMaster);
}, 0x1), _0x378b5c.roomMaster && _0x378b5c.connection.peers[_0x378b5c.roomMaster] && (_0x378b5c.connection.extra.room_name = _0x378b5c.connection.peers[_0x378b5c.roomMaster].extra.room_name, _0x378b5c.connection.updateExtraData()), _0x378b5c.connection.peers[_0x378b5c.roomMaster] && (_0x3d0d28 = _0x378b5c.connection.peers[_0x378b5c.roomMaster].extra), _0x378b5c.openRoom(_0x17edbf);
}
}
}, _0x378b5c.connection.onUserStatusChanged = function(_0x4188ba) {
                if ('offline' === _0x4188ba.status) {
                    var _0x17edbf = _0x378b5c.players.indexOf(_0x4188ba.userid);
                    _0x17edbf >= 0x0 && (_0x378b5c.players[_0x17edbf] = null);
                }
}, _0x378b5c.connection.onExtraDataUpdated = function(_0x20ce61) {
_0x20ce61.userid === _0x378b5c.connection.userid && (_0x378b5c.connection.extra = _0x20ce61.extra);
}, _0x378b5c.connection.onleave = function(_0x30ebc6) {
if (_0x30ebc6.userid) {
var _0x2c1832 = _0x378b5c.players.indexOf(_0x30ebc6.userid);
_0x2c1832 >= 0x0 && (_0x378b5c.players[_0x2c1832] = null);
}
_0x378b5c.waitingList[_0x30ebc6.userid] && delete _0x378b5c.waitingList[_0x30ebc6.userid], _0x378b5c.connection.isInitiator && 0x0 === Object.keys(_0x378b5c.waitingList).length && _0x378b5c.wait && (_0x378b5c.wait = !0x1, _0x378b5c.systemPause(0x0), _0x378b5c.disableInput = !0x1, _0x378b5c.connection.send(JSON.stringify({
'act': 'wait-end',
'value': _0x378b5c.connection.userid
}))), _0x378b5c.openRoom(_0x17edbf), _0x378b5c.inputsData = {}, _0x378b5c.hideLoading.call(_0x17edbf);
};
var _0x2fba43 = function() {
try {
_0x378b5c.connection.onbeforeunload();
} catch (_0xac619d) {}
_0x378b5c.connected = !0x1, _0x378b5c.connection.password = null, _0x378b5c.players = [], _0x378b5c.connection.peersBackup = [], _0x378b5c.waitingList = {}, _0x378b5c.inputsData = {}, _0x378b5c.hideLoading.call(_0x17edbf), _0x132da7(_0x17edbf.elements.buttons.restart, !0x1), _0x132da7(_0x17edbf.elements.buttons.loadState, !0x1), _0x132da7(_0x17edbf.elements.buttons.saveState, !0x1), _0x378b5c.allowCheat && _0x132da7(_0x17edbf.elements.buttons.cheat, !0x1), _0x132da7(_0x2593da.contextMenu.querySelectorAll('ul li').item(0x2), !0x1), _0x132da7(_0x2593da.contextMenu.querySelectorAll('ul li').item(0x3), !0x1), Array.from(_0x17edbf.elements.buttons.play).forEach(function(_0x3d296d) {
_0x132da7(_0x3d296d, !0x1);
}), _0x2593da.updateCoreOptionMenuItems.call(_0x17edbf);
};
_0x378b5c.connection.close = _0x2fba43, _0x378b5c.connection.disconnect = _0x2fba43, _0x378b5c.connection.leave = _0x2fba43;
var _0x3f9c57, _0x10c0ad = function(_0x42ff74, _0x306f0b, _0x345d4d) {
_0x378b5c.hideLoading.call(_0x17edbf), _0x42ff74 ? (_0x378b5c.stopLoadRooms(), _0x132da7(_0x17edbf.elements.buttons.restart, true), _0x132da7(_0x17edbf.elements.buttons.loadState, true), _0x132da7(_0x17edbf.elements.buttons.saveState, true), _0x378b5c.resetCheat(), _0x132da7(_0x17edbf.elements.buttons.cheat, true), _0x132da7(_0x2593da.contextMenu.querySelectorAll('ul li').item(0x2), true), _0x132da7(_0x2593da.contextMenu.querySelectorAll('ul li').item(0x3), true), Array.from(_0x17edbf.elements.buttons.play).forEach(function(_0x2911fb) {
_0x132da7(_0x2911fb, true);
}), _0x378b5c.connection.socket && _0x378b5c.connection.socket.on('set-isInitiator-true', function(_0x31da46) {
_0x31da46 === _0x378b5c.connection.sessionid && (_0x378b5c.connection.socket.emit('extra-data-updated', _0x378b5c.connection.extra), _0x378b5c.connection.send(JSON.stringify({
'act': 'room-master',
'value': _0x378b5c.connection.userid
})), _0x378b5c.roomMaster = _0x378b5c.connection.userid, _0x378b5c.systemPause(0x0), _0x27f4c4.Module.resumeMainLoop(), _0x2593da.updateCoreOptionMenuItems.call(_0x17edbf), _0x132da7(_0x17edbf.elements.buttons.restart, !0x1), _0x132da7(_0x17edbf.elements.buttons.loadState, !0x1), _0x132da7(_0x17edbf.elements.buttons.saveState, !0x1), Array.from(_0x17edbf.elements.buttons.play).forEach(function(_0x132034) {
_0x132da7(_0x132034, !0x1);
}));
})) : (_0x4d8495.close(), _0x5289c1.querySelector('#modal-7d8fd50ed642340b-content').innerHTML = _0x345d4d, _0x4d8495.show('modal-7d8fd50ed642340b', {
'closeTrigger': 'data-modal-close'
}));
},
_0x1e0cdb = function(_0x5b3f13) {
var _0x2c1832 = _0x5b3f13.target.getAttribute('data-id');
if ('Y' === _0x5b3f13.target.getAttribute('data-password')) _0x4d8495.show('modal-5aa765d61d8327de', {
'closeTrigger': 'data-modal-close'
}), _0x325651.setAttribute('data-id', _0x2c1832);
else {
_0x378b5c.showLoading.call(_0x17edbf), _0x378b5c.connection.userid = _0x378b5c.connection.token();
try {
_0x378b5c.connection.join(_0x2c1832, _0x10c0ad);
} catch (_0x2767b9) {
_0x378b5c.hideLoading.call(_0x17edbf), _0x5289c1.querySelector('#modal-7d8fd50ed642340b-content').innerHTML = 'Error', _0x4d8495.show('modal-7d8fd50ed642340b', {
'closeTrigger': 'data-modal-close'
});
}
}
return _0x5b3f13.stopPropagation(), !0x1;
};
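            // Presumably polls the lobby server (listUrl) roughly every two seconds for open rooms
            // matching this game and domain, rendering a Join button for rooms that are not full and
            // prompting for a password where one is set.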
_0x378b5c.loadRoomsList = function() {
_0x132da7(_0x17edbf.elements.dialogs.netplay.querySelector('.' .concat(_0x378b5c.classNames['netplay-roomlist'])), !0x1), _0x132da7(_0x17edbf.elements.dialogs.netplay.querySelector('.' .concat(_0x378b5c.classNames['netplay-room'])), true), _0x132da7(_0x17edbf.elements.dialogs.netplay.querySelector('.' .concat(_0x378b5c.classNames['btn-create-room'])), !0x1), _0x132da7(_0x17edbf.elements.dialogs.netplay.querySelector('.' .concat(_0x378b5c.classNames['btn-quit'])), true);
                var listUrl = _0x17edbf.listUrl;
                if (!listUrl.endsWith('/')) {
                    listUrl += '/';
                }
var _0x2c1832 = _0x17edbf.elements.dialogs.netplay.querySelector('.' .concat(_0x378b5c.classNames['netplay-roomlist'])).querySelector('tbody'),
_0x5cf2be = [listUrl, 'list?game_id=', _0x17edbf.config.gameId, '&domain=', document.domain].join('');
_0x550f17.a.get(_0x5cf2be, {}).then(function(_0x37c60b) {
if (_0x37c60b.data) {
var _0x4782da = [],
_0x28e87d = [];
_0x2c1832.querySelectorAll('tr').forEach(function(_0x1e5a22) {
_0x28e87d.push(_0x1e5a22.getAttribute('id'));
}), Object.keys(_0x37c60b.data).forEach(function(_0x47ae0e) {
var _0x4a8f4c = _0x37c60b.data[_0x47ae0e];
if (_0x4782da.push(_0x47ae0e), _0x28e87d.includes(_0x47ae0e)) {
var _0x2d672f = _0x2c1832.querySelector('tr[id="' .concat(_0x47ae0e, '"]'));
_0x2c1832.removeChild(_0x2d672f);
}
var _0x295f41 = document.createElement('tr');
_0x295f41.setAttribute('id', _0x47ae0e);
var _0xa2ea61 = _0x4a8f4c.password ? _0x378b5c.icons.lock : '',
_0x5452b6 = _0x4a8f4c.password ? 'Y' : 'N',
_0xb3121d = _0x4a8f4c.current < _0x4a8f4c.max ? '<span data-id="' .concat(_0x47ae0e, '" data-password="').concat(_0x5452b6, '" class="').concat(_0x378b5c.classNames['btn-join-room'], '">Join</span>') : '',
_0x5c8146 = _0x4a8f4c.country ? _0x4a8f4c.country : '?';
_0x295f41.innerHTML = '<td>[' .concat(_0x5c8146, '] ').concat(_0xa2ea61, ' ').concat(_0x4a8f4c.room_name, '</td><td>').concat(_0x4a8f4c.current, '/').concat(_0x4a8f4c.max, '</td><td>').concat(_0xb3121d, '</td>'), _0x1093f4.call(_0xa88a13, _0x295f41.querySelector('.' .concat(_0x378b5c.classNames['btn-join-room'])), 'click', _0x1e0cdb), _0x2c1832.appendChild(_0x295f41);
}), _0x2c1832.querySelectorAll('tr').forEach(function(_0x24dcf7) {
_0x4782da.includes(_0x24dcf7.id) || _0x2c1832.removeChild(_0x24dcf7);
});
}
_0x378b5c.loadRoomsListTimer = setTimeout(_0x378b5c.loadRoomsList, 0x7d0);
}).catch(function(_0x227055) {
console.log('Network Error', _0x227055), _0x378b5c.loadRoomsListTimer = setTimeout(_0x378b5c.loadRoomsList, 0x7d0);
});
}, _0x378b5c.openRoom = function(_0x10625f) {
_0x378b5c.stopLoadRooms(), _0x132da7(_0x10625f.elements.dialogs.netplay.querySelector('.' .concat(_0x378b5c.classNames['netplay-roomlist'])), true), _0x132da7(_0x10625f.elements.dialogs.netplay.querySelector('.' .concat(_0x378b5c.classNames['netplay-room'])), !0x1), _0x132da7(_0x10625f.elements.dialogs.netplay.querySelector('.' .concat(_0x378b5c.classNames['btn-create-room'])), true), _0x132da7(_0x10625f.elements.dialogs.netplay.querySelector('.' .concat(_0x378b5c.classNames['btn-quit'])), !0x1), _0x378b5c.resetCheat(), _0x132da7(_0x10625f.elements.buttons.cheat, true);
var _0x17edbf = _0x10625f.elements.dialogs.netplay.querySelector('.' .concat(_0x378b5c.classNames['netplay-room'])),
_0x2c1832 = _0x17edbf.querySelector('[data-room-password]');
null === _0x378b5c.connection.password ? _0x132da7(_0x2c1832, true) : (_0x132da7(_0x2c1832, !0x1), _0x2c1832.querySelector('span').innerText = _0x378b5c.connection.password);
var _0x18c1cb = _0x17edbf.querySelector('tbody');
_0x18c1cb.innerHTML = '', _0x17edbf.querySelector('strong').innerText = _0x3d0d28.room_name, _0x378b5c.players.forEach(function(_0x938d55, _0xf3d39a) {
var _0x2c1832 = null;
if (_0x938d55 && (_0x378b5c.connection.peers[_0x938d55] ? _0x2c1832 = _0x378b5c.connection.peers[_0x938d55].extra : _0x378b5c.connection.userid === _0x938d55 && (_0x2c1832 = _0x378b5c.connection.extra), _0x2c1832 && _0x2c1832.name)) {
var _0x564064 = document.createElement('tr');
_0x564064.innerHTML = '<td>' .concat(_0xf3d39a + 0x1, '</td><td>').concat(_0x2c1832.name, '</td><td></td>'), _0x18c1cb.appendChild(_0x564064);
}
});
}, _0x1093f4.call(this, _0x325651.querySelector('.' .concat(_0x378b5c.classNames['modal__btn-primary'])), 'click', function(_0x9ac969) {
var _0x2c1832 = _0x325651.querySelector('.' .concat(_0x378b5c.classNames['netplay-room-password-input'])).value;
_0x378b5c.showLoading.call(_0x17edbf), _0x378b5c.connection.password = _0x2c1832;
var _0xcf7207 = _0x325651.getAttribute('data-id');
_0x378b5c.connection.join(_0xcf7207, function(_0x1a0deb, _0x1ef6c8, _0x1ad18e) {
_0x10c0ad(_0x1a0deb, 0x0, _0x1ad18e), _0x1a0deb && _0x325651.classList.remove(_0x449eac({
'is-open': true
}));
}), _0x9ac969.stopPropagation();
}), _0x1093f4.call(this, _0x2c1832.querySelector('.' .concat(_0x378b5c.classNames['modal__btn-primary'])), 'click', function(_0x43f795) {
var _0x17edbf = _0x2c1832.querySelector('.' .concat(_0x378b5c.classNames['netplay-player-name'])).value;
_0x17edbf = _0x17edbf.replace(/<|>/g, ''), _0x2c1832.querySelector('.' .concat(_0x378b5c.classNames.modal__errmsg)).innerHTML = '', _0x17edbf.trim() && (_0x378b5c.playerName = _0x17edbf.trim(), _0x378b5c.connection.extra.name = _0x17edbf.trim(), _0x4d8495.close()), _0x43f795.stopPropagation();
}), _0x1093f4.call(this, _0x36bde0.querySelector('.' .concat(_0x378b5c.classNames['modal__btn-primary'])), 'click', function(_0x202cf6) {
var _0x2c1832 = _0x36bde0.querySelector('.' .concat(_0x378b5c.classNames['netplay-room-name-input'])).value;
_0x2c1832 = _0x2c1832.replace(/<|>/g, '');
var _0x3510ed = _0x36bde0.querySelector('select[data-max-players] option:checked').value;
_0x3510ed < 0x2 && (_0x3510ed = 0x2), _0x3510ed > 0x4 && (_0x3510ed = 0x4);
for (var _0x1264b2 = 0x0; _0x1264b2 < _0x3510ed; _0x1264b2++) _0x378b5c.players[_0x1264b2] = null;
_0x378b5c.connection.maxParticipantsAllowed = _0x3510ed;
var _0x140bcf = _0x36bde0.querySelector('.' .concat(_0x378b5c.classNames['netplay-room-password-input'])).value;
_0x36bde0.querySelector('.' .concat(_0x378b5c.classNames.modal__errmsg)).innerHTML = '', _0x2c1832.trim() ? (_0x378b5c.connection.extra.room_name = _0x2c1832.trim(), '' !== _0x140bcf.trim() && (_0x378b5c.connection.password = _0x140bcf.trim()), _0x378b5c.showLoading.call(_0x17edbf), _0x378b5c.connection.open(_0x378b5c.connection.token(), function(_0x3c6c26, _0x203288, _0x210547) {
_0x378b5c.hideLoading.call(_0x17edbf), _0x3c6c26 ? (_0x378b5c.players[0x0] = _0x378b5c.connection.userid, _0x378b5c.roomMaster = _0x378b5c.connection.userid, _0x3f9c57.closeModal(), _0x378b5c.stopLoadRooms(), _0x3d0d28 = _0x378b5c.connection.extra, _0x378b5c.openRoom(_0x17edbf), _0x132da7(_0x2593da.contextMenu.querySelectorAll('ul li').item(0x2), true), _0x132da7(_0x2593da.contextMenu.querySelectorAll('ul li').item(0x3), true)) : _0x36bde0.querySelector('.' .concat(_0x378b5c.classNames.modal__errmsg)).innerHTML = _0x210547;
})) : _0x3f9c57.closeModal(), _0x202cf6.stopPropagation();
}), _0x1093f4.call(this, _0x17edbf.elements.dialogs.netplay.querySelector('.' .concat(_0x378b5c.classNames['btn-create-room'])), 'click', function(_0x48cb26) {
return _0x3f9c57 = _0x4d8495.show('modal-85cd7a1c543a484a', {
'closeTrigger': 'data-modal-close',
'onClose': function() {
_0x17edbf.elements.container.focus();
}
}), _0x48cb26.stopPropagation(), !0x1;
}), _0x1093f4.call(this, _0x17edbf.elements.dialogs.netplay.querySelector('.' .concat(_0x378b5c.classNames['btn-quit'])), 'click', function(_0x68459d) {
return _0x378b5c.connection.close(), _0x378b5c.stopLoadRooms(), _0x378b5c.loadRoomsList(), _0x378b5c.inputsData = {}, _0x68459d.stopPropagation(), !0x1;
});
}
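        // Presumably exposed on window for debugging.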
        window._0x378b5c = _0x378b5c;
},
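        // Appears to translate document keydown/keyup events into emulator input: while the key-binding
        // overlay is open the pressed key is captured into the keyboard column (Escape clears the
        // binding); otherwise mapped keys are forwarded via simulateInput, with ids 24-26 triggering
        // quick save / quick load / state-slot changes. Default handling of space, arrows and F-keys is
        // suppressed while the emulator container is focused.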
'initKeyboard': function() {
var _0xa88a13 = this,
_0x17edbf = this,
_0x2c1832 = this.elements.dialogs.gamepad.querySelector('.' .concat(_0x378b5c.classNames.overlay));
_0x1093f4.call(this, document.body, 'keydown keyup', function(_0x459b32) {
            // Appears to skip F1-F12 (keyCodes 0x70-0x7b) so they are not treated as emulator input.
            if (_0x459b32.which < 0x70 || _0x459b32.which > 0x7b) {
var _0x335c01 = !_0xa88a13.elements.dialogs.gamepad.hidden && !_0x2c1832.hidden,
_0x2eb03e = 'keydown' === _0x459b32.type;
if (_0x2eb03e && _0x335c01) {
var _0x1a3a47 = _0x2c1832.getAttribute('data-index'),
_0x1394cb = _0x2c1832.getAttribute('data-id'),
_0x2c8fd1 = null;
if (_0x459b32.key && _0x378b5c.keyMap[_0x459b32.keyCode] === 'escape') {
(_0x2c8fd1 = _0x17edbf.elements.dialogs.gamepad.querySelector('[data-id="' .concat(_0x1394cb, '"][data-index="').concat(_0x1a3a47, '"][data-type="1"]'))).setAttribute('data-value', '')
_0x2c8fd1.value = ''
_0x132da7(_0x2c1832, true);
return;
                    }
_0x459b32.key && ((_0x2c8fd1 = _0x17edbf.elements.dialogs.gamepad.querySelector('[data-id="' .concat(_0x1394cb, '"][data-index="').concat(_0x1a3a47, '"][data-type="1"]'))).setAttribute('data-value', _0x459b32.keyCode), _0x2c8fd1.value = _0x378b5c.keyMap[_0x459b32.keyCode]), _0x132da7(_0x2c1832, true);
} else Object.keys(_0x378b5c.controllers).forEach(function(_0x3863d2) {
Object.keys(_0x378b5c.controllers[_0x3863d2]).forEach(function(_0x509939) {
if (parseInt(_0x378b5c.controllers[_0x3863d2][_0x509939].value, 0xa) === _0x459b32.keyCode && ['24', '25', '26'].includes(_0x509939) && _0x459b32.type == 'keydown' && ! _0x459b32.repeat) {
if (_0x509939 == '24') {//save
_0x378b5c.quickSaveState()
} else if (_0x509939 == '25') {//load
_0x378b5c.quickLoadState()
} else if (_0x509939 == '26') {//change save state slot
_0x378b5c.changeStateSlot()
}
// quick save/load button
}
parseInt(_0x378b5c.controllers[_0x3863d2][_0x509939].value, 0xa) === _0x459b32.keyCode && (_0x2eb03e ? _0x509939 >= 0x10 && _0x509939 <= 0x17 ? _0x378b5c.simulateInput(_0x3863d2, _0x509939, 0x7fff) : _0x378b5c.simulateInput(_0x3863d2, _0x509939, 0x1) : _0x378b5c.simulateInput(_0x3863d2, _0x509939, 0x0));
});
});
}
}), document.body.addEventListener('keydown', function(_0x14d612) {
var _0x2c1832 = _0x14d612.keyCode ? _0x14d612.keyCode : _0x14d612.which;
[0x20, 0x25, 0x26, 0x27, 0x28, 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x7b].includes(_0x2c1832) && _0x14d612.srcElement === _0x17edbf.elements.container && _0x14d612.preventDefault(), _0x14d612.srcElement.className.split(/ +/).includes(_0x378b5c.classNames.set) && _0x14d612.preventDefault();
});
},
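    /*
     * initKeyboard above routes raw keydown/keyup events into the emulator: F1-F12
     * are ignored, controller mapping slots 24/25/26 trigger quick save, quick load
     * and save-slot cycling, and every other mapped key is forwarded through
     * simulateInput (axis slots 16-23 get the full 0x7fff magnitude, digital
     * buttons get 1/0). A condensed sketch of that dispatch, with controllers
     * assumed to be the same { player: { buttonId: { value: keyCode } } } map used
     * above:
     *
     *   function onKey(e, controllers, simulateInput) {
     *     var down = e.type === 'keydown';
     *     Object.keys(controllers).forEach(function(player) {
     *       Object.keys(controllers[player]).forEach(function(buttonId) {
     *         if (parseInt(controllers[player][buttonId].value, 10) !== e.keyCode) return;
     *         var analog = buttonId >= 16 && buttonId <= 23;
     *         simulateInput(player, buttonId, down ? (analog ? 0x7fff : 1) : 0);
     *       });
     *     });
     *   }
     */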
'initGamepad': function() {
var _0xa88a13 = this,
_0x17edbf = this,
_0x2c1832 = this.elements.dialogs.gamepad.querySelector('.' .concat(_0x378b5c.classNames.overlay)),
_0x181250 = new _0x4ad1c6[('Gamepad')]();
_0x378b5c.gamepad = _0x181250, _0x181250.init() && (_0x181250.bind(_0x4ad1c6.Gamepad.Event.TICK, function(_0x2fe35d) {
_0x2fe35d.forEach(function(_0x2b21f8) {
var _0x17edbf;
_0x2b21f8 && _0x2b21f8.axes && _0x2b21f8.axes[0x9] && ((_0x17edbf = _0x2b21f8.axes[0x9]) < 3.28571 ? 0x1 == _0x17edbf ? (_0x2b21f8.extra_buttons[0xc].pressed || (_0x2b21f8.extra_buttons[0xc] = {
'pressed': true,
'value': 0x1
}, _0x181250._fire(_0x4ad1c6.Gamepad.Event.BUTTON_DOWN, {
'index': 0xc,
'gamepad': _0x2b21f8,
'gamepadIndex': _0x2b21f8.index
})), _0x2b21f8.extra_buttons[0xe].pressed || (_0x2b21f8.extra_buttons[0xe] = {
'pressed': true,
'value': 0x1
}, _0x181250._fire(_0x4ad1c6.Gamepad.Event.BUTTON_DOWN, {
'index': 0xe,
'gamepad': _0x2b21f8,
'gamepadIndex': _0x2b21f8.index
}))) : _0x17edbf <= -0.7142857 && _0x17edbf >= -0.714291 ? (_0x2b21f8.extra_buttons[0xc].pressed || (_0x2b21f8.extra_buttons[0xc] = {
'pressed': true,
'value': 0x1
}, _0x181250._fire(_0x4ad1c6.Gamepad.Event.BUTTON_DOWN, {
'index': 0xc,
'gamepad': _0x2b21f8,
'gamepadIndex': _0x2b21f8.index
})), _0x2b21f8.extra_buttons[0xf].pressed || (_0x2b21f8.extra_buttons[0xf] = {
'pressed': true,
'value': 0x1
}, _0x181250._fire(_0x4ad1c6.Gamepad.Event.BUTTON_DOWN, {
'index': 0xf,
'gamepad': _0x2b21f8,
'gamepadIndex': _0x2b21f8.index
}))) : _0x17edbf >= 0.42856 && _0x17edbf <= 0.42858 ? (_0x2b21f8.extra_buttons[0xd].pressed || (_0x2b21f8.extra_buttons[0xd] = {
'pressed': true,
'value': 0x1
}, _0x181250._fire(_0x4ad1c6.Gamepad.Event.BUTTON_DOWN, {
'index': 0xd,
'gamepad': _0x2b21f8,
'gamepadIndex': _0x2b21f8.index
})), _0x2b21f8.extra_buttons[0xe].pressed || (_0x2b21f8.extra_buttons[0xe] = {
'pressed': true,
'value': 0x1
}, _0x181250._fire(_0x4ad1c6.Gamepad.Event.BUTTON_DOWN, {
'index': 0xe,
'gamepad': _0x2b21f8,
'gamepadIndex': _0x2b21f8.index
}))) : _0x17edbf >= -0.14287 && _0x17edbf <= -0.14285 ? (_0x2b21f8.extra_buttons[0xd].pressed || (_0x2b21f8.extra_buttons[0xd] = {
'pressed': true,
'value': 0x1
}, _0x181250._fire(_0x4ad1c6.Gamepad.Event.BUTTON_DOWN, {
'index': 0xd,
'gamepad': _0x2b21f8,
'gamepadIndex': _0x2b21f8.index
})), _0x2b21f8.extra_buttons[0xf].pressed || (_0x2b21f8.extra_buttons[0xf] = {
'pressed': true,
'value': 0x1
}, _0x181250._fire(_0x4ad1c6.Gamepad.Event.BUTTON_DOWN, {
'index': 0xf,
'gamepad': _0x2b21f8,
'gamepadIndex': _0x2b21f8.index
}))) : _0x17edbf <= -0x1 && _0x17edbf >= -1.01 ? _0x2b21f8.extra_buttons[0xc].pressed || (_0x2b21f8.extra_buttons[0xc] = {
'pressed': true,
'value': 0x1
}, _0x181250._fire(_0x4ad1c6.Gamepad.Event.BUTTON_DOWN, {
'index': 0xc,
'gamepad': _0x2b21f8,
'gamepadIndex': _0x2b21f8.index
})) : _0x17edbf >= 0.142857 && _0x17edbf <= 0.142858 ? _0x2b21f8.extra_buttons[0xd].pressed || (_0x2b21f8.extra_buttons[0xd] = {
'pressed': true,
'value': 0x1
}, _0x181250._fire(_0x4ad1c6.Gamepad.Event.BUTTON_DOWN, {
'index': 0xd,
'gamepad': _0x2b21f8,
'gamepadIndex': _0x2b21f8.index
})) : _0x17edbf >= 0.7142857 && _0x17edbf <= 0.7142858 ? _0x2b21f8.extra_buttons[0xe].pressed || (_0x2b21f8.extra_buttons[0xe] = {
'pressed': true,
'value': 0x1
}, _0x181250._fire(_0x4ad1c6.Gamepad.Event.BUTTON_DOWN, {
'index': 0xe,
'gamepad': _0x2b21f8,
'gamepadIndex': _0x2b21f8.index
})) : _0x17edbf <= -0.42857 && _0x17edbf >= -0.42858 && (_0x2b21f8.extra_buttons[0xf].pressed || (_0x2b21f8.extra_buttons[0xf] = {
'pressed': true,
'value': 0x1
}, _0x181250._fire(_0x4ad1c6.Gamepad.Event.BUTTON_DOWN, {
'index': 0xf,
'gamepad': _0x2b21f8,
'gamepadIndex': _0x2b21f8.index
}))) : (_0x2b21f8.extra_buttons[0xc].pressed && (_0x2b21f8.extra_buttons[0xc] = {
'pressed': !0x1,
'value': 0x0
}, _0x181250._fire(_0x4ad1c6.Gamepad.Event.BUTTON_UP, {
'index': 0xc,
'gamepad': _0x2b21f8,
'gamepadIndex': _0x2b21f8.index
})), _0x2b21f8.extra_buttons[0xd].pressed && (_0x2b21f8.extra_buttons[0xd] = {
'pressed': !0x1,
'value': 0x0
}, _0x181250._fire(_0x4ad1c6.Gamepad.Event.BUTTON_UP, {
'index': 0xd,
'gamepad': _0x2b21f8,
'gamepadIndex': _0x2b21f8.index
})), _0x2b21f8.extra_buttons[0xe].pressed && (_0x2b21f8.extra_buttons[0xe] = {
'pressed': !0x1,
'value': 0x0
}, _0x181250._fire(_0x4ad1c6.Gamepad.Event.BUTTON_UP, {
'index': 0xe,
'gamepad': _0x2b21f8,
'gamepadIndex': _0x2b21f8.index
})), _0x2b21f8.extra_buttons[0xf].pressed && (_0x2b21f8.extra_buttons[0xf] = {
'pressed': !0x1,
'value': 0x0
}, _0x181250._fire(_0x4ad1c6.Gamepad.Event.BUTTON_UP, {
'index': 0xf,
'gamepad': _0x2b21f8,
'gamepadIndex': _0x2b21f8.index
}))));
});
}), _0x181250.bind(_0x4ad1c6.Gamepad.Event.BUTTON_DOWN, function(_0x15761a) {
if (!_0xa88a13.elements.dialogs.gamepad.hidden && !_0x2c1832.hidden) {
var _0x387018, _0xdd4205 = parseInt(_0x2c1832.getAttribute('data-index'), 0xa),
_0x1f4ee2 = _0x2c1832.getAttribute('data-id');
if (_0x15761a.gamepad.index === parseInt(_0xdd4205, 0xa)) {
for (var _0x211087 = 0x0; _0x211087 < 0x20 && ((_0x387018 = _0x15761a.gamepad.buttons[_0x211087]) || (_0x387018 = _0x15761a.gamepad.extra_buttons[_0x211087]), !_0x387018 || !('number' == typeof _0x387018 && 0x1 === _0x387018 || 'number' == typeof _0x387018.value && 0x1 === _0x387018.value)); _0x211087 += 0x1);
var _0x126d2d = null;
(_0x126d2d = _0x17edbf.elements.dialogs.gamepad.querySelector('[data-id="' .concat(_0x1f4ee2, '"][data-index="').concat(_0xdd4205, '"][data-type="2"]'))).setAttribute('data-value', _0x15761a.index), _0x126d2d.value = 'button ' .concat(_0x15761a.index + 0x1), _0x132da7(_0x2c1832, true);
}
} else Object.keys(_0x378b5c.controllers[_0x15761a.gamepad.index.toString()]).forEach(function(_0x3cf4d3) {
var _0x5cf388 = _0x15761a.gamepad.index.toString();
if (parseInt(_0x378b5c.controllers[_0x5cf388][_0x3cf4d3].value2, 0xa) === parseInt(_0x15761a.index, 0xa) && ['24', '25', '26'].includes(_0x3cf4d3)) {
if (_0x3cf4d3 == '24') {//save
_0x378b5c.quickSaveState()
} else if (_0x3cf4d3 == '25') {//load
_0x378b5c.quickLoadState()
} else if (_0x3cf4d3 == '26') {//change save state slot
_0x378b5c.changeStateSlot()
}
}
parseInt(_0x378b5c.controllers[_0x5cf388][_0x3cf4d3].value2, 0xa) === parseInt(_0x15761a.index, 0xa) && parseInt(_0x5cf388, 0xa) === parseInt(_0x15761a.gamepad.index, 0xa) && _0x378b5c.simulateInput(_0x5cf388, _0x3cf4d3, 0x1);
});
}), _0x181250.bind(_0x4ad1c6.Gamepad.Event.BUTTON_UP, function(_0x3220b3) {
Object.keys(_0x378b5c.controllers).forEach(function(_0x302c3e) {
Object.keys(_0x378b5c.controllers[_0x302c3e]).forEach(function(_0x14eb8a) {
parseInt(_0x378b5c.controllers[_0x302c3e][_0x14eb8a].value2, 0xa) === parseInt(_0x3220b3.index, 0xa) && parseInt(_0x302c3e, 0xa) === parseInt(_0x3220b3.gamepad.index, 0xa) && _0x378b5c.simulateInput(_0x302c3e, _0x14eb8a, 0x0);
});
});
}), _0x181250.bind(_0x4ad1c6.Gamepad.Event.AXIS_CHANGED, function(_0x31f017) {
var value = function(value) {
if (value > 0.5 || value < -0.5) { // snap to +/-1 outside a 0.5 dead zone
return (value > 0) ? 1 : -1;
} else {
return 0;
}
}(parseFloat(_0x31f017.value));
if (!_0xa88a13.elements.dialogs.gamepad.hidden && !_0x2c1832.hidden) {
if (value !== 0) {
var _0x1f4ee2 = _0x2c1832.getAttribute('data-id'),
_0xdd4205 = parseInt(_0x2c1832.getAttribute('data-index'), 0xa);
var _0x126d2d = _0xa88a13.elements.dialogs.gamepad.querySelector('[data-id="' .concat(_0x1f4ee2, '"][data-index="').concat(_0xdd4205, '"][data-type="2"]'))
_0x126d2d.setAttribute('data-value', _0x31f017.axis + ':' + value)
_0x126d2d.value = _0x31f017.axis + ':' + value
_0x132da7(_0x2c1832, true)
}
} else {
if (! _0x378b5c.prevButtons) {
_0x378b5c.prevButtons = {}
}
var quit = false
var _0x5cf388 = _0x31f017.gamepad.index.toString();
if (! _0x378b5c.prevButtons[_0x5cf388]) {
_0x378b5c.prevButtons[_0x5cf388] = {}
}
if (! _0x378b5c.prevButtons[_0x5cf388][_0x31f017.axis]) {
_0x378b5c.prevButtons[_0x5cf388][_0x31f017.axis] = 0
}
if (! _0x378b5c.prevButtons[_0x5cf388].buttonID) {
_0x378b5c.prevButtons[_0x5cf388].buttonID = 0
}
for (var w=0; w<Object.keys(_0x378b5c.controllers[_0x5cf388]).length; w++) {
var _0x3cf4d3 = Object.keys(_0x378b5c.controllers[_0x5cf388])[w]
if (! _0x378b5c.controllers[_0x5cf388][_0x3cf4d3].value2) {
continue
}
if (_0x378b5c.controllers[_0x5cf388][_0x3cf4d3].value2.split(':')[0] === _0x31f017.axis && parseInt(_0x378b5c.controllers[_0x5cf388][_0x3cf4d3].value2.split(':')[1]) === value && ['24', '25', '26'].includes(_0x3cf4d3)) {
if (_0x3cf4d3 == '24') {//save
quit = true
_0x378b5c.quickSaveState()
} else if (_0x3cf4d3 == '25') {//load
quit = true
_0x378b5c.quickLoadState()
} else if (_0x3cf4d3 == '26') {//change save state slot
_0x378b5c.changeStateSlot()
}
}
if (value === 0) {
// button up
if (_0x378b5c.controllers[_0x5cf388][_0x3cf4d3].value2.split(':')[0] === _0x31f017.axis) {
_0x378b5c.prevButtons[_0x5cf388][_0x31f017.axis] = value
_0x378b5c.prevButtons[_0x5cf388].buttonID = _0x3cf4d3
_0x378b5c.simulateInput(_0x5cf388, _0x3cf4d3, 0x0)
}
} else {
//button down
if ((1 === _0x378b5c.prevButtons[_0x5cf388][_0x31f017.axis] && value === -1) ||
(-1 === _0x378b5c.prevButtons[_0x5cf388][_0x31f017.axis] && value === 1)) {
_0x378b5c.simulateInput(_0x5cf388, _0x378b5c.prevButtons[_0x5cf388].buttonID, 0x0)
}
if (_0x378b5c.controllers[_0x5cf388][_0x3cf4d3].value2.split(':')[0] === _0x31f017.axis && parseInt(_0x378b5c.controllers[_0x5cf388][_0x3cf4d3].value2.split(':')[1]) === value) {
_0x378b5c.prevButtons[_0x5cf388][_0x31f017.axis] = value
_0x378b5c.prevButtons[_0x5cf388].buttonID = _0x3cf4d3
quit = true
_0x378b5c.simulateInput(_0x5cf388, _0x3cf4d3, 0x1)
}
}
};
if (quit) {
return
}
if (! _0xa88a13.elements.dialogs.gamepad.hidden && !_0x2c1832.hidden) {
return
}
}
Math.abs(_0x31f017.value) <= 0.1 && (_0x31f017.value = 0x0); // 0.1 dead zone for the analog sticks
var _0x17edbf = _0x31f017.gamepad.index;
'LEFT_STICK_X' === _0x31f017.axis && (_0x31f017.value > 0x0 ? (_0x378b5c.simulateInput(_0x17edbf, 0x10, 0x7fff * _0x31f017.value), _0x378b5c.simulateInput(_0x17edbf, 0x11, 0x0)) : (_0x378b5c.simulateInput(_0x17edbf, 0x11, 0x7fff * _0x31f017.value), _0x378b5c.simulateInput(_0x17edbf, 0x10, 0x0))), 'LEFT_STICK_Y' === _0x31f017.axis && (_0x31f017.value > 0x0 ? (_0x378b5c.simulateInput(_0x17edbf, 0x12, 0x7fff * _0x31f017.value), _0x378b5c.simulateInput(_0x17edbf, 0x13, 0x0)) : (_0x378b5c.simulateInput(_0x17edbf, 0x13, 0x7fff * _0x31f017.value), _0x378b5c.simulateInput(_0x17edbf, 0x12, 0x0))), 'RIGHT_STICK_X' === _0x31f017.axis && (_0x31f017.value > 0x0 ? (_0x378b5c.simulateInput(_0x17edbf, 0x14, 0x7fff * _0x31f017.value), _0x378b5c.simulateInput(_0x17edbf, 0x15, 0x0)) : (_0x378b5c.simulateInput(_0x17edbf, 0x15, 0x7fff * _0x31f017.value), _0x378b5c.simulateInput(_0x17edbf, 0x14, 0x0))), 'RIGHT_STICK_Y' === _0x31f017.axis && (_0x31f017.value > 0x0 ? (_0x378b5c.simulateInput(_0x17edbf, 0x16, 0x7fff * _0x31f017.value), _0x378b5c.simulateInput(_0x17edbf, 0x17, 0x0)) : (_0x378b5c.simulateInput(_0x17edbf, 0x17, 0x7fff * _0x31f017.value), _0x378b5c.simulateInput(_0x17edbf, 0x16, 0x0)));
}), _0x181250.bind(_0x4ad1c6.Gamepad.Event.DISCONNECTED, function(_0x1c996a) {
[0x0, 0x1, 0x2, 0x3].forEach(function(_0x429f30) {
var _0x2c1832 = _0x181250.gamepads[_0x429f30];
_0x17edbf.elements.dialogs.gamepad.querySelector('#controls-' .concat(_0x429f30, ' .gamepad-name')).innerHTML = _0x2c1832 ? _0x2c1832.id : 'n/a';
});
}), _0x181250.bind(_0x4ad1c6.Gamepad.Event.CONNECTED, function(_0x748f24) {
Object.keys(_0x181250.gamepads).forEach(function(_0x327c74) {
var _0x2c1832 = _0x181250.gamepads[_0x327c74];
_0x17edbf.elements.dialogs.gamepad.querySelector('#controls-' .concat(_0x327c74, ' .gamepad-name')).innerHTML = _0x2c1832 ? _0x2c1832.id : 'n/a';
});
}));
},
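    /*
     * The AXIS_CHANGED handler above digitizes analog axes that are mapped to
     * buttons: the raw value is snapped to -1/0/+1 around a 0.5 dead zone, the
     * previous state per axis is remembered in prevButtons so a press only fires
     * on an actual edge, and a direction flip releases the previously held button
     * before pressing the new one. A simplified sketch of that idea (the original
     * interleaves this with the quick save/load bindings):
     *
     *   function digitize(raw) {
     *     // values inside the +/-0.5 dead zone count as released
     *     if (raw > 0.5 || raw < -0.5) return raw > 0 ? 1 : -1;
     *     return 0;
     *   }
     *
     *   function onAxis(axis, raw, prev, press, release) {
     *     var v = digitize(raw);
     *     if (prev[axis] === v) return;            // no edge, nothing to do
     *     if (prev[axis] !== 0) release(axis, prev[axis]);
     *     if (v !== 0) press(axis, v);
     *     prev[axis] = v;
     *   }
     */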
'setShader': function(_0x4cbec3) {
if ('disabled' === _0x4cbec3) _0x27f4c4.FS.unlink('/shader/shader.glslp', ''), _0x378b5c.toggleShader && _0x378b5c.toggleShader(0x0);
else {
_0x378b5c.toggleShader && _0x378b5c.toggleShader(0x0);
var _0x17edbf = '/shader/' .concat(_0x4cbec3);
_0x27f4c4.FS.writeFile('/shader/shader.glslp', function(_0x52407e) {
if (!(_0x52407e instanceof Int32Array)) throw new Error('uintArrayToString: Only accepts Int32Array parameter');
for (var _0x17edbf = '', _0x2c1832 = 0x0, _0x4d451e = _0x52407e.length; _0x2c1832 < _0x4d451e; _0x2c1832 += 0x1) {
var _0x37c7b6 = _0x52407e[_0x2c1832];
if (_0x37c7b6 > 0xffff) throw new Error('Invalid value attempted to be serialised');
_0x17edbf += String.fromCharCode(_0x37c7b6);
}
return _0x17edbf;
}(new Int32Array(_0x27f4c4.FS.readFile(_0x17edbf)))), _0x378b5c.toggleShader && _0x378b5c.toggleShader(0x1);
}
},
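    /*
     * setShader above swaps GLSL shader presets by writing the selected .glslp
     * file to the fixed path /shader/shader.glslp inside the Emscripten virtual
     * file system and then re-enabling the shader pass. A sketch of the same flow
     * (FS and toggleShader refer to the Emscripten module and the shader_enable
     * cwrap binding used elsewhere in this file; the original round-trips the
     * bytes through an Int32Array, the sketch reads them directly):
     *
     *   function setShader(FS, toggleShader, preset) {
     *     toggleShader(0);                               // disable while swapping
     *     if (preset === 'disabled') {
     *       try { FS.unlink('/shader/shader.glslp'); } catch (e) {}
     *       return;
     *     }
     *     var bytes = FS.readFile('/shader/' + preset);  // Uint8Array
     *     var text = '';
     *     for (var i = 0; i < bytes.length; i++) text += String.fromCharCode(bytes[i]);
     *     FS.writeFile('/shader/shader.glslp', text);
     *     toggleShader(1);                               // re-enable with the new preset
     *   }
     */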
'toggleVirtualGamepad': function(_0x1ed80b) {
var _0xa88a13 = this;
if (null === _0x378b5c.virtualGamepadContainer) {
_0x378b5c.virtualGamepadContainer = _0x428003('div', {
'class': _0x449eac({
'ejs-virtual-gamepad': true
}),
'hidden': ''
}), this.elements.container.appendChild(_0x378b5c.virtualGamepadContainer), _0x378b5c.virtualGamepadContainer.innerHTML = '\n <div class="' .concat(_0x449eac({
'virtual-gamepad': true
}), '" style="display: block;">\n <div class="').concat(_0x449eac({
'top': true
}), '">\n <div class="').concat(_0x449eac({
'buttons': true,
'b_l': true
}), '">L</div>\n <div class="').concat(_0x449eac({
'buttons': true,
'b_r': true
}), '">R</div>\n </div>\n <div class="').concat(_0x449eac({
'left': true
}), '"></div>\n <div class="').concat(_0x449eac({
'center': true
}), '">\n <div class="').concat(_0x449eac({
'buttons': true,
'b_select': true
}), '">SELECT</div>\n <div class="').concat(_0x449eac({
'buttons': true,
'b_start': true
}), '">START</div>\n <div class="').concat(_0x449eac({
'buttons': true,
'b_c': true
}), '">MENU</div>\n </div>\n <div class="').concat(_0x449eac({
'right': true
}), '">\n <div class="').concat(_0x449eac({
'buttons': true,
'b_a': true
}), '">A</div>\n <div class="').concat(_0x449eac({
'buttons': true,
'b_b': true
}), '">B</div>\n <div class="').concat(_0x449eac({
'buttons': true,
'b_x': true
}), '">X</div>\n <div class="').concat(_0x449eac({
'buttons': true,
'b_y': true
}), '">Y</div>\n </div>\n </div>'), _0x132da7(_0x378b5c.virtualGamepadContainer, !_0x1ed80b);
var _0x17edbf = _0x3a58c8.a.create({
'zone': _0x530042.call(this, '.' .concat(_0x449eac({
'ejs-virtual-gamepad': true
}), ' .').concat(_0x449eac({
'left': true
}))),
'mode': 'static',
'position': {
'left': '50%',
'top': '50%'
},
'color': 'red'
});
_0x17edbf.on('end', function(_0x27f006, _0x1fb527) {
_0x378b5c.syncInput(0x0, 0x4, 0x0), _0x378b5c.syncInput(0x0, 0x5, 0x0), _0x378b5c.syncInput(0x0, 0x6, 0x0), _0x378b5c.syncInput(0x0, 0x7, 0x0);
}), _0x17edbf.on('move', function(_0x1b060e, _0x4ae7d6) {
var _0x2c1832 = _0x4ae7d6.angle.degree;
_0x2c1832 >= 0x1e && _0x2c1832 < 0x96 ? _0x378b5c.syncInput(0x0, 0x4, 0x1) : window.setTimeout(function() {
_0x378b5c.syncInput(0x0, 0x4, 0x0);
}, 0x1e), _0x2c1832 >= 0xd2 && _0x2c1832 < 0x14a ? _0x378b5c.syncInput(0x0, 0x5, 0x1) : window.setTimeout(function() {
_0x378b5c.syncInput(0x0, 0x5, 0x0);
}, 0x1e), _0x2c1832 >= 0x78 && _0x2c1832 < 0xf0 ? _0x378b5c.syncInput(0x0, 0x6, 0x1) : window.setTimeout(function() {
_0x378b5c.syncInput(0x0, 0x6, 0x0);
}, 0x1e), _0x2c1832 >= 0x12c || _0x2c1832 >= 0x0 && _0x2c1832 < 0x3c ? _0x378b5c.syncInput(0x0, 0x7, 0x1) : window.setTimeout(function() {
_0x378b5c.syncInput(0x0, 0x7, 0x0);
}, 0x1e);
});
var _0x2c1832 = _0x449eac({
'virtual-gamepad': true
}),
_0x2ec721 = _0x449eac({
'touch': true
});
_0x1093f4.call(this, _0x530042.call(this, '.' .concat(_0x2c1832)), 'touchmove', function(_0x2ae626) {
_0x2ae626.stopPropagation();
}), _0x1093f4.call(this, _0x530042.call(this, '.' .concat(_0x2c1832, ' .').concat(_0x449eac({
'b_a': true
}))), 'touchstart touchend', function(_0x185083) {
'touchend' === _0x185083.type ? (_0x3a8e2f(_0x185083.target, _0x2ec721, !0x1), window.setTimeout(function() {
_0x378b5c.syncInput(0x0, 0x8, 0x0);
}, 0x1e)) : (_0x3a8e2f(_0x185083.target, _0x2ec721, true), _0x378b5c.syncInput(0x0, 0x8, 0x1)), _0x185083.stopPropagation();
}), _0x1093f4.call(this, _0x530042.call(this, '.' .concat(_0x2c1832, ' .').concat(_0x449eac({
'b_b': true
}))), 'touchstart touchend', function(_0x213c3d) {
'touchend' === _0x213c3d.type ? (_0x3a8e2f(_0x213c3d.target, _0x2ec721, !0x1), window.setTimeout(function() {
_0x378b5c.syncInput(0x0, 0x0, 0x0);
}, 0x1e)) : (_0x3a8e2f(_0x213c3d.target, _0x2ec721, true), _0x378b5c.syncInput(0x0, 0x0, 0x1)), _0x213c3d.stopPropagation();
}), _0x1093f4.call(this, _0x530042.call(this, '.' .concat(_0x2c1832, ' .').concat(_0x449eac({
'b_x': true
}))), 'touchstart touchend', function(_0x5ce708) {
'touchend' === _0x5ce708.type ? (_0x3a8e2f(_0x5ce708.target, _0x2ec721, !0x1), window.setTimeout(function() {
_0x378b5c.syncInput(0x0, 0x9, 0x0);
}, 0x1e)) : (_0x3a8e2f(_0x5ce708.target, _0x2ec721, true), _0x378b5c.syncInput(0x0, 0x9, 0x1)), _0x5ce708.stopPropagation();
}), _0x1093f4.call(this, _0x530042.call(this, '.' .concat(_0x2c1832, ' .').concat(_0x449eac({
'b_y': true
}))), 'touchstart touchend', function(_0x1cfae6) {
'touchend' === _0x1cfae6.type ? (_0x3a8e2f(_0x1cfae6.target, _0x2ec721, !0x1), window.setTimeout(function() {
_0x378b5c.syncInput(0x0, 0x1, 0x0);
}, 0x1e)) : (_0x3a8e2f(_0x1cfae6.target, _0x2ec721, true), _0x378b5c.syncInput(0x0, 0x1, 0x1)), _0x1cfae6.stopPropagation();
}), _0x1093f4.call(this, _0x530042.call(this, '.' .concat(_0x2c1832, ' .').concat(_0x449eac({
'b_l': true
}))), 'touchstart touchend', function(_0x1a50ab) {
'touchend' === _0x1a50ab.type ? (_0x3a8e2f(_0x1a50ab.target, _0x2ec721, !0x1), window.setTimeout(function() {
_0x378b5c.syncInput(0x0, 0xa, 0x0);
}, 0x1e)) : (_0x3a8e2f(_0x1a50ab.target, _0x2ec721, true), _0x378b5c.syncInput(0x0, 0xa, 0x1)), _0x1a50ab.stopPropagation();
}), _0x1093f4.call(this, _0x530042.call(this, '.' .concat(_0x2c1832, ' .').concat(_0x449eac({
'b_r': true
}))), 'touchstart touchend', function(_0x3b8139) {
'touchend' === _0x3b8139.type ? (_0x3a8e2f(_0x3b8139.target, _0x2ec721, !0x1), window.setTimeout(function() {
_0x378b5c.syncInput(0x0, 0xb, 0x0);
}, 0x1e)) : (_0x3a8e2f(_0x3b8139.target, _0x2ec721, true), _0x378b5c.syncInput(0x0, 0xb, 0x1)), _0x3b8139.stopPropagation();
}), _0x1093f4.call(this, _0x530042.call(this, '.' .concat(_0x2c1832, ' .').concat(_0x449eac({
'b_start': true
}))), 'touchstart touchend', function(_0x47af91) {
'touchend' === _0x47af91.type ? (_0x3a8e2f(_0x47af91.target, _0x2ec721, !0x1), window.setTimeout(function() {
_0x378b5c.syncInput(0x0, 0x3, 0x0);
}, 0x1e)) : (_0x3a8e2f(_0x47af91.target, _0x2ec721, true), _0x378b5c.syncInput(0x0, 0x3, 0x1)), _0x47af91.stopPropagation();
}), _0x1093f4.call(this, _0x530042.call(this, '.' .concat(_0x2c1832, ' .').concat(_0x449eac({
'b_c': true
}))), 'touchstart touchend', function(_0x47af91) {
_0x5ab74d.toggleControls.call(_0xa88a13, true);
setTimeout(function() {
_0x5ab74d.toggleControls.call(_0xa88a13, false);
}, 5000)
}), _0x1093f4.call(this, _0x530042.call(this, '.' .concat(_0x2c1832, ' .').concat(_0x449eac({
'b_select': true
}))), 'touchstart touchend', function(_0x37f594) {
'touchend' === _0x37f594.type ? (_0x3a8e2f(_0x37f594.target, _0x2ec721, !0x1), window.setTimeout(function() {
_0x378b5c.syncInput(0x0, 0x2, 0x0);
}, 0x1e)) : (_0x3a8e2f(_0x37f594.target, _0x2ec721, true), _0x378b5c.syncInput(0x0, 0x2, 0x1)), _0x37f594.stopPropagation();
});
} else _0x132da7(_0x378b5c.virtualGamepadContainer, !_0x1ed80b);
},
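    /*
     * toggleVirtualGamepad above builds a touch overlay: a D-pad driven by a
     * joystick widget (the create({ zone, mode: 'static', ... }) call looks like a
     * nipplejs-style API; that library name is an assumption) plus A/B/X/Y, L/R
     * and START/SELECT buttons wired through touchstart/touchend. The joystick's
     * angle in degrees is mapped onto the four d-pad inputs with overlapping
     * ranges so diagonals press two directions at once; the original releases via
     * short timeouts, the sketch below presses and releases directly. Inputs 4-7
     * are up/down/left/right (RetroArch joypad IDs) for player 0 in this file:
     *
     *   function joystickToDpad(degree, syncInput) {
     *     syncInput(0, 4, degree >= 30  && degree < 150 ? 1 : 0);  // up
     *     syncInput(0, 5, degree >= 210 && degree < 330 ? 1 : 0);  // down
     *     syncInput(0, 6, degree >= 120 && degree < 240 ? 1 : 0);  // left
     *     syncInput(0, 7, degree >= 300 || degree < 60  ? 1 : 0);  // right
     *   }
     */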
'setup': function() {
var _0xa88a13 = this;
if (this.game) {
if (_0x378b5c.playerName = _0xa88a13.config.playerName, _0x3a8e2f(this.elements.container, this.config.classNames.type, true), this.elements.container.setAttribute('tabindex', '0'), this.color) {
var _0x17edbf = function(_0x175d8e) {
var _0x17edbf = _0x175d8e.toLowerCase();
if (_0x17edbf && /^#([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$/ .test(_0x17edbf)) {
if (0x4 === _0x17edbf.length) {
for (var _0x2c1832 = '#', _0x4c881f = 0x1; _0x4c881f < 0x4; _0x4c881f += 0x1) _0x2c1832 += _0x17edbf.slice(_0x4c881f, _0x4c881f + 0x1).concat(_0x17edbf.slice(_0x4c881f, _0x4c881f + 0x1));
_0x17edbf = _0x2c1832;
}
for (var _0x5ac4ff = [], _0x324169 = 0x1; _0x324169 < 0x7; _0x324169 += 0x2) _0x5ac4ff.push(parseInt('0x' .concat(_0x17edbf.slice(_0x324169, _0x324169 + 0x2)), 0x10));
return _0x5ac4ff;
}
return null;
}(this.color);
_0x17edbf && this.elements.container.setAttribute('style', '--ejs-primary-color: ' .concat(_0x17edbf[0x0], ', ').concat(_0x17edbf[0x1], ', ').concat(_0x17edbf[0x2]));
}
if (this.elements.wrapper = _0x428003('div', {
'class': this.config.classNames.video
}), _0x580edd(this.game, this.elements.wrapper), 'video' !== _0xa88a13.game.tagName.toLocaleLowerCase()) {
var _0x2c1832 = this.config.volume;
Object.defineProperty(_0xa88a13.game, 'volume', {
'get': function() {
return _0x2c1832;
},
'set': function(_0x4cca18) {
_0x2c1832 = _0x4cca18, _0xbae705.call(_0xa88a13, _0xa88a13.game, 'volumechange');
}
});
var _0x541fe1 = _0xa88a13.storage.get('muted');
Object.defineProperty(_0xa88a13.game, 'muted', {
'get': function() {
return _0x541fe1;
},
'set': function(_0x54b6e0) {
var _0x2c1832 = !!_0x1e2c68.boolean(_0x54b6e0) && _0x54b6e0;
_0x541fe1 = _0x2c1832, _0xbae705.call(_0xa88a13, _0xa88a13.game, 'volumechange');
}
});
}
window.addEventListener('beforeunload', function(_0x34b11a) {
if (_0xa88a13.started) {
var _0x2c1832 = _0xa88a13.startName.split('/').pop().split('.'),
_0x478412 = _0x2c1832.slice(0x0, _0x2c1832.length - 0x1).join('.');
_0x27f4c4.FS.syncfs(function(_0x1026f5) {});
try {
var _0x40d46a = 'srm';
'nds' === _0xa88a13.system && (_0x40d46a = 'dsv'), _0x27f4c4.FS.unlink('/data/saves/' .concat(_0x478412, '.').concat(_0x40d46a));
} catch (_0x2c3bc8) {
console.log('fail to save');
}
_0x378b5c.saveSavFiles(), _0x27f4c4.FS.syncfs(function(_0x2c4b20) {});
}
}, true);
} else console.warn('No game element found!');
},
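    /*
     * setup above accepts an optional accent color, expands #rgb shorthand to
     * #rrggbb, splits it into an [r, g, b] triple and exposes it as the
     * --ejs-primary-color CSS custom property on the container. A compact sketch
     * of that parser:
     *
     *   function hexToRgb(hex) {
     *     var h = hex.toLowerCase();
     *     if (!/^#([0-9a-f]{3}|[0-9a-f]{6})$/.test(h)) return null;
     *     if (h.length === 4) h = '#' + h[1] + h[1] + h[2] + h[2] + h[3] + h[3];
     *     return [1, 3, 5].map(function(i) { return parseInt(h.slice(i, i + 2), 16); });
     *   }
     *
     *   // hexToRgb('#1AA') -> [17, 170, 170]
     */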
'listeners': function() {
var _0xa88a13 = this;
_0xa88a13.elements.buttons.restart && _0x1093f4.call(_0xa88a13, _0xa88a13.elements.buttons.restart, 'click', function(_0x5cfc51) {
if (_0x378b5c.restartGame(), _0x378b5c.inputsData = {}, _0x378b5c.connected && _0x378b5c.connection.isInitiator) {
for (var _0x17edbf = _0x378b5c.getStateInfo().split('|'), _0x2c1832 = _0x17edbf[0x0] >> 0x0, _0x1036de = (_0x17edbf[0x2], new Uint8Array(_0x2c1832)), _0x532138 = _0x17edbf[0x1] >> 0x0, _0x5a4c27 = 0x0; _0x5a4c27 < _0x2c1832; _0x5a4c27 += 0x1) _0x1036de[_0x5a4c27] = _0x27f4c4.Module.getValue(_0x532138 + _0x5a4c27);
var _0x5a3d1a = new File([_0x1036de], '0-game.state', {
'type': '',
'lastModified': new Date()
});
_0x378b5c.connection.shareFile(_0x5a3d1a), _0x378b5c.inputsData = {}, _0x378b5c.wait = true, _0x378b5c.systemPause(0x1), _0x27f4c4.Module.pauseMainLoop(), _0x378b5c.disableControl(true);
}
}), _0xa88a13.elements.buttons.screenRecord && _0x1093f4.call(_0xa88a13, _0xa88a13.elements.buttons.screenRecord, 'click', function(_0x5cfc51) {
if (! _0xa88a13.recordData) {
_0xa88a13.recordData = {started:false, data:[], stopped: false}
}
if (! _0xa88a13.recordData.started) { //start recording
_0xa88a13.elements.buttons.screenRecord.getElementsByClassName('ejs--74c6d4176d27e37a19d2e9e61de8f4')[0].innerHTML = 'Stop Screen Recording';
                            // pick the first WebM codec this browser's MediaRecorder supports
                            var candidates = ['video/webm; codecs=h264', 'video/webm; codecs=H264', 'video/webm; codecs=vp9', 'video/webm; codecs=vp8', 'video/webm; codecs=avc1', 'video/webm; codecs=vp9.0', 'video/webm; codecs=vp8.0'];
                            var options = {};
                            for (var ci = 0; ci < candidates.length; ci++) {
                                if (MediaRecorder.isTypeSupported(candidates[ci])) {
                                    options = {mimeType: candidates[ci]};
                                    break;
                                }
                            }
_0xa88a13.recordData.stream = _0x27f4c4.Module.canvas.captureStream(30);
_0xa88a13.recordData.recorder = new MediaRecorder(_0xa88a13.recordData.stream, options);
//_0xa88a13.recordData.recorder.addTrack() // TODO - find audio element
_0xa88a13.recordData.recorder.ondataavailable = function(e) {
if (e.data.size > 0) {
_0xa88a13.recordData.data.push(e.data)
}
if (_0xa88a13.recordData.stopped) {
var a = document.createElement("a")
a.href = window.URL.createObjectURL(new Blob(_0xa88a13.recordData.data, {type: "video/webm"}))
a.download = 'record.webm'
a.click()
window.URL.revokeObjectURL(a.href)
delete _0xa88a13.recordData
_0xa88a13.elements.buttons.screenRecord.getElementsByClassName('ejs--74c6d4176d27e37a19d2e9e61de8f4')[0].innerHTML = 'Start Screen Recording';
}
}
_0xa88a13.recordData.recorder.start()
_0xa88a13.recordData.started = true
} else if (_0xa88a13.recordData.started) { //stop recording
_0xa88a13.recordData.recorder.stop()
_0xa88a13.recordData.stopped = true
}
}), _0xa88a13.elements.buttons.play && Array.from(_0xa88a13.elements.buttons.play).forEach(function(_0x24495b) {
_0x1093f4.call(_0xa88a13, _0x24495b, 'click', function(_0x966382) {
_0xa88a13.playing ? (_0xa88a13.playing = !0x1, _0x27f4c4.Module.pauseMainLoop(), _0x378b5c.connected && _0x378b5c.connection.isInitiator && _0x378b5c.connection.send(JSON.stringify({
'act': 'pause'
})), _0xbae705.call(_0xa88a13, _0xa88a13.elements.container, 'pause', !0x1, {
'pause': true
})) : (_0xa88a13.playing = true, _0x27f4c4.Module.resumeMainLoop(), _0x378b5c.connected && _0x378b5c.connection.isInitiator && _0x378b5c.connection.send(JSON.stringify({
'act': 'resume'
})), _0xbae705.call(_0xa88a13, _0xa88a13.elements.container, 'pause', !0x1, {
'pause': !0x1
}));
});
}), _0xa88a13.elements.buttons.netplay && _0x1093f4.call(_0xa88a13, _0xa88a13.elements.buttons.netplay, 'click', function(_0x2317b9) {
_0x378b5c.disableControl(true), _0x132da7(_0xa88a13.elements.dialogs.netplay, !0x1), _0x378b5c.players.length > 0x0 ? _0x378b5c.openRoom(_0xa88a13) : (_0x378b5c.stopLoadRooms(), _0x378b5c.loadRoomsList()), _0x378b5c.playerName || _0x4d8495.show('modal-9de6c4e9ce2b9361', {
'closeByEsckey': !0x1
});
}), _0xa88a13.elements.buttons.cheat && _0x1093f4.call(_0xa88a13, _0xa88a13.elements.buttons.cheat, 'click', function(_0x44b01a) {
_0x132da7(_0xa88a13.elements.dialogs.cheat, !0x1);
});
_0x1093f4.call(_0xa88a13, _0xa88a13.elements.container, 'savestate', function(_0x25b541) {
var _0x2c1832 = _0x25b541.detail.state;
if (_0x1e2c68.function(_0xa88a13.config.onsavestate)) _0xa88a13.config.onsavestate.call(null, {
'screenshot': _0x25b541.detail.screenshot,
'state': _0x2c1832
});
else {
var _0x2b5fc8 = new Blob([_0x2c1832]),
_0x152605 = _0xa88a13.startName.split('/').pop().split('.'),
_0x51d471 = _0x152605.slice(0x0, _0x152605.length - 0x1).join('.')
if (typeof _0xa88a13.gameName == 'string') {
var _0x51d471 = _0xa88a13.gameName
}
var _0x49f6b6 = _0x51d471 ? '' .concat(_0x51d471, '.state') : 'game.state';
_0x4e4ca6()(_0x2b5fc8, _0x49f6b6);
}
_0xa88a13.elements.container.focus();
});
_0xa88a13.game.stateloadField = _0x428003('input'), _0xa88a13.game.stateloadField.type = 'file', _0xa88a13.game.stateloadField.onchange = function _0x17edbf(_0xcd4599) {
var _0xfd246e, _0x3fdd71 = _0xcd4599.target.files;
_0x3fdd71.length > 0x0 && ((_0xfd246e = new FileReader()).file_name = _0x3fdd71[0x0].name, _0xfd246e.onload = function() {
var _0xa88a13 = new Uint8Array(_0xfd246e.result);
EJS_loadState(_0xa88a13);
}, _0xfd246e.readAsArrayBuffer(_0x3fdd71[0x0])), _0xa88a13.game.stateloadField = _0x428003('input'), _0xa88a13.game.stateloadField.type = 'file', _0xa88a13.game.stateloadField.onchange = _0x17edbf;
}, _0x1093f4.call(_0xa88a13, _0xa88a13.elements.container, 'loadstate', function(_0x13504c) {
_0x1e2c68.function(_0xa88a13.config.onloadstate) ? _0xa88a13.config.onloadstate.call(null, _0xa88a13.loadState) : _0xa88a13.game.stateloadField.click(), _0xa88a13.elements.container.focus();
}), document.addEventListener('visibilitychange', function() {
'hidden' === document.visibilityState ? console.log('hidden') : console.log('active');
});
},
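    /*
     * The screenRecord handler above records gameplay by capturing the emulator
     * canvas as a MediaStream, feeding it to a MediaRecorder with the first
     * supported WebM codec, buffering the produced chunks and, once stopped,
     * downloading them as record.webm through a temporary object URL. A minimal
     * sketch of that pipeline (canvas stands in for the module's render canvas):
     *
     *   function recordCanvas(canvas) {
     *     var chunks = [];
     *     var stream = canvas.captureStream(30);          // 30 fps
     *     var options = MediaRecorder.isTypeSupported('video/webm; codecs=vp9')
     *       ? { mimeType: 'video/webm; codecs=vp9' } : {};
     *     var recorder = new MediaRecorder(stream, options);
     *     recorder.ondataavailable = function(e) { if (e.data.size > 0) chunks.push(e.data); };
     *     recorder.onstop = function() {
     *       var a = document.createElement('a');
     *       a.href = URL.createObjectURL(new Blob(chunks, { type: 'video/webm' }));
     *       a.download = 'record.webm';
     *       a.click();
     *       URL.revokeObjectURL(a.href);
     *     };
     *     recorder.start();
     *     return recorder;                                // call .stop() to finish
     *   }
     */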
'bindFunction': function() {
var _0xa88a13 = this,
_0x17edbf = _0xa88a13.system;
if (_0x27f4c4.Module && _0x27f4c4.Module._set_volume) {
var _0x2c1832 = _0x27f4c4.Module.cwrap('set_volume', 'null', ['number']);
_0x378b5c.setVolume = function() {
var _0x17edbf = 0x0;
_0xa88a13.volume < 0x1 && (_0x17edbf = 0x0 === _0xa88a13.volume ? -0x50 : 0x1e * _0xa88a13.volume - 0x1e, _0x2c1832(_0x17edbf)), _0xa88a13.muted && (_0x17edbf = -0x50), _0x2c1832(_0x17edbf);
};
}
_0x378b5c.setVariable = _0x27f4c4.Module.cwrap('set_variable', 'null', ['string', 'string']), _0x378b5c.simulateInputFn = _0x27f4c4.Module.cwrap('simulate_input', 'null', ['number', 'number', 'number']);
var _0x2c63cc = {
0: null,
1: null,
2: null,
3: null,
4: null,
5: null,
6: null,
7: null,
8: null,
9: null,
10: null,
11: null,
12: null,
13: null,
14: null,
15: null,
16: null,
17: null,
18: null,
19: null,
20: null,
21: null,
22: null,
23: null,
24: null,
25: null,
26: null,
27: null,
28: null,
29: null,
30: null,
31: null
};
_0x378b5c.simulateInput = function(_0x249751, _0x480d05, _0x50f784) {
if (_0x378b5c.connected) {
if (_0xa88a13.playing && !_0x378b5c.disableInput) {
var _0x35b9c1 = _0x378b5c.connection.userid,
_0x4d96ad = _0x378b5c.roomMaster === _0x35b9c1,
_0xf1bb97 = _0x378b5c.players.indexOf(_0x35b9c1),
_0x52e1f4 = _0x378b5c.currentFrameNum();
_0x249751 >> 0x0 == 0x0 && (_0x2c63cc[_0x480d05] !== _0x50f784 && (_0x4d96ad ? (_0x378b5c.inputsData[_0x52e1f4] || (_0x378b5c.inputsData[_0x52e1f4] = []), _0x378b5c.inputsData[_0x52e1f4].push({
'index': _0xf1bb97,
'key': _0x480d05,
'status': _0x50f784,
'frame': _0x52e1f4
}), _0x378b5c.simulateInputFn(_0xf1bb97, _0x480d05, _0x50f784)) : _0x378b5c.connection.send(JSON.stringify({
'act': 'sync-control',
'value': ['' .concat(_0xf1bb97, '|').concat(_0x480d05, '|').concat(_0x50f784, '|').concat(_0x52e1f4)]
}), _0x378b5c.roomMaster)), _0x2c63cc[_0x480d05] = _0x50f784);
}
} else _0x378b5c.disableInput || _0x378b5c.simulateInputFn(_0x249751, _0x480d05, _0x50f784);
}, _0x378b5c.syncInput = _0x378b5c.simulateInput;
var _0x762355 = _0x27f4c4.Module.cwrap('shader_enable', 'null', ['number']);
_0x378b5c.toggleShader = function(_0x433182) {
return _0x762355(_0x433182);
}, _0x378b5c.getStateInfo = _0x27f4c4.Module.cwrap('get_state_info', 'string', []), _0x378b5c.saveState = function() {
var _0xa88a13, _0x17edbf = _0x378b5c.getStateInfo().split('|'),
_0x2c1832 = _0x17edbf[0x0] >> 0x0;
if (_0x2c1832 > 0x0) {
_0xa88a13 = new Uint8Array(_0x2c1832);
for (var _0x2ef6be = _0x17edbf[0x1] >> 0x0, _0x3cc34b = 0x0; _0x3cc34b < _0x2c1832; _0x3cc34b++) _0xa88a13[_0x3cc34b] = _0x27f4c4.Module.getValue(_0x2ef6be + _0x3cc34b);
}
return _0xa88a13;
};
var _0x25a7a2 = _0x27f4c4.Module.cwrap('load_state', 'number', ['string', 'number']);
_0x378b5c.loadState = function(_0x4389ae, _0x1d4918) {
var _0x91cd69;
if (_0x91cd69 = _0x4389ae, _0x27f4c4.FS.createDataFile('/', 'game.state', _0x91cd69, true, true), null === _0x1d4918 && (_0x1d4918 = 0x0), _0x25a7a2('game.state', _0x1d4918), 'arcade' === _0x17edbf ? setTimeout(function() {
_0x378b5c.getStateInfo(), _0x25a7a2('game.state', _0x1d4918), _0x27f4c4.FS.unlink('game.state');
}, 0xa) : _0x27f4c4.FS.unlink('game.state'), _0x378b5c.connected && _0x378b5c.connection.isInitiator) {
for (var _0x54607c = _0x378b5c.getStateInfo().split('|'), _0x1ab9c9 = _0x54607c[0x0] >> 0x0, _0xce58ec = (_0x54607c[0x2], new Uint8Array(_0x1ab9c9)), _0x4aff4f = _0x54607c[0x1] >> 0x0, _0x3bfae0 = 0x0; _0x3bfae0 < _0x1ab9c9; _0x3bfae0 += 0x1) _0xce58ec[_0x3bfae0] = _0x27f4c4.Module.getValue(_0x4aff4f + _0x3bfae0);
var _0x227419 = new File([_0xce58ec], '0-game.state', {
'type': '',
'lastModified': new Date()
});
_0x378b5c.connection.shareFile(_0x227419), _0x378b5c.inputsData = {}, _0x378b5c.wait = true, _0x378b5c.systemPause(0x1), _0x27f4c4.Module.pauseMainLoop(), _0x378b5c.disableControl(true);
}
_0xa88a13.elements.container.focus();
}, _0x27f4c4.Module._set_cheat && (_0x378b5c.setCheat = _0x27f4c4.Module.cwrap('set_cheat', 'number', ['number', 'number', 'string'])), _0x27f4c4.Module._reset_cheat && (_0x378b5c.resetCheat = _0x27f4c4.Module._reset_cheat), _0x378b5c.quickSaveState = function() {
if (_0xa88a13.started && !_0x378b5c.connected) {
if (_0xa88a13.statesSupported === false) {
_0xa88a13.elements.widgets.stateInfoDiv.innerHTML = 'CANNOT CURRENTLY SAVE STATE';
for (var i=0; i<_0x378b5c.saveMsgTransitions.length; i++) {
_0x378b5c.saveMsgTransitions[i] = false;
}
var a = _0x378b5c.saveMsgTransitions.length;
_0x378b5c.saveMsgTransitions.push(true);
setTimeout(function() {
if (_0x378b5c.saveMsgTransitions[a]) {
_0xa88a13.elements.widgets.stateInfoDiv.innerHTML = '';
_0x378b5c.saveMsgTransitions = [];
}
}, 1500)
return;
}
if (_0x2593da && typeof _0x2593da.getCoreOptionsValues == 'function') {
var slot = _0x2593da.getCoreOptionsValues()['save-state-slot'];
if (! slot) {
slot = 1;
}
} else {
var slot = 1;
}
var name = slot + '-quick.state';
try {
_0x27f4c4.FS.unlink(name);
} catch (_0x4b4d4c) {}
var success = true;
try {
var _0x17edbf = _0x378b5c.saveState();
_0x27f4c4.FS.createDataFile('/', name, _0x17edbf, true, true);
} catch(e) {
success = false;
}
if (! success) {
_0xa88a13.elements.widgets.stateInfoDiv.innerHTML = 'FAILED TO SAVE STATE TO SLOT ' + slot;
} else {
_0xa88a13.elements.widgets.stateInfoDiv.innerHTML = 'STATE SAVED TO SLOT ' + slot;
}
for (var i=0; i<_0x378b5c.saveMsgTransitions.length; i++) {
_0x378b5c.saveMsgTransitions[i] = false;
}
var a = _0x378b5c.saveMsgTransitions.length;
_0x378b5c.saveMsgTransitions.push(true);
setTimeout(function() {
if (_0x378b5c.saveMsgTransitions[a]) {
_0xa88a13.elements.widgets.stateInfoDiv.innerHTML = '';
_0x378b5c.saveMsgTransitions = [];
}
}, 1500)
}
}, _0x378b5c.saveMsgTransitions = [], _0x378b5c.quickLoadState = function() {
if (_0xa88a13.started && !_0x378b5c.connected) {
if (_0xa88a13.statesSupported === false) {
_0xa88a13.elements.widgets.stateInfoDiv.innerHTML = 'CANNOT CURRENTLY LOAD STATE';
for (var i=0; i<_0x378b5c.saveMsgTransitions.length; i++) {
_0x378b5c.saveMsgTransitions[i] = false;
}
var a = _0x378b5c.saveMsgTransitions.length;
_0x378b5c.saveMsgTransitions.push(true);
setTimeout(function() {
if (_0x378b5c.saveMsgTransitions[a]) {
_0xa88a13.elements.widgets.stateInfoDiv.innerHTML = '';
_0x378b5c.saveMsgTransitions = [];
}
}, 1500)
return;
}
if (_0x2593da && typeof _0x2593da.getCoreOptionsValues == 'function') {
var slot = _0x2593da.getCoreOptionsValues()['save-state-slot'];
if (! slot) {
slot = 1;
}
} else {
var slot = 1;
}
var name = slot + '-quick.state';
var success = true;
try {
_0x25a7a2(name, 0x0), 'arcade' === _0x17edbf && setTimeout(function() {
_0x378b5c.getStateInfo(), _0x25a7a2(name, 0x0);
}, 0xa);
} catch (_0x4ee386) {
success = false;
}
if (! success) {
_0xa88a13.elements.widgets.stateInfoDiv.innerHTML = 'FAILED TO LOAD STATE FROM SLOT ' + slot;
} else {
_0xa88a13.elements.widgets.stateInfoDiv.innerHTML = 'LOADED STATE FROM SLOT ' + slot;
}
for (var i=0; i<_0x378b5c.saveMsgTransitions.length; i++) {
_0x378b5c.saveMsgTransitions[i] = false;
}
var a = _0x378b5c.saveMsgTransitions.length;
_0x378b5c.saveMsgTransitions.push(true);
setTimeout(function() {
if (_0x378b5c.saveMsgTransitions[a]) {
_0xa88a13.elements.widgets.stateInfoDiv.innerHTML = '';
_0x378b5c.saveMsgTransitions = [];
}
}, 1500)
}
}, _0x378b5c.changeStateSlot = function(e) {
if (_0x2593da && typeof _0x2593da.getCoreOptionsValues == 'function') {
var slot = _0x2593da.getCoreOptionsValues()['save-state-slot'];
if (! slot) {
slot = 1;
}
} else {
var slot = 1;
}
var newSlot;
if (typeof e != 'number') {
newSlot = slot + 1;
} else {
newSlot = e;
}
if (newSlot > 9) {
newSlot = 1;
}
_0x7f9f36.updateCoreOptions.call(_0xa88a13, 'save-state-slot', newSlot)
_0xa88a13.elements.widgets.stateInfoDiv.innerHTML = 'SET SAVE STATE SLOT TO ' + newSlot;
for (var i=0; i<_0x378b5c.saveMsgTransitions.length; i++) {
_0x378b5c.saveMsgTransitions[i] = false;
}
var a = _0x378b5c.saveMsgTransitions.length;
_0x378b5c.saveMsgTransitions.push(true);
setTimeout(function() {
if (_0x378b5c.saveMsgTransitions[a]) {
_0xa88a13.elements.widgets.stateInfoDiv.innerHTML = '';
_0x378b5c.saveMsgTransitions = [];
}
}, 1500)
}, window.EJS_loadState = function(_0x4dd6a5) {
_0x378b5c.loadState(_0x4dd6a5, 0x0), setTimeout(function() {
if (_0x378b5c.connected) {
var _0x17edbf = new File([_0x4dd6a5], '0-game.state', {
'type': '',
'lastModified': new Date()
});
_0x378b5c.connection.shareFile(_0x17edbf), _0x378b5c.inputsData = {}, _0x378b5c.wait = true, _0x378b5c.systemPause(0x1), _0x27f4c4.Module.pauseMainLoop(), _0x378b5c.disableControl(true);
}
}, 0x1e);
};
var _0x840cf4 = _0x27f4c4.Module.cwrap('cmd_take_screenshot', '', []);
_0x378b5c.getScreenData = function() {
return _0x378b5c.toggleShader(0x0), _0x840cf4(), _0x378b5c.toggleShader(0x1), _0x27f4c4.FS.readFile('screenshot.png');
}, _0x378b5c.restartGame = _0x27f4c4.Module.cwrap('system_restart', '', []), _0x378b5c.currentFrameNum = _0x27f4c4.Module.cwrap('get_current_frame_count', '', []), _0x378b5c.saveSavFiles = _0x27f4c4.Module.cwrap('cmd_savefiles', '', []), _0x27f4c4.Module._get_core_options && (_0x378b5c.getGameCoreOptions = _0x27f4c4.Module.cwrap('get_core_options', 'string', [])), _0x378b5c.systemPause = function(_0x29d94a) {
return !0x1;
}, _0x378b5c.controllers = _0x2593da.storage.get('controllers'), _0x378b5c.controllers || (_0x378b5c.controllers = JSON.parse(JSON.stringify(_0x378b5c.defaultControllers)));
}
},
_0xdcec2a = _0x378b5c;
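/*
 * bindFunction above bridges the UI to the Emscripten core through Module.cwrap:
 * set_volume, set_variable, simulate_input, shader_enable, get_state_info,
 * load_state, cmd_take_screenshot, system_restart and cmd_savefiles. Save states
 * are produced by asking the core for "<size>|<address>|..." via get_state_info
 * and copying that many bytes out of the WASM heap with Module.getValue; quick
 * saves are then written back as '<slot>-quick.state' via FS.createDataFile.
 * A sketch of that copy:
 *
 *   function readSaveState(Module, getStateInfo) {
 *     var parts = getStateInfo().split('|');
 *     var size = parts[0] >> 0, ptr = parts[1] >> 0;
 *     if (size <= 0) return null;
 *     var state = new Uint8Array(size);
 *     for (var i = 0; i < size; i++) state[i] = Module.getValue(ptr + i);
 *     return state;
 *   }
 */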
function _0x5005e6(_0x7f3270, _0x477158) {
return function(_0x20890f) {
if (Array.isArray(_0x20890f)) return _0x20890f;
}(_0x7f3270) || function(_0x557107, _0x409476) {
var _0x2c1832 = [],
_0x488a47 = true,
_0x34f76e = !0x1,
_0x19606f = void 0x0;
try {
for (var _0x328f4d, _0x4d9086 = _0x557107[Symbol.iterator](); !(_0x488a47 = (_0x328f4d = _0x4d9086.next()).done) && (_0x2c1832.push(_0x328f4d.value), !_0x409476 || _0x2c1832.length !== _0x409476); _0x488a47 = true);
} catch (_0x13c09a) {
_0x34f76e = true, _0x19606f = _0x13c09a;
} finally {
try {
_0x488a47 || null == _0x4d9086.return || _0x4d9086.return();
} finally {
if (_0x34f76e) throw _0x19606f;
}
}
return _0x2c1832;
}(_0x7f3270, _0x477158) || function() {
throw new TypeError('Invalid attempt to destructure non-iterable instance');
}();
}
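/*
 * _0x5005e6 appears to be the usual transpiler "sliced to array" helper backing
 * array destructuring: it returns the value unchanged when it is already an
 * array, otherwise pulls at most n values out of its iterator, and throws when
 * the value is not iterable. Equivalent usage:
 *
 *   var firstTwo = _0x5005e6(new Set([1, 2, 3]), 2);   // [1, 2]
 */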
var _0x7f9f36 = {
'storage': null,
'coreOptionsValues': {},
'contextMenu': null,
'normalOptions': {
'shader': {
'label': 'Shader',
'options': {
'disabled': 'Disabled',
'2xScaleHQ.glslp': '2xScaleHQ',
'4xScaleHQ.glslp': '4xScaleHQ',
'crt-easymode.glslp': 'CRT easymode',
'crt-aperture.glslp': 'CRT aperture',
'crt-geom.glslp': 'CRT geom'
},
'default': 'disabled'
},
'virtual-gamepad': {
'label': 'Virtual Gamepad',
'options': {
'disabled': 'Disabled',
'enabled': 'Enabled'
},
'default': 'enabled'
}
},
'defaultCoreOptionsValues': function() {
var _0xa88a13 = {};
switch (this.system) {
case 'pce':
_0xa88a13 = {
'pce_nospritelimit': 'enabled'
};
break;
case 'jaguar':
_0xa88a13 = {
'virtualjaguar_doom_res_hack': 'enabled'
};
break;
case 'psx':
_0xa88a13 = {
'pcsx_rearmed_memcard2': 'enabled'
};
break;
case 'nds':
_0xa88a13 = {};
}
return _0xa88a13;
},
'getCoreOptionsValues': function() {
var _0xa88a13 = _0x7f9f36.storage.get('core-options');
return _0x1e2c68.empty(_0xa88a13) && (_0xa88a13 = {}), _0xa88a13;
},
'getCoreOptions': function() {
var _0xa88a13 = {};
switch (this.system) {
case 'nes':
_0xa88a13.fceumm_palette = {
'label': 'Color Palette',
'options': {
'default': 'Default',
'asqrealc': 'asqrealc',
'nintendo-vc': 'nintendo-vc',
'rgb': 'rgb',
'yuv-v3': 'yuv-v3',
'unsaturated-final': 'unsaturated-final',
'sony-cxa2025as-us': 'sony-cxa2025as-us',
'pal': 'pal',
'bmf-final2': 'bmf-final2',
'bmf-final3': 'bmf-final3',
'smooth-fbx': 'smooth-fbx',
'composite-direct-fbx': 'composite-direct-fbx',
'pvm-style-d93-fbx': 'pvm-style-d93-fbx',
'nescap': 'nescap',
'ntsc-hardware-fbx': 'ntsc-hardware-fbx',
'nes-classic-fbx-fs': 'nes-classic-fbx-fs',
'wavebeam': 'wavebeam'
},
'default': 'default'
}, _0xa88a13.fceumm_nospritelimit = {
'label': 'No Sprite Limit',
'options': {
'enabled': 'Enabled',
'disabled': 'Disabled'
},
'default': 'disabled'
}, _0xa88a13.fceumm_sndquality = {
'label': 'Sound Quality',
'options': {
'Low': 'Low',
'High': 'High',
'Very High': 'Very High'
},
'default': 'Low'
}, this.lightgun || (_0xa88a13.system_type = {
'label': '4 Players Support',
'options': {
'nes': 'NES',
'famicom': 'Famicom'
},
'default': 'nes',
'netplay': true
}), _0xa88a13.fceumm_turbo_enable = {
'label': 'Turbo Enable',
'options': ['None', 'Player 1', 'Player 2', 'Both'],
'default': 'None',
'netplay': true
}, _0xa88a13.fceumm_region = {
'label': 'Region',
'options': ['Auto', 'NTSC', 'PAL', 'Dendy'],
'default': 'Auto',
'netplay': true
};
break;
case 'snes':
_0xa88a13 = {
'snes9x_next_overclock': {
'label': 'SuperFX Overclock',
'options': {
'disabled': 'Disabled(10MHz)',
'40MHz': '40MHz',
'60MHz': '60MHz',
'80MHz': '80MHz',
'100MHz': '100MHz'
},
'default': 'disabled',
'netplay': true
}
};
break;
case 'snes2002':
case 'snes2005':
case 'snes2010':
_0xa88a13 = {};
break;
case 'gb':
_0xa88a13 = {
'gambatte_gb_colorization': {
'label': 'GB Colorization',
'options': {
'disabled': 'Disabled',
'auto': 'auto',
'internal': 'internal'
},
'default': 'disabled'
},
'gambatte_gb_internal_palette': {
'label': 'Internal Palette',
'options': {
'GBC - Blue': 'GBC - Blue',
'GBC - Brown': 'GBC - Brown',
'GBC - Dark Blue': 'GBC - Dark Blue',
'GBC - Dark Brown': 'GBC - Dark Brown',
'GBC - Dark Green': 'GBC - Dark Green',
'GBC - Grayscale': 'GBC - Grayscale',
'GBC - Green': 'GBC - Green',
'GBC - Inverted': 'GBC - Inverted',
'GBC - Orange': 'GBC - Orange',
'GBC - Pastel Mix': 'GBC - Pastel Mix',
'GBC - Red': 'GBC - Red',
'GBC - Yellow': 'GBC - Yellow'
},
'default': 'GBC - Blue'
}
};
break;
case 'gba':
_0xa88a13 = {
'mgba_solar_sensor_level': {
'label': 'Solar sensor level',
'options': ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10'],
'default': '0'
},
'mgba_frameskip': {
'label': 'Frameskip',
'options': ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10'],
'default': '0'
}
};
break;
case 'vbanext':
_0xa88a13 = {
'vbanext_turboenable': {
'label': 'Enable Turbo Buttons',
'options': ['disabled', 'enabled'],
'default': 'disabled'
},
'vbanext_turbodelay': {
'label': 'Turbo Delay in frames',
'options': ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15'],
'default': '2'
}
};
break;
case 'n64':
_0xa88a13 = {
'glupen64-aspect': {
'label': 'Aspect Ratio (Need to refresh page)',
'options': ['4:3', '16:9'],
'default': '4:3'
},
'glupen64-43screensize': {
'label': '4:3 Resolution',
'options': ['320x240', '640x480', '960x720', '1280x960'],
'default': '320x240'
},
'glupen64-169screensize': {
'label': '16:9 Resolution',
'options': ['640x360', '960x540', '1280x720'],
'default': '640x360'
},
'glupen64-pak1': {
'label': 'Player 1 Pak',
'options': ['none', 'memory', 'rumble'],
'default': 'none',
'netplay': true
},
'glupen64-pak2': {
'label': 'Player 2 Pak',
'options': ['none', 'memory', 'rumble'],
'default': 'none',
'netplay': true
},
'glupen64-pak3': {
'label': 'Player 3 Pak',
'options': ['none', 'memory', 'rumble'],
'default': 'none',
'netplay': true
},
'glupen64-pak4': {
'label': 'Player 4 Pak',
'options': ['none', 'memory', 'rumble'],
'default': 'none',
'netplay': true
}
};
break;
case 'nds':
_0xa88a13 = {
'desmume_screens_layout': {
'label': 'Screen layout',
'options': ['top/bottom', 'bottom/top', 'left/right', 'right/left', 'top only', 'bottom only', 'quick switch', 'hybrid/top', 'hybrid/bottom'],
'default': 'top/bottom'
},
'screen_rotation': {
'label': 'Screen Rotation',
'options': {
0: '0°',
90: '90°',
180: '180°',
270: '270°'
},
'default': 0x0
},
'desmume_frameskip': {
'label': 'Frameskip',
'options': ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'],
'default': '0'
}
}, _0x2d904a.wasm || delete _0xa88a13.screen_rotation;
break;
case 'vb':
case 'pce':
_0xa88a13 = {};
break;
case 'segaMS':
case 'segaGG':
case 'segaMD':
case 'segaCD':
_0xa88a13 = {
'genesis_plus_gx_no_sprite_limit': {
'label': 'No Sprite Limit',
'options': ['disabled', 'enabled'],
'default': 'disabled'
},
'genesis_plus_gx_overclock': {
'label': 'CPU speed',
'options': ['100%', '125%', '150%', '175%', '200%'],
'default': '100%',
'netplay': true
},
'genesis_plus_gx_sound_output': {
'label': 'Sound output',
'options': ['stereo', 'mono'],
'default': 'stereo'
}
};
break;
case 'sega32x':
_0xa88a13 = {
'picodrive_sprlim': {
'label': 'No sprite limit',
'options': ['disabled', 'enabled'],
'default': 'disabled'
}
};
break;
case 'segaSaturn':
_0xa88a13 = {
'yabause_frameskip': {
'label': 'Frameskip',
'options': ['disabled', 'enabled'],
'default': 'disabled'
}
};
break;
case 'msx':
_0xa88a13 = {
'bluemsx_nospritelimits': {
'label': 'No Sprite Limit',
'options': ['OFF', 'ON'],
'default': 'OFF'
}
};
break;
case 'ws':
case 'ngp':
_0xa88a13 = {};
break;
case 'jaguar':
_0xa88a13 = {
'virtualjaguar_usefastblitter': {
'label': 'Fast Blitter',
'options': ['disabled', 'enabled'],
'default': 'disabled'
},
'virtualjaguar_bios': {
'label': 'Bios',
'options': ['disabled', 'enabled'],
'default': 'disabled'
}
};
break;
case 'psx':
_0xa88a13 = {
'pcsx_rearmed_frameskip': {
'label': 'Frameskip',
'options': [0x0, 0x1, 0x2, 0x3],
'default': 0x0,
'netplay': true
},
'pcsx_rearmed_memcard2': {
'label': 'Enable second memory card',
'options': ['enabled', 'disabled'],
'default': 'enabled',
'netplay': true
},
'pcsx_rearmed_pad1type': {
'label': 'Pad 1 Type',
'options': ['default', 'none', 'standard', 'analog', 'negcon'],
'default': 'default',
'netplay': true
},
'pcsx_rearmed_pad2type': {
'label': 'Pad 2 Type',
'options': ['default', 'none', 'standard', 'analog', 'negcon'],
'default': 'default',
'netplay': true
},
'pcsx_rearmed_pad3type': {
'label': 'Pad 3 Type',
'options': ['default', 'none', 'standard', 'analog', 'negcon'],
'default': 'default',
'netplay': true
},
'pcsx_rearmed_pad4type': {
'label': 'Pad 4 Type',
'options': ['default', 'none', 'standard', 'analog', 'negcon'],
'default': 'default',
'netplay': true
|
'options': ['enabled', 'disabled'],
'default': 'enabled',
'netplay': true
},
'pcsx_rearmed_neon_interlace_enable': {
'label': 'Enable interlacing mode(s)',
'options': ['enabled', 'disabled'],
'default': 'disabled',
'netplay': true
},
'pcsx_rearmed_neon_enhancement_enable': {
'label': 'Enhanced resolution (slow)',
'options': ['enabled', 'disabled'],
'default': 'disabled',
'netplay': true
},
'pcsx_rearmed_neon_enhancement_no_main': {
'label': 'Enhanced resolution speed hack',
'options': ['enabled', 'disabled'],
'default': 'disabled',
'netplay': true
}
};
break;
case 'arcade':
case 'fba0.2.97.29':
_0xa88a13 = {
'fba-aspect': {
'label': 'Aspect ratio',
'options': ['DAR', 'PAR'],
'default': 'DAR'
},
'fba-frameskip': {
'label': 'Frameskip',
'options': ['0', '1', '2', '3', '4', '5'],
'default': '0',
'netplay': true
},
'fba-cpu-speed-adjust': {
'label': 'CPU overclock',
'options': ['100', '110', '120', '130', '140', '150', '160', '170', '180', '190', '200'],
'default': '100',
'netplay': true
},
'fba-diagnostic-input': {
'label': 'Diagnostic Input',
'options': ['None', 'Hold Start', 'Start + A + B', 'Hold Start + A + B', 'Start + L + R', 'Hold Start + L + R', 'Hold Select', 'Select + A + B', 'Hold Select + A + B', 'Select + L + R', 'Hold Select + L + R'],
'default': 'None',
'netplay': true
},
'fba-neogeo-mode': {
'label': 'Force Neo Geo mode',
'options': ['MVS', 'AES', 'UNIBIOS', 'DIPSWITCH'],
'default': 'MVS',
'netplay': true
}
};
break;
case 'mame2003':
case 'mame2010':
case 'mame':
_0xa88a13 = {};
break;
default:
_0xa88a13 = {}, console.warn('Unsupported emulator');
}
if (this.statesSupported === true) {
_0xa88a13['save-state-slot'] = {
'label': 'Save State Slot',
'options': [1, 2, 3, 4, 5, 6, 7, 8, 9],
'default': 1,
'netplay': false
}
}
_0xdcec2a.getGameCoreOptions && _0xdcec2a.getGameCoreOptions().split('\n').forEach(function(_0x2bef5a, _0x49b64b) {
var _0x5995db = _0x2bef5a.split('; '),
_0x16274c = _0x5995db[0x0];
if (0x0 === _0x16274c.indexOf('fba-dipswitch-')) {
var _0x757776 = _0x5995db[0x1].split('|'),
_0x1f895e = _0x16274c.replace(/_/g, ' ').replace(/.+\-(.+)/, '$1');
_0xa88a13[_0x16274c] = {
'label': _0x1f895e,
'options': _0x757776.slice(0x1, -0x1),
'default': _0x757776[0x0].replace('(Default) ', ''),
'netplay': true
};
}
});
return _0xa88a13;
},
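    /*
     * getCoreOptions above merges the static per-system menus with options the
     * core itself reports: getGameCoreOptions() returns one "key; valueA|valueB|..."
     * line per option, and only fba-dipswitch-* entries are turned into extra menu
     * items, with the default taken from the value tagged "(Default) ". A sketch
     * of that line parser, mirroring the slice(1, -1) used above:
     *
     *   function parseDipswitch(line) {
     *     var parts = line.split('; ');
     *     if (parts[0].indexOf('fba-dipswitch-') !== 0) return null;
     *     var values = parts[1].split('|');
     *     return {
     *       key: parts[0],
     *       label: parts[0].replace(/_/g, ' ').replace(/.+\-(.+)/, '$1'),
     *       options: values.slice(1, -1),
     *       defaultValue: values[0].replace('(Default) ', '')
     *     };
     *   }
     */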
'supportNetPlay': function() {
if (this.lightgun || this.mouse) return !0x1;
var _0xa88a13 = window.RTCPeerConnection || window.webkitPeerConnection00 || window.webkitRTCPeerConnection || window.mozRTCPeerConnection,
_0x17edbf = window.mozRTCIceCandidate || window.RTCIceCandidate,
_0x2c1832 = window.mozRTCSessionDescription || window.RTCSessionDescription;
return !!(_0xa88a13 && _0x17edbf && _0x2c1832) && !!(_0xa88a13 && 'createDataChannel' in _0xa88a13.prototype);
},
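    /*
     * supportNetPlay above gates netplay on WebRTC being usable: lightgun/mouse
     * cores opt out, and the check requires a (possibly prefixed)
     * RTCPeerConnection together with RTCIceCandidate, RTCSessionDescription and
     * data-channel support. A condensed version of the same capability test:
     *
     *   function canNetplay() {
     *     var PC = window.RTCPeerConnection || window.webkitRTCPeerConnection || window.mozRTCPeerConnection;
     *     return !!(PC && window.RTCIceCandidate && window.RTCSessionDescription &&
     *               'createDataChannel' in PC.prototype);
     *   }
     */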
'isNetPlay': function() {},
'findElements': function() {
try {
return this.elements.controls = _0x530042.call(this, this.config.selectors.controls.wrapper), this.elements.buttons = {
'play': _0x23ffa1.call(this, this.config.selectors.buttons.play),
'pause': _0x530042.call(this, this.config.selectors.buttons.pause),
'restart': _0x530042.call(this, this.config.selectors.buttons.restart),
'mute': _0x530042.call(this, this.config.selectors.buttons.mute),
'settings': _0x530042.call(this, this.config.selectors.buttons.settings),
'fullscreen': _0x530042.call(this, this.config.selectors.buttons.fullscreen),
'saveState': _0x530042.call(this, this.config.selectors.buttons.saveState),
'screenRecord': _0x530042.call(this, this.config.selectors.buttons.screenRecord),
'loadState': _0x530042.call(this, this.config.selectors.buttons.loadState),
'gamepad': _0x530042.call(this, this.config.selectors.buttons.gamepad),
'netplay': _0x530042.call(this, this.config.selectors.buttons.netplay),
'cheat': _0x530042.call(this, this.config.selectors.buttons.cheat)
}, this.elements.inputs = {
'volume': _0x530042.call(this, this.config.selectors.inputs.volume)
}, true;
} catch (_0x527381) {
return this.debug.warn('It looks like there is a problem with your custom controls HTML', _0x527381), !0x1;
}
},
'createIcon': function(_0x43d42f, _0x42a597) {
var _0x2c1832 = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
_0x154f99(_0x2c1832, _0x5dc0c0(_0x42a597, {
'role': 'presentation',
'focusable': 'false'
}));
var _0x50b7a7 = document.createElement('div');
_0x50b7a7.innerHTML = '<svg>' .concat(this.icons[_0x43d42f], '</svg>');
for (var _0x373099 = _0x50b7a7.childNodes[0x0].childNodes; _0x373099.length > 0x0;) _0x2c1832.appendChild(_0x373099[0x0]); // appendChild moves the node, so the live NodeList shrinks each pass
return _0x2c1832;
},
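    /*
     * createIcon above builds the control icons by creating an <svg> element in
     * the SVG namespace, parsing the stored icon markup through a detached <div>,
     * and moving the resulting nodes into the new element. A sketch of that trick:
     *
     *   function svgFromMarkup(innerMarkup, attributes) {
     *     var svg = document.createElementNS('http://www.w3.org/2000/svg', 'svg');
     *     Object.keys(attributes || {}).forEach(function(k) { svg.setAttribute(k, attributes[k]); });
     *     var holder = document.createElement('div');
     *     holder.innerHTML = '<svg>' + innerMarkup + '</svg>';
     *     while (holder.firstChild.firstChild) svg.appendChild(holder.firstChild.firstChild);
     *     return svg;
     *   }
     */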
'createLabel': function(_0x185043) {
var _0x17edbf = arguments.length > 0x1 && void 0x0 !== arguments[0x1] ? arguments[0x1] : {},
_0x2c1832 = _0xb9b2ff.get(_0x185043, this.config),
_0x3d681f = {};
_0x3d681f[_0x17edbf.class] = true, _0x3d681f[this.config.classNames.hidden] = true;
var _0x58a1d4 = Object.assign({}, _0x17edbf, {
'class': [_0x17edbf.class, this.config.classNames.tooltip].filter(Boolean).join(' ')
});
return _0x428003('span', _0x58a1d4, _0x2c1832);
},
'createBadge': function(_0x415aef) {
if (_0x1e2c68.empty(_0x415aef)) return null;
var _0x17edbf = _0x428003('span', {
'class': this.config.classNames.menu.value
});
return _0x17edbf.appendChild(_0x428003('span', {
'class': this.config.classNames.menu.badge
}, _0x415aef)), _0x17edbf;
},
'createButton': function(_0x427a85, _0x493eb2) {
var _0x2c1832, _0x325832, _0xcb618b, _0x25c678, _0x58309d = _0x428003('button'),
_0x183584 = Object.assign({}, _0x493eb2),
_0x2345d2 = _0x9fdcea(_0x427a85),
_0x2ca20d = !0x1;
'type' in _0x183584 || (_0x183584.type = 'button');
var _0x34225b = _0x449eac({
'ejs__control': true
});
switch ('class' in _0x183584 ? _0x183584.class.includes(_0x34225b) || (_0x183584.class += ' ' .concat(_0x34225b)) : _0x183584.class = _0x34225b, _0x427a85) {
case 'play':
_0x2ca20d = true, _0x2c1832 = 'play', _0xcb618b = 'pause', _0x325832 = 'play', _0x25c678 = 'pause';
break;
case 'mute':
_0x2ca20d = true, _0x2c1832 = 'mute', _0xcb618b = 'unmute', _0x325832 = 'volume', _0x25c678 = 'muted';
break;
case 'fullscreen':
_0x2ca20d = true, _0x2c1832 = 'enterFullscreen', _0xcb618b = 'exitFullscreen', _0x325832 = 'enter-fullscreen', _0x25c678 = 'exit-fullscreen';
break;
default:
_0x2c1832 = _0x2345d2, _0x325832 = _0x427a85;
}
return _0x2ca20d ? (_0x58309d.appendChild(_0x7f9f36.createIcon.call(this, _0x25c678, {
'class': _0x449eac({
'icon--pressed': true
})
})), _0x58309d.appendChild(_0x7f9f36.createIcon.call(this, _0x325832, {
'class': _0x449eac({
'icon--not-pressed': true
})
})), _0x58309d.appendChild(_0x7f9f36.createLabel.call(this, _0xcb618b, {
'class': _0x449eac({
'icon--pressed': true
})
})), _0x58309d.appendChild(_0x7f9f36.createLabel.call(this, _0x2c1832, {
'class': _0x449eac({
'icon--not-pressed': true
})
}))) : (_0x58309d.appendChild(_0x7f9f36.createIcon.call(this, _0x325832)), _0x58309d.appendChild(_0x7f9f36.createLabel.call(this, _0x2c1832))), _0x5dc0c0(_0x183584, _0xa949a8(this.config.selectors.buttons[_0x2345d2], _0x183584)), _0x154f99(_0x58309d, _0x183584), 'play' === _0x2345d2 ? (_0x1e2c68.array(this.elements.buttons[_0x2345d2]) || (this.elements.buttons[_0x2345d2] = []), this.elements.buttons[_0x2345d2].push(_0x58309d)) : this.elements.buttons[_0x2345d2] = _0x58309d, _0x58309d;
},
'createRange': function(_0x491b4a, _0x5f154a) {
var _0x2c1832 = _0x428003('input', _0x5dc0c0(_0xa949a8(this.config.selectors.inputs[_0x491b4a]), {
'type': 'range',
'min': 0x0,
'max': 0x64,
'step': 0.1,
'value': 0x0,
'autocomplete': 'off',
'role': 'slider',
'aria-label': _0xb9b2ff.get(_0x491b4a, this.config),
'aria-valuemin': 0x0,
'aria-valuemax': 0x64,
'aria-valuenow': 0x0
}, _0x5f154a));
return this.elements.inputs[_0x491b4a] = _0x2c1832, _0x7f9f36.updateRangeFill.call(this, _0x2c1832), _0x2c1832;
},
'bindMenuItemShortcuts': function(_0x1830d1, _0x2770c2) {
var _0x2c1832 = this;
_0x1093f4(_0x1830d1, 'keydown keyup', function(_0x4bb9d6) {
if ([0x20, 0x26, 0x27, 0x28].includes(_0x4bb9d6.which) && (_0x4bb9d6.preventDefault(), _0x4bb9d6.stopPropagation(), 'keydown' !== _0x4bb9d6.type)) {
var _0x20ac7b, _0x51717d = _0x13f491(_0x1830d1, '[role="menuitemradio"]');
if (!_0x51717d && [0x20, 0x27].includes(_0x4bb9d6.which)) _0x7f9f36.showMenuPanel.call(_0x2c1832, _0x2770c2, true);
else 0x20 !== _0x4bb9d6.which && (0x28 === _0x4bb9d6.which || _0x51717d && 0x27 === _0x4bb9d6.which ? (_0x20ac7b = _0x1830d1.nextElementSibling, _0x1e2c68.element(_0x20ac7b) || (_0x20ac7b = _0x1830d1.parentNode.firstElementChild)) : (_0x20ac7b = _0x1830d1.previousElementSibling, _0x1e2c68.element(_0x20ac7b) || (_0x20ac7b = _0x1830d1.parentNode.lastElementChild)), _0x31cc23.call(_0x2c1832, _0x20ac7b, true));
}
}, !0x1), _0x1093f4(_0x1830d1, 'keyup', function(_0x3844f5) {
0xd === _0x3844f5.which && _0x7f9f36.focusFirstMenuItem.call(_0x2c1832, null, true);
});
},
'createMenuItem': function(_0x52a95b) {
var _0x17edbf = this,
_0x2c1832 = _0x52a95b.value,
_0x231912 = _0x52a95b.list,
_0x57313b = _0x52a95b.type,
_0x19be70 = _0x52a95b.title,
_0x4ae558 = _0x52a95b.badge,
_0x34a012 = void 0x0 === _0x4ae558 ? null : _0x4ae558,
_0x7dbb2e = _0x52a95b.checked,
_0x200580 = void 0x0 !== _0x7dbb2e && _0x7dbb2e,
_0x4bb1a2 = _0xa949a8(this.config.selectors.inputs[_0x57313b]),
_0xb8fbbd = _0x428003('button', _0x5dc0c0(_0x4bb1a2, {
'type': 'button',
'role': 'menuitemradio',
'class': '' .concat(this.config.classNames.control, ' ').concat(_0x4bb1a2.class ? _0x4bb1a2.class : '').trim(),
'aria-checked': _0x200580,
'value': _0x2c1832
})),
_0x560073 = _0x428003('span');
_0x560073.innerHTML = _0x19be70, _0x1e2c68.element(_0x34a012) && _0x560073.appendChild(_0x34a012), _0xb8fbbd.appendChild(_0x560073), Object.defineProperty(_0xb8fbbd, 'checked', {
'enumerable': true,
'get': function() {
return 'true' === _0xb8fbbd.getAttribute('aria-checked');
},
'set': function(_0x2da7f0) {
_0x2da7f0 && Array.from(_0xb8fbbd.parentNode.children).filter(function(_0x480c16) {
return _0x13f491(_0x480c16, '[role="menuitemradio"]');
}).forEach(function(_0x363b12) {
return _0x363b12.setAttribute('aria-checked', 'false');
}), _0xb8fbbd.setAttribute('aria-checked', _0x2da7f0 ? 'true' : 'false');
}
});
var _0x449ec7 = _0x7f9f36.getCoreOptions.call(this);
this.listeners.bind(_0xb8fbbd, 'click keyup', function(_0x270d0d) {
_0x1e2c68.keyboardEvent(_0x270d0d) && 0x20 !== _0x270d0d.which || (_0x270d0d.preventDefault(), _0x270d0d.stopPropagation(), _0xb8fbbd.checked = true, Object.keys(_0x7f9f36.normalOptions).includes(_0x57313b) && _0x7f9f36.updateNormalOptions.call(_0x17edbf, _0x57313b, _0x2c1832), Object.keys(_0x449ec7).includes(_0x57313b) && (_0x449ec7[_0x57313b].netplay && _0xdcec2a.connected ? _0xdcec2a.connection.isInitiator && (_0x7f9f36.updateCoreOptions.call(_0x17edbf, _0x57313b, _0x2c1832), _0xdcec2a.connection.send(JSON.stringify({
'act': 'update-core-option',
'key': _0x57313b,
'value': _0x2c1832,
'frame': _0xdcec2a.currentFrame
}))) : _0x7f9f36.updateCoreOptions.call(_0x17edbf, _0x57313b, _0x2c1832)), _0x7f9f36.showMenuPanel.call(_0x17edbf, 'home', _0x1e2c68.keyboardEvent(_0x270d0d)));
}, _0x57313b, !0x1), _0x7f9f36.bindMenuItemShortcuts.call(this, _0xb8fbbd, _0x57313b), _0x231912.appendChild(_0xb8fbbd);
},
'updateVolume': function() {
_0x1e2c68.element(this.elements.inputs.volume) && _0x7f9f36.setRange.call(this, this.elements.inputs.volume, this.muted ? 0x0 : this.volume), _0x1e2c68.element(this.elements.buttons.mute) && (this.elements.buttons.mute.pressed = this.muted || 0x0 === this.volume);
},
'setRange': function(_0x1c075b) {
var _0x17edbf = arguments.length > 0x1 && void 0x0 !== arguments[0x1] ? arguments[0x1] : 0x0;
_0x1e2c68.element(_0x1c075b) && (_0x1c075b.value = _0x17edbf, _0x7f9f36.updateRangeFill.call(this, _0x1c075b));
},
'updateRangeFill': function(_0x2dbde1) {
var _0x17edbf = _0x1e2c68.event(_0x2dbde1) ? _0x2dbde1.target : _0x2dbde1;
if (_0x1e2c68.element(_0x17edbf) && 'range' === _0x17edbf.getAttribute('type')) {
if (_0x13f491(_0x17edbf, this.config.selectors.inputs.volume)) {
var _0x2c1832 = 0x64 * _0x17edbf.value;
_0x17edbf.setAttribute('aria-valuenow', _0x2c1832), _0x17edbf.setAttribute('aria-valuetext', '' .concat(_0x2c1832.toFixed(0x1), '%'));
} else _0x17edbf.setAttribute('aria-valuenow', _0x17edbf.value);
_0x59aa33.isWebkit && _0x17edbf.style.setProperty('--value', '' .concat(_0x17edbf.value / _0x17edbf.max * 0x64, '%'));
}
},
'toggleMenuButton': function(_0x249814, _0x2948e9) {
_0x132da7(this.elements.settings.buttons[_0x249814], !_0x2948e9);
},
'updateNormalOptions': function(_0x255b79, _0x505484) {
this.elements.settings.buttons[_0x255b79].querySelector('.' .concat(this.config.classNames.menu.value)).innerHTML = _0x7f9f36.normalOptions[_0x255b79].options[_0x505484];
var _0x2c1832 = {};
_0x2c1832[_0x255b79] = _0x505484, _0x7f9f36.storage.set(_0x2c1832), 'shader' === _0x255b79 && _0xdcec2a.setShader(_0x505484), 'virtual-gamepad' === _0x255b79 && _0xdcec2a.toggleVirtualGamepad.call(this, 'enabled' === _0x505484);
},
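// updateCoreOptions: appears to push a core option to the emulator (pause/resume workaround for
// psx, input-rotation mapping for nds, canvas aspect tweak for arcade/fba), persist it to
// storage and refresh the corresponding menu label.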
'updateCoreOptions': function(_0x41c4ad, _0x15823e) {
if ('nds' == this.system && !_0x2d904a.wasm) return true;
if (_0x7f9f36.coreOptionsValues[_0x41c4ad] = _0x15823e, 'psx' === this.system && _0x27f4c4.Module.pauseMainLoop(), _0xdcec2a.setVariable(_0x41c4ad, _0x15823e), 'psx' === this.system && (setTimeout(function() {
_0x27f4c4.Module.resumeMainLoop();
}, 0x64), setTimeout(function() {
_0x27f4c4.Module.resumeMainLoop();
}, 0xc8)), 'nds' === this.system && _0x2d904a.wasm) {
var _0x2c1832 = this.elements.wrapper.querySelector('canvas');
if ('screen_rotation' === _0x41c4ad) switch (_0x2c1832.style.transform = 'rotate(' + _0x15823e + 'deg)', _0x15823e) {
case '0':
_0xdcec2a.setVariable('desmume_input_rotation', '0');
break;
case '90':
_0xdcec2a.setVariable('desmume_input_rotation', '270');
break;
case '180':
_0xdcec2a.setVariable('desmume_input_rotation', '180');
break;
case '270':
_0xdcec2a.setVariable('desmume_input_rotation', '90');
}
}
if ('arcade' === this.system || 'fba0.2.97.29' === this.system) {
var _0x567ccc = this.elements.wrapper.querySelector('canvas');
_0x567ccc.height = 'fba-aspect' === _0x41c4ad && 'PAR' === _0x15823e ? _0x567ccc.width / 0x10 * 0x9 : _0x567ccc.width / 0x4 * 0x3;
}
var _0x31bdec = _0x7f9f36.getCoreOptions.call(this),
_0x3efd0b = _0x7f9f36.storage.get('core-options');
if (_0x1e2c68.empty(_0x3efd0b) && (_0x3efd0b = {}), _0x3efd0b[_0x41c4ad] = _0x15823e, _0x7f9f36.storage.set({
'core-options': _0x3efd0b
}), this.elements.settings.buttons[_0x41c4ad]) {
var _0xe95e6a = this.elements.settings.buttons[_0x41c4ad].querySelector('.' .concat(this.config.classNames.menu.value));
_0x1e2c68.object(_0x31bdec[_0x41c4ad].options) ? _0xe95e6a.innerHTML = _0x31bdec[_0x41c4ad].options[_0x15823e] : _0xe95e6a.innerHTML = _0x15823e;
}
},
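// setOptionMenuItem: appears to build, for each option, a forward button on the home panel plus
// a dedicated sub-panel with a back button and one menu item per available value.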
'setOptionMenuItem': function(_0x2f0e6a, _0x85969e) {
var _0x2c1832 = this,
_0x549dac = this.elements.settings.panels.home.querySelector('[role="menu"]'),
_0x43777f = this.elements.settings.popup.children[0x0];
Object.keys(_0x2f0e6a).forEach(function(_0x4fcbcf) {
var _0x177975 = _0x4fcbcf,
_0x5f3e0f = _0x428003('button', _0x5dc0c0(_0xa949a8(_0x2c1832.config.selectors.buttons.settings), {
'type': 'button',
'class': _0x449eac({
'ejs__control': true,
'ejs__control--forward': true
}),
'role': 'menuitem',
'item': _0x4fcbcf,
'aria-haspopup': true
}));
_0x1093f4(_0x5f3e0f, 'click', function() {
_0x7f9f36.showMenuPanel.call(_0x2c1832, _0x177975, !0x1);
});
var _0x1d5857, _0x1838f9 = _0x428003('span', null, _0x2f0e6a[_0x4fcbcf].label),
_0x455b20 = _0x428003('span', {
'class': _0x2c1832.config.classNames.menu.value
});
_0x1e2c68.object(_0x85969e) && (_0x1d5857 = _0x85969e[_0x177975]), _0x1e2c68.empty(_0x1d5857) && (_0x1d5857 = _0x2f0e6a[_0x177975].default), _0x1e2c68.object(_0x2f0e6a[_0x177975].options) ? _0x455b20.innerHTML = _0x2f0e6a[_0x177975].options[_0x1d5857] : _0x455b20.innerHTML = _0x1d5857, _0x1838f9.appendChild(_0x455b20), _0x5f3e0f.appendChild(_0x1838f9), _0x549dac.appendChild(_0x5f3e0f);
var _0x3f3775 = _0x428003('div', {
'data-pane': escape(_0x177975),
'hidden': ''
}),
_0x288440 = _0x428003('button', {
'type': 'button',
'class': _0x449eac({
'ejs__control': true,
'ejs__control--back': true
})
});
_0x288440.appendChild(_0x428003('span', {
'aria-hidden': true
}, _0x2f0e6a[_0x4fcbcf].label)), _0x288440.appendChild(_0x428003('span', {
'class': _0x449eac({
'ejs__sr-only': true
})
}, _0xb9b2ff.get('menuBack', _0x2c1832.config))), _0x1093f4(_0x3f3775, 'keydown', function(_0x462a4d) {
0x25 === _0x462a4d.which && (_0x462a4d.preventDefault(), _0x462a4d.stopPropagation(), _0x7f9f36.showMenuPanel.call(_0x2c1832, 'home', true));
}, !0x1), _0x1093f4(_0x288440, 'click', function() {
_0x7f9f36.showMenuPanel.call(_0x2c1832, 'home', !0x1);
}), _0x3f3775.appendChild(_0x288440), _0x3f3775.appendChild(_0x428003('div', {
'role': 'menu'
})), _0x43777f.appendChild(_0x3f3775), _0x2c1832.elements.settings.buttons[_0x177975] = _0x5f3e0f, _0x2c1832.elements.settings.panels[_0x177975] = _0x3f3775;
var _0x2c172f = _0x3f3775.querySelector('[role="menu"]');
! function(_0x301c01) {
if (_0x1e2c68.element(_0x301c01))
for (var _0x85969e = _0x301c01.childNodes.length; _0x85969e > 0x0;) _0x301c01.removeChild(_0x301c01.lastChild), _0x85969e -= 0x1;
}(_0x2c172f), _0x1e2c68.object(_0x2f0e6a[_0x4fcbcf].options) ? Object.keys(_0x2f0e6a[_0x4fcbcf].options).forEach(function(_0x281296) {
_0x7f9f36.createMenuItem.call(_0x2c1832, {
'value': _0x281296,
'list': _0x2c172f,
'type': _0x177975,
'title': _0x2f0e6a[_0x4fcbcf].options[_0x281296],
'badge': null,
'checked': Boolean(_0x1d5857 === _0x281296)
});
}) : _0x1e2c68.array(_0x2f0e6a[_0x4fcbcf].options) && _0x2f0e6a[_0x4fcbcf].options.forEach(function(_0x28a341) {
_0x7f9f36.createMenuItem.call(_0x2c1832, {
'value': _0x28a341,
'list': _0x2c172f,
'type': _0x177975,
'title': _0x28a341,
'badge': null,
'checked': Boolean(_0x1d5857 === _0x28a341)
});
});
});
},
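// setNormalOptionsMenu: appears to seed the menu with stored "normal" options (orientation,
// shader, virtual gamepad on touch devices) and, on 'start-game', to append core options parsed
// from the running core, including fba dipswitch entries.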
'setNormalOptionsMenu': function() {
var _0xa88a13 = this,
_0x17edbf = this,
_0x2c1832 = {};
_0x2c1832.orientation = _0x7f9f36.storage.get('orientation'), _0x2c1832.shader = _0x7f9f36.storage.get('shader'), _0x17edbf.touch ? (_0x2c1832['virtual-gamepad'] = _0x7f9f36.storage.get('virtual-gamepad'), _0x1093f4.call(_0x17edbf, _0x17edbf.elements.container, 'start-game', function() {
('enabled' === _0x2c1832['virtual-gamepad'] || _0x1e2c68.empty(_0x2c1832['virtual-gamepad'])) && _0xdcec2a.toggleVirtualGamepad.call(_0x17edbf, true);
})) : delete _0x7f9f36.normalOptions['virtual-gamepad'], _0x7f9f36.setOptionMenuItem.call(this, _0x7f9f36.normalOptions, _0x2c1832), _0x1093f4.call(_0x17edbf, _0x17edbf.elements.container, 'start-game', function() {
var _0x2c1832 = _0xa88a13,
_0x42a7b1 = {};
_0xdcec2a.getGameCoreOptions && _0xdcec2a.getGameCoreOptions().split('\n').forEach(function(_0x4a987e, _0x10bd7a) {
var _0x2c1832 = _0x4a987e.split('; '),
_0x1ddc5f = _0x2c1832[0x0];
if (0x0 === _0x1ddc5f.indexOf('fba-dipswitch-')) {
var _0xbd808 = _0x2c1832[0x1].split('|'),
_0x3c4b1a = _0x1ddc5f.replace(/_/g, ' ').replace(/.+\-(.+)/, '$1');
_0x42a7b1[_0x1ddc5f] = {
'label': _0x3c4b1a,
'options': _0xbd808.slice(0x1, -0x1),
'default': _0xbd808[0x0].replace('(Default) ', ''),
'netplay': true
};
}
});
var _0x27d859 = _0x7f9f36.storage.get('core-options');
_0x7f9f36.coreOptionsValues = _0x27d859 || {}, _0x7f9f36.setOptionMenuItem.call(_0x17edbf, _0x42a7b1, _0x7f9f36.coreOptionsValues), Object.keys(_0x7f9f36.coreOptionsValues).forEach(function(_0x51fc4e, _0x4b3613) {
_0x7f9f36.updateCoreOptions.call(_0x2c1832, _0x51fc4e, _0x7f9f36.coreOptionsValues[_0x51fc4e]);
}), 'nds' == _0x17edbf.system && (_0x27f4c4.Module._fast_forward_2 ? _0x27f4c4.Module._fast_forward_2(0x1) : _0x27f4c4.Module._fast_forward && _0x27f4c4.Module._fast_forward(0x1));
});
},
'updateCoreOptionMenuItems': function() {
var _0xa88a13 = this.elements.settings.panels.home.querySelector('[role="menu"]'),
_0x17edbf = _0x7f9f36.getCoreOptions.call(this);
_0xdcec2a.connected && !_0xdcec2a.connection.isInitiator ? Object.keys(_0x17edbf).forEach(function(_0x480587) {
_0x17edbf[_0x480587].netplay && _0xa88a13.querySelector('[item="' .concat(_0x480587, '"]')).setAttribute('disabled', '');
}) : Object.keys(_0x17edbf).forEach(function(_0x20e093) {
_0xa88a13.querySelector('[item="' .concat(_0x20e093, '"]')).removeAttribute('disabled');
});
},
'setCoreOptionsMenu': function() {
var _0xa88a13 = _0x7f9f36.getCoreOptions.call(this),
_0x17edbf = _0x7f9f36.storage.get('core-options');
_0x7f9f36.coreOptionsValues = _0x17edbf || {}, _0x7f9f36.setOptionMenuItem.call(this, _0xa88a13, _0x17edbf);
},
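// checkMenu: note that the visibility check below is computed but its result is discarded; the
// menu wrapper is always shown because the second argument is hard-coded to false.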
'checkMenu': function() {
var _0xa88a13 = this.elements.settings.buttons;
!_0x1e2c68.empty(_0xa88a13) && Object.values(_0xa88a13).some(function(_0x200331) {
return !_0x200331.hidden;
});
_0x132da7(this.elements.settings.menu, !0x1);
},
'focusFirstMenuItem': function(_0x303d68) {
var _0x17edbf = arguments.length > 0x1 && void 0x0 !== arguments[0x1] && arguments[0x1];
if (!this.elements.settings.popup.hidden) {
var _0x2c1832 = _0x303d68;
_0x1e2c68.element(_0x2c1832) || (_0x2c1832 = Object.values(this.elements.settings.panels).find(function(_0xe94c5e) {
return !_0xe94c5e.hidden;
}));
var _0x2f3a13 = _0x2c1832.querySelector('[role^="menuitem"]');
_0x31cc23.call(this, _0x2f3a13, _0x17edbf);
}
},
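// toggleMenu: appears to open/close the settings popup, keeping aria-expanded and the menu-open
// class in sync and restoring focus for keyboard use (Escape, key code 0x1b, closes it).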
'toggleMenu': function(_0x28d6c6) {
var _0x17edbf = this.elements.settings.popup,
_0x2c1832 = this.elements.buttons.settings;
if (_0x1e2c68.element(_0x17edbf) && _0x1e2c68.element(_0x2c1832)) {
var _0x614270 = _0x17edbf.hidden,
_0x14f1b5 = _0x614270;
if (_0x1e2c68.boolean(_0x28d6c6)) _0x14f1b5 = _0x28d6c6;
else if (_0x1e2c68.keyboardEvent(_0x28d6c6) && 0x1b === _0x28d6c6.which) _0x14f1b5 = !0x1;
else if (_0x1e2c68.event(_0x28d6c6)) {
var _0xcf75ac = _0x17edbf.contains(_0x28d6c6.target);
if (_0xcf75ac || !_0xcf75ac && _0x28d6c6.target !== _0x2c1832 && _0x14f1b5) return;
}
_0x2c1832.setAttribute('aria-expanded', _0x14f1b5);
_0x132da7(_0x17edbf, !_0x14f1b5);
_0x3a8e2f(this.elements.container, this.config.classNames.menu.open, _0x14f1b5);
_0x14f1b5 && _0x1e2c68.keyboardEvent(_0x28d6c6) ? _0x7f9f36.focusFirstMenuItem.call(this, null, true) : _0x14f1b5 || _0x614270 || _0x31cc23.call(this, _0x2c1832, _0x1e2c68.keyboardEvent(_0x28d6c6));
}
},
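// getMenuSize: appears to measure a hidden menu panel by cloning it off-screen, reading
// scrollWidth/scrollHeight and then removing the clone.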
'getMenuSize': function(_0x55fe13) {
var _0x17edbf = _0x55fe13.cloneNode(true);
_0x17edbf.style.position = 'absolute';
_0x17edbf.style.opacity = 0;
_0x17edbf.removeAttribute('hidden');
_0x55fe13.parentNode.appendChild(_0x17edbf);
var _0x2c1832 = _0x17edbf.scrollWidth,
_0x5edd1b = _0x17edbf.scrollHeight;
return _0x12a55d(_0x17edbf), {
'width': _0x2c1832,
'height': _0x5edd1b
};
},
'showLoadStatePanel': function() {},
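// showMenuPanel: appears to switch between settings panels, animating the popup container to the
// target panel's size when transitions are enabled and reduced motion is not requested.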
'showMenuPanel': function() {
var _0xa88a13 = this,
_0x17edbf = arguments.length > 0x0 && void 0x0 !== arguments[0x0] ? arguments[0x0] : '',
_0x2c1832 = arguments.length > 0x1 && void 0x0 !== arguments[0x1] && arguments[0x1],
_0x7f4281 = _0x530042.call(this, '[data-pane="' .concat(escape(_0x17edbf), '"]'));
if (_0x1e2c68.element(_0x7f4281)) {
var _0x3d6729 = _0x7f4281.parentNode,
_0x381f57 = Array.from(_0x3d6729.children).find(function(_0x57c652) {
return !_0x57c652.hidden;
});
if (_0x2d904a.transitions && !_0x2d904a.reducedMotion) {
_0x3d6729.style.width = '' .concat(_0x381f57.scrollWidth, 'px');
_0x3d6729.style.height = '' .concat(_0x381f57.scrollHeight, 'px');
var _0x29dd37 = _0x7f9f36.getMenuSize.call(this, _0x7f4281),
_0x37447f = function _0x17edbf(_0x40854f) {
_0x40854f.target === _0x3d6729 && ['width', 'height'].includes(_0x40854f.propertyName) && (_0x3d6729.style.width = '', _0x3d6729.style.height = '', _0x20109b.call(_0xa88a13, _0x3d6729, _0x5a2767, _0x17edbf));
};
_0x1093f4.call(this, _0x3d6729, _0x5a2767, _0x37447f), _0x3d6729.style.width = '' .concat(_0x29dd37.width, 'px'), _0x3d6729.style.height = '' .concat(_0x29dd37.height, 'px');
}
_0x132da7(_0x381f57, true);
_0x132da7(_0x7f4281, false);
_0x7f9f36.focusFirstMenuItem.call(this, _0x7f4281, _0x2c1832);
}
},
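// setLoadState / setScreenRecord / setGamepad / setCheat / setNetplay below appear to share one
// pattern: add a toolbar button plus a hidden 'ejs__dialog' container under the dialogs wrapper
// and remember it on this.elements.dialogs.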
'setLoadState': function(_0x526828, _0x2ce330) {
_0x526828.appendChild(_0x7f9f36.createButton.call(this, 'load-state', {
'aria-haspopup': true,
'aria-expanded': !0x1
}));
var _0x2c1832 = _0x428003('div', {
'class': _0x449eac({
'ejs__dialog': true,
'ejs__load-state__container': true
}),
'hidden': ''
}),
_0x2e5e96 = _0x428003('div');
_0x2c1832.appendChild(_0x2e5e96);
_0x530042.call(this, '.' .concat(_0x449eac({
'ejs__dialogs': true
}))).appendChild(_0x2c1832);
this.elements.dialogs.loadState = _0x2c1832;
},
'setScreenRecord': function(_0x4c3fdd, _0x289259) {
if (!window.MediaRecorder) {
    return;
}
var _0x2c1832 = _0x7f9f36.createButton.call(this, 'screen-record', {
'aria-haspopup': true,
'aria-expanded': !0x1,
'style': 'margin-right:auto;'
});
_0x4c3fdd.appendChild(_0x2c1832);
var _0x13c0e4 = _0x428003('div', {
'class': _0x449eac({
'ejs__dialog': true,
'ejs__screenRecord__container': true
}),
'hidden': ''
}),
_0xe2c02a = _0x428003('div');
_0x13c0e4.appendChild(_0xe2c02a);
_0x530042.call(this, '.' .concat(_0x449eac({
'ejs__dialogs': true
}))).appendChild(_0x13c0e4);
this.elements.dialogs.screenRecord = _0x13c0e4;
},
'setGamepad': function(_0x4c3fdd, _0x289259) {
var _0x2c1832 = _0x7f9f36.createButton.call(this, 'gamepad', {
'aria-haspopup': true,
'aria-expanded': !0x1,
'style': 'margin-right:auto;'
});
_0x4c3fdd.appendChild(_0x2c1832);
var _0x13c0e4 = _0x428003('div', {
'class': _0x449eac({
'ejs__dialog': true,
'ejs__gamepad__container': true
}),
'hidden': ''
}),
_0xe2c02a = _0x428003('div');
_0x13c0e4.appendChild(_0xe2c02a);
_0x530042.call(this, '.' .concat(_0x449eac({
'ejs__dialogs': true
}))).appendChild(_0x13c0e4);
this.elements.dialogs.gamepad = _0x13c0e4;
},
'setCheat': function(_0x328075, _0x21ca37) {
var _0x2c1832 = _0x7f9f36.createButton.call(this, 'cheat', {
'aria-haspopup': true,
'aria-expanded': !0x1,
'style': 'margin-right:auto;'
});
_0x328075.appendChild(_0x2c1832);
var _0x173db3 = _0x428003('div', {
'class': _0x449eac({
'ejs__dialog': true,
'ejs__cheat__container': true
}),
'hidden': ''
}),
_0x460961 = _0x428003('div');
_0x173db3.appendChild(_0x460961);
_0x530042.call(this, '.' .concat(_0x449eac({
'ejs__dialogs': true
}))).appendChild(_0x173db3);
this.elements.dialogs.cheat = _0x173db3;
},
'setStateInfoBarWidget': function() {
var _0xa88a13 = _0x428003('div', {
'class': _0x449eac({
'ejs__widget': true
})
}),
_0x17edbf = _0x428003('div');
_0x17edbf.style = 'color: red;font-size: 17px;padding:10px;text-align:left;text-shadow:1px 1px 1px #000';
_0xa88a13.appendChild(_0x17edbf);
_0x530042.call(this, '.' .concat(_0x449eac({
'ejs__widgets': true
}))).appendChild(_0xa88a13);
this.elements.widgets.stateInfo = _0xa88a13;
this.elements.widgets.stateInfoDiv = _0x17edbf;
},
'setNetplayWidgets': function() {
var _0xa88a13 = _0x428003('div', {
'class': _0x449eac({
'ejs__widget': true,
'ejs__widget_netplay': true
})
}),
_0x17edbf = _0x428003('div');
_0xa88a13.appendChild(_0x17edbf);
_0x530042.call(this, '.' .concat(_0x449eac({
'ejs__widgets': true
}))).appendChild(_0xa88a13);
this.elements.widgets.netplay = _0xa88a13;
},
'setNetplay': function(_0x1859f4, _0x50544d) {
_0x1859f4.appendChild(_0x7f9f36.createButton.call(this, 'netplay', {
'aria-haspopup': true,
'aria-expanded': !0x1
}));
var _0x2c1832 = _0x428003('div', {
'class': _0x449eac({
'ejs__dialog': true,
'ejs__netplay__container': true
}),
'hidden': ''
}),
_0x4f63e0 = _0x428003('div');
_0x2c1832.appendChild(_0x4f63e0);
_0x530042.call(this, '.' .concat(_0x449eac({
'ejs__dialogs': true
}))).appendChild(_0x2c1832);
this.elements.dialogs.netplay = _0x2c1832;
_0x7f9f36.setNetplayWidgets.call(this);
},
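// toggleContextMenu: appears to show the custom right-click menu at the pointer position once
// the game has started (appending an 'EmulatorJS v<version>' link on first open) and to hide it
// otherwise.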
'toggleContextMenu': function(_0x24b1f3, _0x4b9d55) {
if (_0x350d73(_0x24b1f3.target, _0x449eac({
'ejs__dialogs': true
}))) {
var _0x2c1832 = _0x7f9f36.contextMenu.timer;
if (this.started) {
var _0x2a1dda = _0x530042.call(this, '.' .concat(_0x449eac({
'ejs__contextmenu': true
}), ' ul'));
if (_0x4b9d55) {
_0x7f9f36.contextMenu.style.display = 'block', _0x7f9f36.contextMenu.style.left = '' .concat(_0x24b1f3.layerX, 'px'), _0x7f9f36.contextMenu.style.top = '' .concat(_0x24b1f3.layerY, 'px');
if (null === _0x2a1dda.getAttribute('menu')) {
var _0x14264d = _0x428003('li', {});
_0x14264d.appendChild(_0x428003('a', {
'target': '_blank',
'href': 'https://github.com/ethanaobrien/emulatorjs'
}, 'EmulatorJS v' .concat(this.version))), _0x2a1dda.appendChild(_0x14264d), _0x2a1dda.setAttribute('menu', '');
}
_0x2c1832 && clearTimeout(_0x2c1832), _0x2c1832 = setTimeout(function() {}, 0xfa0), _0x7f9f36.contextMenu.timer = _0x2c1832;
} else _0x7f9f36.contextMenu.style.display = 'none';
}
}
},
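// create: appears to assemble the whole control bar: context-menu entries (screenshot, cache
// manager, quick save/load), cache and loading dialogs, restart/play/save-state buttons,
// optional netplay, the volume slider, the settings menu and the fullscreen button.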
'create': function(_0x42e40d) {
var _0xa88a13 = this;
_0x7f9f36.storage = new _0x2f61ba(this, 'ejs_' .concat(this.system, '_settings'));
var _0x17edbf = _0x428003('div', _0xa949a8(this.config.selectors.controls.wrapper));
_0x7f9f36.contextMenu = _0x428003('div', {
'class': _0x449eac({
'ejs__contextmenu': true
}),
'style': 'position: absolute; display:none;z-index:9'
});
var contextHtml = ['<ul>', '</ul>'];
var contextFunctions = [];
function addContextHtml(title, hidden, functi0n) {
    if (typeof functi0n == 'function') {
        contextFunctions.push(functi0n);
    } else {
        contextFunctions.push(function() {});
    }
    var i = contextHtml.length - 1;
    if (hidden) {
        contextHtml.splice(i, 0, '<li hidden><a href="#" onclick="return false">' + title + '</a></li>');
    } else {
        contextHtml.splice(i, 0, '<li><a href="#" onclick="return false">' + title + '</a></li>');
    }
}
addContextHtml('Take Screenshot', false, function(_0x20faaa) {
var _0x4e898a = _0x378b5c.getScreenData();
_0x4e4ca6()(new Blob([_0x4e898a]), 'game.png');
return false;
})
addContextHtml('Cache Manage', true, function(_0x275730) {
if (_0x27f4c4.romdb) {
_0x132da7(_0xa88a13.elements.dialogs.cache, !0x1);
var _0x3360eb = _0xa88a13.elements.dialogs.cache.querySelector('.' .concat(_0x378b5c.classNames['dialog-content']));
_0x3360eb.innerHTML = '<div>Loading</div>', _0x27f4c4.romdb.getAll(function(_0x43586d) {
if (_0x43586d) {
var _0x275730 = _0x428003('table', {
'style': 'width:100%;padding-left:10px;text-align:left'
}),
_0x1ab9e9 = _0x428003('tbody');
_0x43586d.length > 0x0 ? (_0x3360eb.innerHTML = '', _0x43586d.forEach(function(_0x32b0b7, _0x2d0e11) {
if (_0x32b0b7.key && !/-part-\d+$/ .test(_0x32b0b7.key)) {
for (var _0x3360eb = Math.ceil(_0x32b0b7.filesize / 0x6400000), _0x277c67 = [_0x32b0b7.key], _0x3edb35 = 0x1; _0x3edb35 < _0x3360eb; _0x3edb35 += 0x1) _0x277c67.push('' .concat(_0x32b0b7.key, '-part-').concat(_0x3edb35));
var _0x3e31f6 = _0x428003('tr'),
_0x5b75bd = _0x428003('td'),
_0x227405 = _0x428003('td'),
_0xac150b = _0x428003('td');
_0x5b75bd.innerHTML = _0x32b0b7.filename, _0x227405.innerHTML = function(_0x1858c1) {
var _0x2d0e11 = -0x1;
do {
_0x1858c1 /= 0x400, _0x2d0e11++;
} while (_0x1858c1 > 0x400);
return Math.max(_0x1858c1, 0.1).toFixed(0x1) + [' kB', ' MB', ' GB', ' TB', ' PB', ' EB', ' ZB', ' YB'][_0x2d0e11];
}(_0x32b0b7.filesize), _0xac150b.innerHTML = '<a href="#" onclick="return false">Remove</a>', _0x3e31f6.appendChild(_0x5b75bd), _0x3e31f6.appendChild(_0x227405), _0x3e31f6.appendChild(_0xac150b), _0x1ab9e9.appendChild(_0x3e31f6), _0xac150b.querySelector('a').onclick = function(_0x16f22a) {
_0x27f4c4.romdb.removeBatch(_0x277c67);
try {
_0x1ab9e9.removeChild(_0x3e31f6);
} catch (_0x353628) {}
return _0x16f22a.stopPropagation(), !0x1;
};
}
}), _0x275730.appendChild(_0x1ab9e9), _0x3360eb.appendChild(_0x275730)) : _0x3360eb.innerHTML = '<div style="text-align:center">Empty</div>';
} else _0x3360eb.innerHTML = '<div style="text-align:center">Empty</div>';
}, function() {});
}
return false;
})
addContextHtml('Quick Save (F2)', false, function(_0x395c73) {
_0x378b5c.quickSaveState();
_0x2593da.contextMenu.style.display = 'none';
})
addContextHtml('Quick Load (F4)', false, function(_0x124f69) {
_0x378b5c.quickLoadState();
_0x2593da.contextMenu.style.display = 'none';
})
_0x7f9f36.contextMenu.innerHTML = contextHtml.join('');
var a = _0x7f9f36.contextMenu.getElementsByTagName('li');
for (var i = 0; i < a.length; i++) {
    _0x1093f4.call(_0xa88a13, a[i], 'click', contextFunctions[i]);
}
this.elements.container.appendChild(_0x7f9f36.contextMenu);
var _0x2c1832 = _0x428003('div', {
'class': _0x449eac({
'ejs__dialog': true,
'ejs__cache__container': true
}),
'hidden': ''
});
_0x530042.call(this, '.' .concat(_0x449eac({
'ejs__dialogs': true
}))).appendChild(_0x2c1832), this.elements.dialogs.cache = _0x2c1832;
var _0x3c66db = _0x428003('div', {
'class': _0x449eac({
'ejs__dialog': true,
'ejs__loading__container': true
}),
'hidden': ''
});
_0x530042.call(this, '.' .concat(_0x449eac({
'ejs__dialogs': true
}))).appendChild(_0x3c66db);
this.elements.dialogs.loading = _0x3c66db;
_0x17edbf.appendChild(_0x7f9f36.createButton.call(this, 'restart'));
_0x17edbf.appendChild(_0x7f9f36.createButton.call(this, 'play'));
_0x17edbf.appendChild(_0x7f9f36.createButton.call(this, 'save-state', {
'aria-expanded': !0x1
}));
_0x7f9f36.setLoadState.call(this, _0x17edbf, _0x42e40d);
_0x7f9f36.setScreenRecord.call(this, _0x17edbf, _0x42e40d);
_0x7f9f36.supportNetPlay.call(this, _0x17edbf) && parseInt(this.config.gameId, 0xa) > 0x0 && _0x7f9f36.setNetplay.call(this, _0x17edbf, _0x42e40d);
_0x7f9f36.setGamepad.call(this, _0x17edbf, _0x42e40d);
_0x7f9f36.setStateInfoBarWidget.call(this, _0x17edbf, _0x42e40d);
_0x7f9f36.setCheat.call(this, _0x17edbf, _0x42e40d);
_0x17edbf.appendChild(_0x428003('span', {
'style': 'flex:1'
}));
var _0xd299b = _0x428003('div', {
'class': _0x449eac({
'ejs__volume': true
})
});
_0xd299b.appendChild(_0x7f9f36.createButton.call(this, 'mute'));
var _0x4bbcd0 = {
'max': 0x1,
'step': 0.01,
'value': this.config.volume
};
_0xd299b.appendChild(_0x7f9f36.createRange.call(this, 'volume', _0x5dc0c0(_0x4bbcd0, {}))), this.elements.volume = _0xd299b, _0x17edbf.appendChild(_0xd299b);
var _0x2746ab = _0x428003('div', {
'class': _0x449eac({
'ejs__menu': true
})
});
_0x2746ab.appendChild(_0x7f9f36.createButton.call(this, 'settings', {
'aria-haspopup': true,
'aria-expanded': !0x1
}));
var _0x262fb8 = _0x428003('div', {
'class': _0x449eac({
'ejs__menu__container': true
}),
'hidden': ''
}),
_0x3f3385 = _0x428003('div'),
_0x174c46 = _0x428003('div', {
'data-pane': 'home'
}),
_0x17f3cb = _0x428003('div', {
'role': 'menu'
});
_0x174c46.appendChild(_0x17f3cb);
_0x3f3385.appendChild(_0x174c46);
this.elements.settings.panels.home = _0x174c46;
_0x262fb8.appendChild(_0x3f3385);
_0x2746ab.appendChild(_0x262fb8);
_0x17edbf.appendChild(_0x2746ab);
this.elements.settings.popup = _0x262fb8;
this.elements.settings.menu = _0x2746ab;
_0x17edbf.appendChild(_0x7f9f36.createButton.call(this, 'fullscreen'));
var _0x19edbf = this;
_0x17edbf.addEventListener('mousedown', function() {
    _0x19edbf.onButtonDiv = true;
});
this.elements.controls = _0x17edbf;
_0x7f9f36.setNormalOptionsMenu.call(this);
_0x7f9f36.setCoreOptionsMenu.call(this);
return _0x17edbf;
},
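// inject: appears to register the SVG icon set, render or adopt the controls markup, insert it
// into the configured container and define a 'pressed' accessor on every control button.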
'inject': function() {
var _0xa88a13 = this;
this.icons = {
'enter-fullscreen': '<path d="M10 3h3.6l-4 4L11 8.4l4-4V8h2V1h-7zM7 9.6l-4 4V10H1v7h7v-2H4.4l4-4z"/>',
'exit-fullscreen': '<path d="M1 12h3.6l-4 4L2 17.4l4-4V17h2v-7H1zM16 .6l-4 4V1h-2v7h7V6h-3.6l4-4z"/>',
'gamepad': '<svg viewBox="0 0 640 512"><path fill="currentColor" d="M480 96H160C71.6 96 0 167.6 0 256s71.6 160 160 160c44.8 0 85.2-18.4 114.2-48h91.5c29 29.6 69.5 48 114.2 48 88.4 0 160-71.6 160-160S568.4 96 480 96zM256 276c0 6.6-5.4 12-12 12h-52v52c0 6.6-5.4 12-12 12h-40c-6.6 0-12-5.4-12-12v-52H76c-6.6 0-12-5.4-12-12v-40c0-6.6 5.4-12 12-12h52v-52c0-6.6 5.4-12 12-12h40c6.6 0 12 5.4 12 12v52h52c6.6 0 12 5.4 12 12v40zm184 68c-26.5 0-48-21.5-48-48s21.5-48 48-48 48 21.5 48 48-21.5 48-48 48zm80-80c-26.5 0-48-21.5-48-48s21.5-48 48-48 48 21.5 48 48-21.5 48-48 48z"/></svg>',
'load-state': '<svg viewBox="0 0 576 512"><path fill="currentColor" d="M572.694 292.093L500.27 416.248A63.997 63.997 0 0 1 444.989 448H45.025c-18.523 0-30.064-20.093-20.731-36.093l72.424-124.155A64 64 0 0 1 152 256h399.964c18.523 0 30.064 20.093 20.73 36.093zM152 224h328v-48c0-26.51-21.49-48-48-48H272l-64-64H48C21.49 64 0 85.49 0 112v278.046l69.077-118.418C86.214 242.25 117.989 224 152 224z"/></svg>',
'muted': '<path d="M12.4 12.5l2.1-2.1 2.1 2.1 1.4-1.4L15.9 9 18 6.9l-1.4-1.4-2.1 2.1-2.1-2.1L11 6.9 13.1 9 11 11.1zM3.786 6.008H.714C.286 6.008 0 6.31 0 6.76v4.512c0 .452.286.752.714.752h3.072l4.071 3.858c.5.3 1.143 0 1.143-.602V2.752c0-.601-.643-.977-1.143-.601L3.786 6.008z"/>',
'netplay': '<svg viewBox="0 0 512 512"><path fill="currentColor" d="M364.215 192h131.43c5.439 20.419 8.354 41.868 8.354 64s-2.915 43.581-8.354 64h-131.43c5.154-43.049 4.939-86.746 0-128zM185.214 352c10.678 53.68 33.173 112.514 70.125 151.992.221.001.44.008.661.008s.44-.008.661-.008c37.012-39.543 59.467-98.414 70.125-151.992H185.214zm174.13-192h125.385C452.802 84.024 384.128 27.305 300.95 12.075c30.238 43.12 48.821 96.332 58.394 147.925zm-27.35 32H180.006c-5.339 41.914-5.345 86.037 0 128h151.989c5.339-41.915 5.345-86.037-.001-128zM152.656 352H27.271c31.926 75.976 100.6 132.695 183.778 147.925-30.246-43.136-48.823-96.35-58.393-147.925zm206.688 0c-9.575 51.605-28.163 104.814-58.394 147.925 83.178-15.23 151.852-71.949 183.778-147.925H359.344zm-32.558-192c-10.678-53.68-33.174-112.514-70.125-151.992-.221 0-.44-.008-.661-.008s-.44.008-.661.008C218.327 47.551 195.872 106.422 185.214 160h141.572zM16.355 192C10.915 212.419 8 233.868 8 256s2.915 43.581 8.355 64h131.43c-4.939-41.254-5.154-84.951 0-128H16.355zm136.301-32c9.575-51.602 28.161-104.81 58.394-147.925C127.872 27.305 59.198 84.024 27.271 160h125.385z"/></svg>',
'pause': '<path d="M6 1H3c-.6 0-1 .4-1 1v14c0 .6.4 1 1 1h3c.6 0 1-.4 1-1V2c0-.6-.4-1-1-1zM12 1c-.6 0-1 .4-1 1v14c0 .6.4 1 1 1h3c.6 0 1-.4 1-1V2c0-.6-.4-1-1-1h-3z"/>',
'play': '<path d="M15.562 8.1L3.87.225C3.052-.337 2 .225 2 1.125v15.75c0 .9 1.052 1.462 1.87.9L15.563 9.9c.584-.45.584-1.35 0-1.8z"/>',
'restart': '<path d="M9.7 1.2l.7 6.4 2.1-2.1c1.9 1.9 1.9 5.1 0 7-.9 1-2.2 1.5-3.5 1.5-1.3 0-2.6-.5-3.5-1.5-1.9-1.9-1.9-5.1 0-7 .6-.6 1.4-1.1 2.3-1.3l-.6-1.9C6 2.6 4.9 3.2 4 4.1 1.3 6.8 1.3 11.2 4 14c1.3 1.3 3.1 2 4.9 2 1.9 0 3.6-.7 4.9-2 2.7-2.7 2.7-7.1 0-9.9L16 1.9l-6.3-.7z"/>',
'save-state': '<svg viewBox="0 0 448 512"><path fill="currentColor" d="M433.941 129.941l-83.882-83.882A48 48 0 0 0 316.118 32H48C21.49 32 0 53.49 0 80v352c0 26.51 21.49 48 48 48h352c26.51 0 48-21.49 48-48V163.882a48 48 0 0 0-14.059-33.941zM224 416c-35.346 0-64-28.654-64-64 0-35.346 28.654-64 64-64s64 28.654 64 64c0 35.346-28.654 64-64 64zm96-304.52V212c0 6.627-5.373 12-12 12H76c-6.627 0-12-5.373-12-12V108c0-6.627 5.373-12 12-12h228.52c3.183 0 6.235 1.264 8.485 3.515l3.48 3.48A11.996 11.996 0 0 1 320 111.48z"/></svg>',
'screen-record': '<svg version="1.1" id="Capa_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 39.989 39.989" style="enable-background:new 0 0 39.989 39.989;" xml:space="preserve"><path id="XMLID_155_" d="M19.994,0C8.952,0,0,8.952,0,19.995c0,11.043,8.952,19.994,19.994,19.994s19.995-8.952,19.995-19.994 C39.989,8.952,31.037,0,19.994,0z M19.994,27.745c-4.28,0-7.75-3.47-7.75-7.75s3.47-7.75,7.75-7.75s7.75,3.47,7.75,7.75 S24.275,27.745,19.994,27.745z"/></svg>',
'settings': '<path d="M16.135 7.784a2 2 0 0 1-1.23-2.969c.322-.536.225-.998-.094-1.316l-.31-.31c-.318-.318-.78-.415-1.316-.094a2 2 0 0 1-2.969-1.23C10.065 1.258 9.669 1 9.219 1h-.438c-.45 0-.845.258-.997.865a2 2 0 0 1-2.969 1.23c-.536-.322-.999-.225-1.317.093l-.31.31c-.318.318-.415.781-.093 1.317a2 2 0 0 1-1.23 2.969C1.26 7.935 1 8.33 1 8.781v.438c0 .45.258.845.865.997a2 2 0 0 1 1.23 2.969c-.322.536-.225.998.094 1.316l.31.31c.319.319.782.415 1.316.094a2 2 0 0 1 2.969 1.23c.151.607.547.865.997.865h.438c.45 0 .845-.258.997-.865a2 2 0 0 1 2.969-1.23c.535.321.997.225 1.316-.094l.31-.31c.318-.318.415-.781.094-1.316a2 2 0 0 1 1.23-2.969c.607-.151.865-.547.865-.997v-.438c0-.451-.26-.846-.865-.997zM9 12a3 3 0 1 1 0-6 3 3 0 0 1 0 6z"/>',
'volume': '<path d="M15.5999996,3.3 C15.1999996,2.9 14.5999996,2.9 14.1999996,3.3 C13.7999996,3.7 13.7999996,4.3 14.1999996,4.7 C15.3999996,5.9 15.9999996,7.4 15.9999996,9 C15.9999996,10.6 15.3999996,12.1 14.1999996,13.3 C13.7999996,13.7 13.7999996,14.3 14.1999996,14.7 C14.3999996,14.9 14.6999996,15 14.8999996,15 C15.1999996,15 15.3999996,14.9 15.5999996,14.7 C17.0999996,13.2 17.9999996,11.2 17.9999996,9 C17.9999996,6.8 17.0999996,4.8 15.5999996,3.3 L15.5999996,3.3 Z"></path>\n <path d="M11.2819745,5.28197449 C10.9060085,5.65794047 10.9060085,6.22188944 11.2819745,6.59785542 C12.0171538,7.33303477 12.2772954,8.05605449 12.2772954,9.00000021 C12.2772954,9.93588462 11.851678,10.9172014 11.2819745,11.4869049 C10.9060085,11.8628709 10.9060085,12.4268199 11.2819745,12.8027859 C11.4271642,12.9479755 11.9176724,13.0649528 12.2998149,12.9592565 C12.4124479,12.9281035 12.5156669,12.8776063 12.5978555,12.8027859 C13.773371,11.732654 14.1311161,10.1597914 14.1312523,9.00000021 C14.1312723,8.8299555 14.1286311,8.66015647 14.119665,8.4897429 C14.0674781,7.49784946 13.8010171,6.48513613 12.5978554,5.28197449 C12.2218894,4.9060085 11.6579405,4.9060085 11.2819745,5.28197449 Z"></path>\n <path d="M3.78571429,6.00820648 L0.714285714,6.00820648 C0.285714286,6.00820648 0,6.30901277 0,6.76022222 L0,11.2723167 C0,11.7235261 0.285714286,12.0243324 0.714285714,12.0243324 L3.78571429,12.0243324 L7.85714286,15.8819922 C8.35714286,16.1827985 9,15.8819922 9,15.2803796 L9,2.75215925 C9,2.15054666 8.35714286,1.77453879 7.85714286,2.15054666 L3.78571429,6.00820648 Z"></path>',
'cheat': '<svg viewBox="0 0 496 512"><path fill="currentColor" d="M248 8C111 8 0 119 0 256s111 248 248 248 248-111 248-248S385 8 248 8zm0 448c-110.3 0-200-89.7-200-200S137.7 56 248 56s200 89.7 200 200-89.7 200-200 200zm-80-216c17.7 0 32-14.3 32-32s-14.3-32-32-32-32 14.3-32 32 14.3 32 32 32zm160 0c17.7 0 32-14.3 32-32s-14.3-32-32-32-32 14.3-32 32 14.3 32 32 32zm4 72.6c-20.8 25-51.5 39.4-84 39.4s-63.2-14.3-84-39.4c-8.5-10.2-23.7-11.5-33.8-3.1-10.2 8.5-11.5 23.6-3.1 33.8 30 36 74.1 56.6 120.9 56.6s90.9-20.6 120.9-56.6c8.5-10.2 7.1-25.3-3.1-33.8-10.1-8.4-25.3-7.1-33.8 3.1z" class=""></path></svg>'
}, this.id = Math.floor(0x2710 * Math.random());
var _0x17edbf = null;
this.elements.controls = null;
var _0x2c1832 = {
'id': this.id
},
_0x270a9e = true;
_0x1e2c68.function(this.config.controls) && (this.config.controls = this.config.controls.call(this.props)), this.config.controls || (this.config.controls = []), _0x1e2c68.element(this.config.controls) || _0x1e2c68.string(this.config.controls) ? _0x17edbf = this.config.controls : (_0x17edbf = _0x7f9f36.create.call(this, {
'id': this.id,
'seektime': this.config.seekTime,
'speed': this.speed,
'quality': this.quality
}), _0x270a9e = !0x1);
var _0x1e6644, _0x3ec218 = function(_0x406126) {
var _0x17edbf = _0x406126;
return Object.entries(_0x2c1832).forEach(function(_0x3e992c) {
var _0x2c1832 = _0x5005e6(_0x3e992c, 0x2),
_0x47c2ec = _0x2c1832[0x0],
_0x547588 = _0x2c1832[0x1];
_0x17edbf = _0x1a0e98(_0x17edbf, '{' .concat(_0x47c2ec, '}'), _0x547588);
}), _0x17edbf;
};
if (_0x270a9e && (_0x1e2c68.string(this.config.controls) ? _0x17edbf = _0x3ec218(_0x17edbf) : _0x1e2c68.element(_0x17edbf) && (_0x17edbf.innerHTML = _0x3ec218(_0x17edbf.innerHTML))), _0x1e2c68.string(this.config.selectors.controls.container) && (_0x1e6644 = document.querySelector(this.config.selectors.controls.container)), _0x1e2c68.element(_0x1e6644) || (_0x1e6644 = this.elements.container), _0x1e6644[_0x1e2c68.element(_0x17edbf) ? 'insertAdjacentElement' : 'insertAdjacentHTML']('afterbegin', _0x17edbf), _0x1e2c68.element(this.elements.controls) || _0x7f9f36.findElements.call(this), !_0x1e2c68.empty(this.elements.buttons)) {
var _0xe8272c = function(_0x176a3e) {
var _0x2c1832 = _0xa88a13.config.classNames.controlPressed;
Object.defineProperty(_0x176a3e, 'pressed', {
'enumerable': true,
'get': function() {
return _0x350d73(_0x176a3e, _0x2c1832);
},
'set': function() {
var _0xa88a13 = arguments.length > 0x0 && void 0x0 !== arguments[0x0] && arguments[0x0];
_0x3a8e2f(_0x176a3e, _0x2c1832, _0xa88a13);
}
});
};
Object.values(this.elements.buttons).filter(Boolean).forEach(function(_0x577b9b) {
_0x1e2c68.array(_0x577b9b) || _0x1e2c68.nodeList(_0x577b9b) ? Array.from(_0x577b9b).filter(Boolean).forEach(_0xe8272c) : _0xe8272c(_0x577b9b);
});
}
window.navigator.userAgent.includes('Edge') && _0x2b30e0(_0x1e6644);
}
},
_0x2593da = _0x7f9f36;
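// _0x28bdca looks like a Babel-style defineProperties helper; _0x238270 and _0x3c983f below
// appear to handle fullscreen-change events and the scroll-lock fallback used when native
// fullscreen is unavailable.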
function _0x28bdca(_0x5c50c3, _0x217698) {
for (var _0x2c1832 = 0x0; _0x2c1832 < _0x217698.length; _0x2c1832++) {
var _0x172cdb = _0x217698[_0x2c1832];
_0x172cdb.enumerable = _0x172cdb.enumerable || !0x1, _0x172cdb.configurable = true, 'value' in _0x172cdb && (_0x172cdb.writable = true), Object.defineProperty(_0x5c50c3, _0x172cdb.key, _0x172cdb);
}
}
function _0x238270() {
if (this.enabled) {
var _0xa88a13 = this.player.elements.buttons.fullscreen;
_0x1e2c68.element(_0xa88a13) && (_0xa88a13.pressed = this.active), _0xbae705.call(this.player, this.target, this.active ? 'enterfullscreen' : 'exitfullscreen', true), _0x59aa33.isIos || _0x5e0c7d.call(this.player, this.target, this.active);
}
}
function _0x3c983f() {
var _0xa88a13 = arguments.length > 0x0 && void 0x0 !== arguments[0x0] && arguments[0x0];
_0xa88a13 ? this.scrollPosition = {
'x': window.scrollX || 0x0,
'y': window.scrollY || 0x0
} : window.scrollTo(this.scrollPosition.x, this.scrollPosition.y), document.body.style.overflow = _0xa88a13 ? 'hidden' : '', _0x3a8e2f(this.target, this.player.config.classNames.fullscreen.fallback, _0xa88a13), _0x238270.call(this);
}
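// _0x335854 appears to be the fullscreen manager: it detects the vendor prefix, exposes
// enter/exit/toggle plus active/enabled getters, and targets the game element on Edge.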
var _0x335854 = function() {
function _0x1339df(_0x5f19c6) {
var _0x2c1832 = this;
! function(_0x143a8a, _0x32a0a1) {
if (!(_0x143a8a instanceof _0x32a0a1)) throw new TypeError('Cannot call a class as a function');
}(this, _0x1339df), this.player = _0x5f19c6, this.prefix = _0x1339df.prefix, this.property = _0x1339df.property, this.scrollPosition = {
'x': 0x0,
'y': 0x0
}, _0x1093f4.call(this.player, document, 'ms' === this.prefix ? 'MSFullscreenChange' : '' .concat(this.prefix, 'fullscreenchange'), function() {
_0x238270.call(_0x2c1832);
}), this.update();
}
var _0x17edbf, _0x2c1832, _0x50d347;
_0x17edbf = _0x1339df;
_0x50d347 = [{
'key': 'native',
'get': function() {
return !!(document.fullscreenEnabled || document.webkitFullscreenEnabled || document.mozFullScreenEnabled || document.msFullscreenEnabled);
}
}, {
'key': 'prefix',
'get': function() {
if (_0x1e2c68.function(document.exitFullscreen)) return '';
var _0x1339df = '';
return ['webkit', 'moz', 'ms'].some(function(_0xc1e0c3) {
return !(!_0x1e2c68.function(document['' .concat(_0xc1e0c3, 'ExitFullscreen')]) && !_0x1e2c68.function(document['' .concat(_0xc1e0c3, 'CancelFullScreen')]) || (_0x1339df = _0xc1e0c3, 0x0));
}), _0x1339df;
}
}, {
'key': 'property',
'get': function() {
return 'moz' === this.prefix ? 'FullScreen' : 'Fullscreen';
}
}];
_0x2c1832 = [{
'key': 'update',
'value': function() {
_0x3a8e2f(this.player.elements.container, this.player.config.classNames.fullscreen.enabled, this.enabled);
}
}, {
'key': 'enter',
'value': function() {
this.enabled && (_0x1339df.native ? this.prefix ? _0x1e2c68.empty(this.prefix) || this.target['' .concat(this.prefix, 'Request').concat(this.property)]() : this.target.requestFullscreen() : _0x3c983f.call(this, true));
}
}, {
'key': 'exit',
'value': function() {
if (this.enabled)
if (_0x1339df.native)
if (this.prefix) {
if (!_0x1e2c68.empty(this.prefix)) {
var _0x17edbf = 'moz' === this.prefix ? 'Cancel' : 'Exit';
document['' .concat(this.prefix).concat(_0x17edbf).concat(this.property)]();
}
} else(document.cancelFullScreen || document.exitFullscreen).call(document);
else _0x3c983f.call(this, !0x1);
}
}, {
'key': 'toggle',
'value': function() {
this.active ? this.exit() : this.enter();
}
}, {
'key': 'enabled',
'get': function() {
return _0x1339df.native;
}
}, {
'key': 'active',
'get': function() {
return !!this.enabled && (_0x1339df.native ? (this.prefix ? document['' .concat(this.prefix).concat(this.property, 'Element')] : document.fullscreenElement) === this.target : _0x350d73(this.target, this.player.config.classNames.fullscreen.fallback));
}
}, {
'key': 'target',
'get': function() {
return _0x59aa33.isEdge ? this.player.game : this.player.elements.container;
}
}];
_0x28bdca(_0x17edbf.prototype, _0x2c1832);
_0x50d347 && _0x28bdca(_0x17edbf, _0x50d347);
return _0x1339df;
}();
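// _0x42b437 appears to be a flattened Babel _slicedToArray helper used to destructure
// [deltaX, deltaY] pairs in the volume wheel handler.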
function _0x42b437(_0x458e02, _0x5729e1) {
    if (Array.isArray(_0x458e02)) return _0x458e02;
    if (!(Symbol.iterator in Object(_0x458e02))) throw new TypeError('Invalid attempt to destructure non-iterable instance');
    var _0x2c1832 = [],
        _0x2e7aa7 = true,
        _0x393a5f = false,
        _0x3b4eca = void 0x0;
    try {
        for (var _0x1aca09, _0x1bb8de = _0x458e02[Symbol.iterator](); !(_0x2e7aa7 = (_0x1aca09 = _0x1bb8de.next()).done) && (_0x2c1832.push(_0x1aca09.value), !_0x5729e1 || _0x2c1832.length !== _0x5729e1); _0x2e7aa7 = true);
    } catch (_0x190997) {
        _0x393a5f = true, _0x3b4eca = _0x190997;
    } finally {
        try {
            _0x2e7aa7 || null == _0x1bb8de.return || _0x1bb8de.return();
        } finally {
            if (_0x393a5f) throw _0x3b4eca;
        }
    }
    return _0x2c1832;
}
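// _0x37093c appears to be the event-listener manager: global shortcuts (F2/F4 quick save/load,
// F9 mute), container resize/fullscreen/focus handling, media events and control-bar bindings.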
var _0x37093c = function() {
function _0x31e271(_0x948a97) {
! function(_0x3adedc, _0x5952e9) {
if (!(_0x3adedc instanceof _0x5952e9)) throw new TypeError('Cannot call a class as a function');
}(this, _0x31e271), this.emulator = _0x948a97, this.lastKey = null, this.focusTimer = null, this.lastKeyDown = null, this.toggleMenu = this.toggleMenu.bind(this), this.setTabFocus = this.setTabFocus.bind(this), this.firstTouch = this.firstTouch.bind(this);
}
_0x31e271.prototype = {
toggleMenu: function(_0x3156ad) {
_0x2593da.toggleMenu.call(this.emulator, _0x3156ad);
},
firstTouch: function() {
var _0x31e271 = this.emulator,
_0x17edbf = _0x31e271.elements;
_0x31e271.touch = true, _0x3a8e2f(_0x17edbf.container, _0x31e271.config.classNames.isTouch, true);
},
setTabFocus: function(_0x20bf1f) {
var _0x17edbf = this.emulator,
_0x2c1832 = _0x17edbf.elements,
_0x48d087 = _0x17edbf;
if (clearTimeout(this.focusTimer), 'keydown' !== _0x20bf1f.type || 0x9 === _0x20bf1f.which) {
'keydown' === _0x20bf1f.type && (this.lastKeyDown = _0x20bf1f.timeStamp);
var _0x27d4ad, _0x967a30 = _0x20bf1f.timeStamp - this.lastKeyDown <= 0x14;
('focus' !== _0x20bf1f.type || _0x967a30) && (_0x27d4ad = _0x48d087.config.classNames.tabFocus, _0x3a8e2f(_0x23ffa1.call(_0x48d087, '.' .concat(_0x27d4ad)), _0x27d4ad, !0x1), this.focusTimer = setTimeout(function() {
var _0x20bf1f = document.activeElement;
_0x2c1832.container.contains(_0x20bf1f) && _0x3a8e2f(document.activeElement, _0x48d087.config.classNames.tabFocus, true);
}, 0xa));
}
},
global: function() {
var _0x31e271 = !(arguments.length > 0x0 && void 0x0 !== arguments[0x0]) || arguments[0x0],
_0x17edbf = this.emulator,
_0x2c1832 = _0x17edbf;
_0x1ef215.call(_0x2c1832, document.body, 'click', this.toggleMenu, _0x31e271), _0x455c85.call(_0x2c1832, document.body, 'touchstart', this.firstTouch), _0x1ef215.call(_0x2c1832, document.body, 'keydown focus blur', this.setTabFocus, _0x31e271, !0x1, true), _0x1093f4.call(_0x2c1832, document.body, 'keyup', function(_0x4e289a) {
_0x2c1832.started && (0x71 === _0x4e289a.which && (_0x4e289a.shiftKey ? null === _0x2c1832.elements.buttons.saveState.getAttribute('hidden') && _0xbae705.call(_0x2c1832, _0x2c1832.elements.buttons.saveState, 'click') : _0xdcec2a.quickSaveState()), 0x73 === _0x4e289a.which && (_0x4e289a.shiftKey ? null === _0x2c1832.elements.buttons.loadState.getAttribute('hidden') && _0xbae705.call(_0x2c1832, _0x2c1832.elements.buttons.loadState, 'click') : _0xdcec2a.quickLoadState()), 0x78 === _0x4e289a.which && _0xbae705.call(_0x2c1832, _0x2c1832.elements.buttons.mute, 'click'));
});
},
container: function() {
var _0x31e271, _0x17edbf = this.emulator,
_0x2c1832 = _0x17edbf.elements,
_0x45d275 = _0x17edbf;
_0x1093f4.call(_0x45d275, _0x2c1832.container, 'mousemove mouseleave touchstart touchmove enterfullscreen exitfullscreen start-game', function(_0x1ac81c) {
var _0x3953b5 = _0x2c1832.controls;
_0x3953b5 && 'enterfullscreen' === _0x1ac81c.type && (_0x3953b5.pressed = !0x1, _0x3953b5.hover = !0x1);
var _0x2c3de3 = 0x0;
if (['touchstart', 'touchmove', 'mousemove', 'start-game'].includes(_0x1ac81c.type)) {
if (_0x17edbf.system == 'nds' && ['touchstart', 'touchmove', 'mousemove'].includes(_0x1ac81c.type) && _0x27f4c4 && _0x27f4c4.Module && _0x27f4c4.Module.canvas && (document.pointerLockElement === _0x27f4c4.Module.canvas || document.mozPointerLockElement === _0x27f4c4.Module.canvas)) {
_0x5ab74d.toggleControls.call(_0x45d275, false);
return;
}
if (_0xdcec2a && (typeof _0xdcec2a.virtualGamepadContainer == 'undefined' || _0xdcec2a.virtualGamepadContainer === null)) {
_0x5ab74d.toggleControls.call(_0x45d275, true);
_0x2c3de3 = _0x45d275.touch ? 0xbb8 : 0x7d0;
clearTimeout(_0x31e271);
_0x31e271 = setTimeout(function() {
return _0x5ab74d.toggleControls.call(_0x45d275, !0x1);
}, _0x2c3de3);
_0x2c1832.controls.setAttribute('data-timer', _0x31e271);
}
};
}), _0x1093f4.call(_0x45d275, window, 'resize', function(_0x3e2cc5) {
var _0x17edbf = _0x45d275.elements.container.clientHeight,
_0x2c1832 = _0x23ffa1.call(_0x45d275, '.' .concat(_0x449eac({
'ejs__menu__container': true
}), ' [role="menu"]'));
Array.from(_0x2c1832).forEach(function(_0x43a16d) {
_0x43a16d.style.maxHeight = '' .concat(_0x17edbf - 0x5f, 'px'), _0x43a16d.style.overflow = 'auto';
}), _0x27f4c4.Module && _0x27f4c4.Module.canvas, _0x45d275.elements.container.clientWidth / _0x45d275.elements.container.clientHeight < 1.3 ? _0x3a8e2f(_0x45d275.elements.container, _0x449eac({
'portrait': true
}), true) : _0x3a8e2f(_0x45d275.elements.container, _0x449eac({
'portrait': true
}), !0x1);
}), _0xbae705.call(_0x45d275, window, 'resize'), _0x1093f4.call(_0x45d275, _0x45d275.elements.container, 'enterfullscreen', function(_0x586586) {
setTimeout(function() {
_0xbae705.call(_0x45d275, window, 'resize');
}, 0x12c);
}), _0x1093f4.call(_0x45d275, _0x45d275.elements.container, 'exitfullscreen', function(_0x4de2f3) {
setTimeout(function() {
_0xbae705.call(_0x45d275, window, 'resize');
}, 0x12c);
}), _0x1093f4.call(_0x45d275, _0x45d275.elements.container, 'start-game', function(_0x5b0a00) {
var _0x17edbf = window.document.createEvent('UIEvents');
_0x17edbf.initUIEvent('resize', true, !0x1, window, 0x0), window.dispatchEvent(_0x17edbf), _0x3a8e2f(_0x45d275.elements.container, _0x449eac({
'game-started': true
}), true);
}), _0x1093f4.call(_0x45d275, _0x45d275.elements.container, 'blur', function(_0x893c7a) {
var _0x17edbf = _0x893c7a.currentTarget;
setTimeout(function() {
_0x17edbf.contains(document.activeElement) || _0xbae705.call(_0x45d275, _0x45d275.elements, 'blurgame');
}, 0x0);
}), _0x1093f4.call(_0x45d275, _0x45d275.elements.container, 'focus', function(_0xe0dee3) {
var _0x17edbf = _0xe0dee3.currentTarget;
setTimeout(function() {
_0x17edbf.contains(document.activeElement) && _0xbae705.call(_0x45d275, _0x45d275.elements, 'focusgame');
}, 0x0);
});
},
media: function() {
var _0x31e271 = this.emulator,
_0x17edbf = _0x31e271.elements,
_0x2c1832 = _0x31e271;
_0x1093f4.call(_0x2c1832, _0x2c1832.game, 'volumechange', function(_0x3099fe) {
return _0x2593da.updateVolume.call(_0x2c1832, _0x3099fe);
}), _0x1093f4.call(_0x2c1832, _0x17edbf.container, 'contextmenu', function(_0x5c22bc) {
_0x2c1832.touch || _0x2c1832.lightgun || _0x2593da.toggleContextMenu.call(_0x2c1832, _0x5c22bc, true), _0x5c22bc.preventDefault();
}, !0x1), _0x1093f4.call(_0x2c1832, _0x17edbf.container, 'mousewheel', function(_0x49c974) {
_0x49c974.stopPropagation();
}, !0x1), _0x1093f4.call(_0x2c1832, _0x17edbf.container, 'mousedown', function(_0x53ee35) {
if (_0x31e271.system == 'nds' && _0x27f4c4 && _0x27f4c4.Module && _0x27f4c4.Module.canvas && ! _0x27f4c4.isMobileDevice && !(document.pointerLockElement === _0x27f4c4.Module.canvas || document.mozPointerLockElement === _0x27f4c4.Module.canvas) && _0x31e271.started && !_0x31e271.connected) {
setTimeout(function() {
if (_0x31e271.onButtonDiv !== true) {
_0x27f4c4.Module.canvas.requestPointerLock = _0x27f4c4.Module.canvas.requestPointerLock || _0x27f4c4.Module.canvas.mozRequestPointerLock;
_0x27f4c4.Module.canvas.requestPointerLock();
}
_0x31e271.onButtonDiv = false;
}, 100);
}
_0x2c1832.touch || _0x2593da.toggleContextMenu.call(_0x2c1832, _0x53ee35, !0x1);
}, !0x1), _0x1093f4.call(_0x2c1832, _0x2c1832.game, 'volumechange', function() {
_0x2c1832.storage.set({
'volume': _0x2c1832.volume,
'muted': _0x2c1832.muted
}), _0xdcec2a.setVolume && _0xdcec2a.setVolume();
});
var _0x38e17f = _0x2c1832.config.events.concat(['keyup', 'keydown']).join(' ');
_0x1093f4.call(_0x2c1832, _0x2c1832.game, _0x38e17f, function(_0x38e438) {
var _0x2b4804 = _0x38e438.detail,
_0x1bfeb2 = void 0x0 === _0x2b4804 ? {} : _0x2b4804;
'error' === _0x38e438.type && (_0x1bfeb2 = _0x2c1832.game.error), _0xbae705.call(_0x2c1832, _0x17edbf.container, _0x38e438.type, true, _0x1bfeb2);
});
},
proxy: function(_0x52d0c2, _0x500f90, _0x175ce5) {
var _0x4bb44e = this.emulator,
_0x584a64 = _0x4bb44e.config.listeners[_0x175ce5],
_0x5269fd = true;
_0x1e2c68.function(_0x584a64) && (_0x5269fd = _0x584a64.call(_0x4bb44e, _0x52d0c2)), _0x5269fd && _0x1e2c68.function(_0x500f90) && _0x500f90.call(_0x4bb44e, _0x52d0c2);
},
bind: function(_0x4a8d46, _0x8f91ad, _0x2affda, _0x41dfb1) {
var _0x1363b1 = this,
_0x53b9a1 = !(arguments.length > 0x4 && void 0x0 !== arguments[0x4]) || arguments[0x4],
_0x5209a5 = this.emulator,
_0x27b738 = _0x5209a5,
_0x5d89ff = _0x27b738.config.listeners[_0x41dfb1],
_0x100c9f = _0x1e2c68.function(_0x5d89ff);
_0x1093f4.call(_0x27b738, _0x4a8d46, _0x8f91ad, function(_0x5a03ac) {
return _0x1363b1.proxy(_0x5a03ac, _0x2affda, _0x41dfb1);
}, _0x53b9a1 && !_0x100c9f);
},
controls: function() {
var _0x31e271 = this,
_0x17edbf = this.emulator,
_0x2c1832 = _0x17edbf.elements,
_0x127f22 = _0x59aa33.isIE ? 'change' : 'input';
if (this.bind(_0x2c1832.buttons.mute, 'click', function() {
_0x17edbf.muted = !_0x17edbf.muted;
}, 'mute'), this.bind(_0x2c1832.buttons.fullscreen, 'click', function() {
_0x17edbf.fullscreen.toggle();
}, 'fullscreen'), this.bind(_0x2c1832.buttons.gamepad, 'click', function() {
_0x5ab74d.toggleControls.call(_0x17edbf, !0x1), _0x132da7(_0x2c1832.dialogs.gamepad, !0x1);
}, 'mute'), this.bind(_0x2c1832.buttons.saveState, 'click', function() {
_0xbae705.call(_0x17edbf, _0x17edbf.elements.container, 'savestate-start', !0x1, {});
var _0x31e271 = _0xdcec2a.saveState(),
_0x2c1832 = _0xdcec2a.getScreenData();
_0xbae705.call(_0x17edbf, _0x17edbf.elements.container, 'savestate', !0x1, {
'state': _0x31e271,
'screenshot': _0x2c1832
});
}, 'mute'), this.bind(_0x2c1832.buttons.loadState, 'click', function() {
_0xbae705.call(_0x17edbf, _0x17edbf.elements.container, 'loadstate', !0x1, {});
}, 'mute'), this.bind(_0x2c1832.buttons.settings, 'click', function(_0x1396ee) {
_0x1396ee.stopPropagation(), _0x2593da.toggleMenu.call(_0x17edbf, _0x1396ee);
}), this.bind(_0x2c1832.buttons.settings, 'keyup', function(_0x454936) {
var _0x2c1832 = _0x454936.which;
[0xd, 0x20].includes(_0x2c1832) && (0xd !== _0x2c1832 ? (_0x454936.preventDefault(), _0x454936.stopPropagation(), _0x2593da.toggleMenu.call(_0x17edbf, _0x454936)) : _0x2593da.focusFirstMenuItem.call(_0x17edbf, null, true));
}, null, !0x1), this.bind(_0x2c1832.settings.menu, 'keydown', function(_0x40072c) {
0x1b === _0x40072c.which && _0x2593da.toggleMenu.call(_0x17edbf, _0x40072c);
}), _0x59aa33.isIos) {
var _0x4bd188 = _0x23ffa1.call(_0x17edbf, 'input[type="range"]');
Array.from(_0x4bd188).forEach(function(_0x345e4c) {
return _0x31e271.bind(_0x345e4c, _0x127f22, function(_0x58e6a9) {
return _0x2b30e0(_0x58e6a9.target);
});
});
}
_0x59aa33.isWebkit && Array.from(_0x23ffa1.call(_0x17edbf, 'input[type="range"]')).forEach(function(_0x45c503) {
_0x31e271.bind(_0x45c503, 'input', function(_0x436691) {
return _0x2593da.updateRangeFill.call(_0x17edbf, _0x436691.target);
});
}), this.bind(_0x2c1832.inputs.volume, _0x127f22, function(_0x42307e) {
_0x17edbf.volume = _0x42307e.target.value;
}, 'volume'), this.bind(_0x2c1832.controls, 'mouseenter mouseleave', function(_0x10170b) {
'mouseleave' === _0x10170b.type && null === _0x10170b.toElement ? _0x2c1832.controls.hover = true : _0x2c1832.controls.hover = !_0x17edbf.touch && 'mouseenter' === _0x10170b.type;
}), this.bind(_0x2c1832.controls, 'mousedown mouseup touchstart touchend touchcancel', function(_0x4bfb22) {
if (['mousedown', 'touchstart'].includes(_0x4bfb22.type)) {
_0x2c1832.controls.pressed = true;
var _0x17edbf = _0x2c1832.controls.getAttribute('data-timer2');
clearTimeout(_0x17edbf);
} else {
var _0x29ba04 = setTimeout(function() {
_0x2c1832.controls.pressed = !0x1;
}, 0xfa0);
_0x2c1832.controls.setAttribute('data-timer2', _0x29ba04);
}
}), this.bind(_0x2c1832.controls, 'focusin focusout', function(_0x54e607) {
var _0x466a37 = _0x17edbf.config,
_0x2ab6f3 = _0x17edbf.elements,
_0x5f0669 = 'focusin' === _0x54e607.type,
_0x11de96 = _0x2ab6f3.controls.getAttribute('data-timer');
if (_0x3a8e2f(_0x2ab6f3.controls, _0x466a37.classNames.noTransition, _0x5f0669), _0x5ab74d.toggleControls.call(_0x17edbf, _0x5f0669), _0x5f0669) {
setTimeout(function() {
_0x3a8e2f(_0x2ab6f3.controls, _0x466a37.classNames.noTransition, !0x1);
}, 0x0);
var _0x314d11 = _0x31e271.touch ? 0xbb8 : 0xfa0;
clearTimeout(_0x11de96), _0x11de96 = setTimeout(function() {
return _0x5ab74d.toggleControls.call(_0x17edbf, !0x1);
}, _0x314d11), _0x2ab6f3.controls.setAttribute('data-timer', _0x11de96);
}
}), this.bind(_0x2c1832.inputs.volume, 'wheel', function(_0x28b2d2) {
var _0x2c1832 = _0x28b2d2.webkitDirectionInvertedFromDevice,
_0x12883c = _0x42b437([_0x28b2d2.deltaX, -_0x28b2d2.deltaY].map(function(_0x1ce956) {
return _0x2c1832 ? -_0x1ce956 : _0x1ce956;
}), 0x2),
_0x247794 = _0x12883c[0x0],
_0x5dcee2 = _0x12883c[0x1],
_0x5691ae = Math.sign(Math.abs(_0x247794) > Math.abs(_0x5dcee2) ? _0x247794 : _0x5dcee2);
_0x17edbf.increaseVolume(_0x5691ae / 0x32);
var _0x47df17 = _0x17edbf.game.volume;
(0x1 === _0x5691ae && _0x47df17 < 0x1 || -0x1 === _0x5691ae && _0x47df17 > 0x0) && _0x28b2d2.preventDefault();
}, 'volume', !0x1), this.bind(_0x2c1832.controls, 'contextmenu', function(_0x1db0c5) {
_0x1db0c5.stopPropagation();
}), this.bind(_0x2c1832.contextMenu, 'contextmenu', function(_0x37851d) {
_0x37851d.stopPropagation();
});
}
}
return _0x31e271;
}();
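// _0x1a3a6c is another defineProperties-style helper; _0x3dbc76 below appears to be the main
// player class: its constructor normalizes the config and data paths, builds the UI, wires the
// listeners and fullscreen handling, and loads webrtc-adapter.js.
// Usage sketch (assumption; option names are inferred from this constructor and may not match
// the published API):
//   new _0x3dbc76('#game', { system: 'nes', dataPath: '/data/', startOnLoad: true });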
function _0x1a3a6c(_0x5a5a15, _0x4e2d69) {
for (var _0x2c1832 = 0x0; _0x2c1832 < _0x4e2d69.length; _0x2c1832++) {
var _0x2e1c57 = _0x4e2d69[_0x2c1832];
_0x2e1c57.enumerable = _0x2e1c57.enumerable || false, _0x2e1c57.configurable = true, 'value' in _0x2e1c57 && (_0x2e1c57.writable = true), Object.defineProperty(_0x5a5a15, _0x2e1c57.key, _0x2e1c57);
}
}
var _0x3dbc76 = function() {
function _0x6954aa(_0x28cce1, _0x2ba0e6) {
var _0x5938bc = this;
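// _0x2136bc: appears to resolve a possibly-relative path against the current page URL,
// collapsing '.' and '..' segments and optionally forcing a trailing slash for directories.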
var _0x2136bc = function(path, isDirectory) {
var finpath = window.location.pathname.split('/').pop();
var finalpath = window.location.pathname.substring(0, window.location.pathname.length - finpath.length);
var split3 = finalpath.split('/')
var split2 = path.split('/')
var split1 = [ ]
for (var i=0; i<split3.length; i++) {
if (split3[i] != '') {
split1.push(split3[i])
}
}
if (! path.startsWith('/') && path.split('://').length == 1 && path.split('http:').length == 1 && path.split('https:').length == 1 && path.split('file:').length == 1) {
for (var w=0; w<split2.length; w++) {
if (split2[w] == '' || split2[w] == '.') {
} else if (split2[w] == '..') {
if (split1.length > 0) {
var split1 = function(origpath) {
var fullrequestpath = origpath
var finpath = fullrequestpath.split('/').pop()
var finalpath = fullrequestpath.substring(0, fullrequestpath.length - finpath.length)
if (origpath == '/') {
return '/'
} else {
return finalpath
}
}(split1.join('/')).split('/');
}
} else {
split1.push(split2[w]);
}
}
var path = split1.join('/')
if (! path.startsWith('/')) {
var path = '/' + path;
}
path = window.location.protocol + '//' + window.location.host + path;
}
if (path.startsWith('/')) {
path = window.location.protocol + '//' + window.location.host + path;
}
if (! path.endsWith('/') && isDirectory) {
path = path + '/';
}
return path
}
if (function(_0x154660, _0x15626f) {
if (!(_0x154660 instanceof _0x15626f)) throw new TypeError('Cannot call a class as a function');
}(this, _0x6954aa),
this.version = _0x3d61f9.a,
this.system = '',
this.adUrl = null,
this.gameName = null,
this.loadStateOnStart = false,
this.statesSupported = true,
this.listUrl = 'https://ws.emulatorjs.com/',
this.socketUrl = 'wss://ws.emulatorjs.com/',
this.startOnLoad = false,
this.dataPath = '',
this.customPaths = null,
this.hash = '',
this.lightgun = false,
this.mouse = false,
this.multitap = false,
this.ready = false,
this.paused = true,
this.started = !0x1,
this.touch = _0x2d904a.touch,
this.game = _0x28cce1,
_0x1e2c68.string(this.game) && (this.game = document.querySelectorAll(this.game)),
(window.jQuery && this.game instanceof jQuery || _0x1e2c68.nodeList(this.game) || _0x1e2c68.array(this.game)) && (this.game = this.game[0x0]),
this.game = this.game,
this.game.innerHTML = '',
this.config = _0x5dc0c0({}, _0x39ca5e, _0x6954aa.defaults, _0x2ba0e6 || {}),
this.lightgun = this.config.lightgun,
this.loadStateOnStart = this.config.loadStateOnStart || false,
this.adUrl = this.config.adUrl || null,
this.gameName = this.config.gameName || null,
this.mouse = this.config.mouse,
this.multitap = this.config.multitap,
this.cheats = this.config.cheats,
this.cheats || (this.cheats = []),
this.listUrl = this.config.netplayUrl || 'https://ws.emulatorjs.com/',
this.socketUrl = this.config.netplayUrl || 'wss://ws.emulatorjs.com/',
this.color = this.config.color,
this.startOnLoad = this.config.startOnLoad || false,
this.statesSupported = this.config.statesSupported !== false, // only an explicit false disables save states
this.customPaths = function(paths) {
if (! paths) return null;
var newPaths = {};
for (var k in paths) {
if (typeof paths[k] == 'string') {
newPaths[k] = _0x2136bc(paths[k]);
}
}
return newPaths;
}(this.config.paths),
this.dataPath = function(path) {
if (typeof path != 'string') {
return function(origpath) {
var fullrequestpath = origpath
var finpath = fullrequestpath.split('/').pop()
var finalpath = fullrequestpath.substring(0, fullrequestpath.length - finpath.length)
if (origpath == '/') {
return window.location.protocol + '//' + window.location.host + '/';
} else {
if (finalpath.startsWith('/')) {
finalpath = window.location.protocol + '//' + window.location.host + finalpath;
}
if (! finalpath.endsWith('/')) {
finalpath = finalpath + '/';
}
return finalpath
}
}(window.location.href);
}
return _0x2136bc(path, true);
}(this.config.dataPath),
Object.keys(this.config.classNames).forEach(function(_0x1d6b41) {
var _0x28cce1 = _0x5938bc;
if (_0x1e2c68.object(_0x28cce1.config.classNames[_0x1d6b41])) Object.keys(_0x28cce1.config.classNames[_0x1d6b41]).forEach(function(_0x42f7f2) {
var _0x2ba0e6 = {};
_0x2ba0e6[_0x5938bc.config.classNames[_0x1d6b41][_0x42f7f2]] = true, _0x5938bc.config.classNames[_0x1d6b41][_0x42f7f2] = _0x449eac(_0x2ba0e6);
});
else {
var _0x2ba0e6 = {};
_0x2ba0e6[_0x5938bc.config.classNames[_0x1d6b41]] = true, _0x5938bc.config.classNames[_0x1d6b41] = _0x449eac(_0x2ba0e6);
}
}), this.config.selectors.controls.wrapper = '.'.concat(_0x449eac({
'ejs__controls': true
})), this.config.selectors.container = '.'.concat(_0x449eac({
'ejs': true
})), this.system = this.config.system, this.elements = {
'container': null,
'buttons': {},
'display': {},
'inputs': {},
'settings': {
'popup': null,
'menu': null,
'panels': {},
'buttons': {}
},
'dialogs': {
'gamepad': null,
'loading': null,
'netplay': null
},
'widgets': {}
}, this.fullscreen = {
'active': false
}, this.game.emulator) console.warn('Target already setup');
else {
this.config.mute = true, this.eventListeners = [], this.listeners = new _0x37093c(this), this.storage = new _0x2f61ba(this, 'ejs_settings'), this.game.emulator = this, _0x1e2c68.element(this.elements.container) || (this.elements.container = _0x428003('div'), _0x580edd(this.game, this.elements.container));
var _0x5e63f0 = _0x428003('div', {
'class': _0x449eac({
'ejs__widgets': true
})
});
this.elements.container.appendChild(_0x5e63f0);
var _0x32c8af = _0x428003('div', {
'class': _0x449eac({
'ejs__dialogs': true
})
});
this.elements.container.appendChild(_0x32c8af), _0x5ab74d.addStyleHook.call(this), _0xdcec2a.setup.call(this), _0x5ab74d.build.call(this), this.listeners.container(), this.listeners.global(), this.fullscreen = new _0x335854(this), _0x27f4c4.create.call(this);
var _0x446e06 = document.createElement('script');
_0x446e06.src = (this.customPaths && typeof this.customPaths['webrtc-adapter.js'] == 'string') ? this.customPaths['webrtc-adapter.js'] : (this.dataPath + 'webrtc-adapter.js');
document.body.appendChild(_0x446e06);
}
}
var _0x17edbf, _0x2c1832, _0x423c42;
_0x17edbf = _0x6954aa, _0x423c42 = [{
'key': 'setup',
'value': function(_0x3bb461) {
var _0x2c1832 = arguments.length > 0x1 && void 0x0 !== arguments[0x1] ? arguments[0x1] : {},
_0x3eda14 = null;
return _0x1e2c68.string(_0x3bb461) ? _0x3eda14 = Array.from(document.querySelectorAll(_0x3bb461)) : _0x1e2c68.nodeList(_0x3bb461) ? _0x3eda14 = Array.from(_0x3bb461) : _0x1e2c68.array(_0x3bb461) && (_0x3eda14 = _0x3bb461.filter(_0x1e2c68.element)), _0x1e2c68.empty(_0x3eda14) ? null : _0x3eda14.map(function(_0x176f95) {
return new _0x6954aa(_0x176f95, _0x2c1832);
});
}
}], (_0x2c1832 = [{
'key': 'restart',
'value': function() {
this.currentTime = 0x0;
}
}, {
'key': 'increaseVolume',
'value': function(_0x2881c7) {
var _0x17edbf = this.game.muted ? 0x0 : this.volume;
this.volume = _0x17edbf + (_0x1e2c68.number(_0x2881c7) ? _0x2881c7 : 0x0);
}
}, {
'key': 'decreaseVolume',
'value': function(_0x492b2e) {
this.increaseVolume(-_0x492b2e);
}
}, {
'key': 'toggleControls',
'value': function(_0x1c82b4) {
if (this.started) {
var _0x17edbf = void 0x0 === _0x1c82b4 ? void 0x0 : !_0x1c82b4,
_0x2c1832 = _0x3a8e2f(this.elements.container, this.config.classNames.hideControls, _0x17edbf);
return _0x2c1832 && _0x2593da.toggleMenu.call(this, !0x1), !_0x2c1832;
}
return !0x1;
}
}, {
'key': 'setStatesSupported',
'value': function(newValue) {
this.statesSupported = newValue;
}
}, {
'key': 'loadState',
'value': function(_0x3c6414) {
_0xdcec2a.loadState(_0x3c6414, 0x0);
}
}, {
'key': 'on',
'value': function(_0x44ca1b, _0x2e6552) {
_0x1093f4.call(this, this.elements.container, _0x44ca1b, _0x2e6552);
}
}, {
'key': 'once',
'value': function(_0x9f0e03, _0x44b98a) {
_0x455c85.call(this, this.elements.container, _0x9f0e03, _0x44b98a);
}
}, {
'key': 'off',
'value': function(_0x343615, _0x2023ff) {
_0x20109b(this.elements.container, _0x343615, _0x2023ff);
}
}, {
'key': 'playing',
'set': function() {
var _0x6954aa = !(arguments.length > 0x0 && void 0x0 !== arguments[0x0]) || arguments[0x0];
Array.from(this.elements.buttons.play || []).forEach(function(_0x4a27d3) {
_0x4a27d3.pressed = _0x6954aa;
}), this.paused = !_0x6954aa;
},
'get': function() {
return Boolean(this.ready && !this.paused);
}
}, {
'key': 'volume',
'set': function(_0x52441c) {
var _0x17edbf = _0x52441c;
_0x1e2c68.string(_0x17edbf) && (_0x17edbf = Number(_0x17edbf)), _0x1e2c68.number(_0x17edbf) || (_0x17edbf = this.storage.get('volume')), _0x1e2c68.number(_0x17edbf) || (_0x17edbf = this.config.volume), _0x17edbf > 0x1 && (_0x17edbf = 0x1), _0x17edbf < 0x0 && (_0x17edbf = 0x0), this.config.volume = _0x17edbf, this.game.volume = _0x17edbf, !_0x1e2c68.empty(_0x52441c) && this.muted && _0x17edbf > 0x0 && (this.muted = !0x1);
},
'get': function() {
return Number(this.game.volume);
}
}, {
'key': 'muted',
'set': function(_0x218306) {
var _0x17edbf = _0x218306;
_0x1e2c68.boolean(_0x17edbf) || (_0x17edbf = this.storage.get('muted')), _0x1e2c68.boolean(_0x17edbf) || (_0x17edbf = this.config.muted), this.config.muted = _0x17edbf, this.game.muted = _0x17edbf;
},
'get': function() {
return Boolean(this.game.muted);
}
}])
_0x1a3a6c(_0x17edbf.prototype, _0x2c1832)
_0x423c42 && _0x1a3a6c(_0x17edbf, _0x423c42)
return _0x6954aa;
}();
_0x3dbc76.defaults = {};
_0x17edbf.default = _0x3dbc76;
}
|
},
'pcsx_rearmed_vibration': {
'label': 'Enable Vibration',
|
mod.rs
|
//! Tests auto-converted from "sass-spec/spec/css/plain/import"
#[allow(unused)]
use super::rsass;
// From "sass-spec/spec/css/plain/import/css_before_index.hrx"
#[test]
#[ignore] // wrong result
fn css_before_index() {
assert_eq!(
rsass(
"@import \'other\';\
\n"
)
.unwrap(),
"other {\
\n index: false;\
\n}\
\n"
);
}
// From "sass-spec/spec/css/plain/import/in_css.hrx"
#[test]
#[ignore] // wrong result
fn in_css() {
assert_eq!(
rsass(
"@import \"plain\";\
\n"
)
.unwrap(),
"@import \"whatever\";\
\n"
);
}
// From "sass-spec/spec/css/plain/import/partial_conflict.hrx"
// Ignoring "partial_conflict", error tests are not supported yet.
// From "sass-spec/spec/css/plain/import/sass_takes_precedence.hrx"
#[test]
#[ignore] // wrong result
fn sass_takes_precedence()
|
// From "sass-spec/spec/css/plain/import/scss_takes_precedence.hrx"
#[test]
#[ignore] // wrong result
fn scss_takes_precedence() {
assert_eq!(
rsass(
"@import \"other\";\
\n"
)
.unwrap(),
"other {\
\n syntax: scss;\
\n}\
\n"
);
}
|
{
assert_eq!(
rsass(
"@import \"other\";\
\n"
)
.unwrap(),
"other {\
\n syntax: sass;\
\n}\
\n"
);
}
|
s_t_client.go
|
// Code generated by go-swagger; DO NOT EDIT.
package client
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"github.com/go-openapi/runtime"
httptransport "github.com/go-openapi/runtime/client"
strfmt "github.com/go-openapi/strfmt"
"github.com/klingo/app/stapi/client/character"
)
// Default s t HTTP client.
var Default = NewHTTPClient(nil)
const (
// DefaultHost is the default Host
// found in Meta (info) section of spec file
DefaultHost string = "stapi.co"
// DefaultBasePath is the default BasePath
// found in Meta (info) section of spec file
DefaultBasePath string = "/api/v1/rest"
)
// DefaultSchemes are the default schemes found in Meta (info) section of spec file
var DefaultSchemes = []string{"http"}
// NewHTTPClient creates a new s t HTTP client.
func NewHTTPClient(formats strfmt.Registry) *ST {
return NewHTTPClientWithConfig(formats, nil)
}
// NewHTTPClientWithConfig creates a new s t HTTP client,
// using a customizable transport config.
|
}
// create transport and client
transport := httptransport.New(cfg.Host, cfg.BasePath, cfg.Schemes)
return New(transport, formats)
}
// New creates a new s t client
func New(transport runtime.ClientTransport, formats strfmt.Registry) *ST {
// ensure nullable parameters have default
if formats == nil {
formats = strfmt.Default
}
cli := new(ST)
cli.Transport = transport
cli.Character = character.New(transport, formats)
return cli
}
// DefaultTransportConfig creates a TransportConfig with the
// default settings taken from the meta section of the spec file.
func DefaultTransportConfig() *TransportConfig {
return &TransportConfig{
Host: DefaultHost,
BasePath: DefaultBasePath,
Schemes: DefaultSchemes,
}
}
// TransportConfig contains the transport related info,
// found in the meta section of the spec file.
type TransportConfig struct {
Host string
BasePath string
Schemes []string
}
// WithHost overrides the default host,
// provided by the meta section of the spec file.
func (cfg *TransportConfig) WithHost(host string) *TransportConfig {
cfg.Host = host
return cfg
}
// WithBasePath overrides the default basePath,
// provided by the meta section of the spec file.
func (cfg *TransportConfig) WithBasePath(basePath string) *TransportConfig {
cfg.BasePath = basePath
return cfg
}
// WithSchemes overrides the default schemes,
// provided by the meta section of the spec file.
func (cfg *TransportConfig) WithSchemes(schemes []string) *TransportConfig {
cfg.Schemes = schemes
return cfg
}
// ST is a client for s t
type ST struct {
Character *character.Client
Transport runtime.ClientTransport
}
// SetTransport changes the transport on the client and all its subresources
func (c *ST) SetTransport(transport runtime.ClientTransport) {
c.Transport = transport
c.Character.SetTransport(transport)
}
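// Hypothetical usage sketch (not part of the generated code; host and scheme
// values are assumed):
//
//	cfg := DefaultTransportConfig().WithHost("stapi.co").WithSchemes([]string{"https"})
//	client := NewHTTPClientWithConfig(nil, cfg)
//	_ = client.Character // sub-client for the character resource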
|
func NewHTTPClientWithConfig(formats strfmt.Registry, cfg *TransportConfig) *ST {
// ensure nullable parameters have default
if cfg == nil {
cfg = DefaultTransportConfig()
|
rotator.py
|
import random, copy
import cv2 as cv
from .augmenter import Augmenter
class Rotator(Augmenter):
'''
Augmenter that rotates the SampleImages randomly based on
the min_angle and max_angle parameters.
'''
def __init__(
self,
min_angle,
max_angle,
**kwargs
):
super().__init__(**kwargs)
self.min_angle = min_angle
self.max_angle = max_angle
def augment(self, sample):
im_h, im_w, _ = sample.image.shape
angle = random.uniform(self.min_angle, self.max_angle)
rotation_matrix = cv.getRotationMatrix2D(sample.roi_center, angle, 1)
rotated = cv.warpAffine(sample.image, rotation_matrix, (im_w, im_h))
|
return sample_copy
|
sample_copy = copy.copy(sample)
sample_copy.image = rotated
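# Hypothetical usage sketch (assumes a sample object exposing .image and
# .roi_center, as used by augment() above):
#   rotator = Rotator(min_angle=-15, max_angle=15)
#   rotated_sample = rotator.augment(sample)  # shallow copy with a rotated image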
|
logging.rs
|
//! Prints the runtime's execution log on the standard output.
use async_std::task;
fn main()
|
{
femme::with_level(log::LevelFilter::Trace);
task::block_on(async {
let handle = task::spawn(async {
log::info!("Hello world!");
});
handle.await;
})
}
|
|
read_service.go
|
// Copyright 2016-2021 The Libsacloud Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package database
import (
"context"
"github.com/sacloud/libsacloud/v2/sacloud"
)
func (s *Service) Read(req *ReadRequest) (*sacloud.Database, error) {
return s.ReadWithContext(context.Background(), req)
}
|
func (s *Service) ReadWithContext(ctx context.Context, req *ReadRequest) (*sacloud.Database, error) {
if err := req.Validate(); err != nil {
return nil, err
}
client := sacloud.NewDatabaseOp(s.caller)
return client.Read(ctx, req.Zone, req.ID)
}
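// Hypothetical usage sketch (service construction and field values assumed;
// see ReadRequest for the exact field types):
//
//	db, err := svc.Read(&ReadRequest{Zone: "is1a", ID: databaseID})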
| |
events_read.rs
|
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::EVENTS_READ {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = r" Value of the field"]
pub struct EVENTS_READR {
bits: bool,
}
impl EVENTS_READR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Proxy"]
pub struct _EVENTS_READW<'a> {
w: &'a mut W,
}
impl<'a> _EVENTS_READW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 0"]
#[inline]
pub fn events_read(&self) -> EVENTS_READR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) != 0
};
EVENTS_READR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 0"]
#[inline]
pub fn events_read(&mut self) -> _EVENTS_READW {
_EVENTS_READW { w: self }
}
}
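// Hypothetical usage sketch (peripheral handle name assumed; the accessors are
// the ones generated above):
//
//     let is_set = periph.events_read.read().events_read().bit_is_set();
//     periph.events_read.write(|w| w.events_read().clear_bit());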
|
#[doc = r" Value read from the register"]
|
|
jsonvalidator_test.go
|
package jsonvalidator_test
import (
"encoding/json"
"io/ioutil"
"os"
"path"
"runtime"
"testing"
"github.com/apidome/gateway/internal/pkg/validators/jsonvalidator"
)
//const succeed = "\u2713"
//const failed = "\u2717"
const succeed = "V"
const failed = "X"
type testCase struct {
Keyword string
Descriptions string `json:"description"`
Schema json.RawMessage `json:"schema"`
Path string `json:"path"`
Method string `json:"Method"`
Tests []struct {
Description string `json:"description"`
Data json.RawMessage `json:"data"`
Valid bool `json:"valid"`
} `json:"tests"`
}
func TestNewJsonValidator(t *testing.T) {
testCases := []struct {
draft string
valid bool
}{
{
"draft-07",
true,
},
{
"draft-06",
false,
},
{
"",
false,
},
}
t.Log("Given the need to test creation of new JsonValidator")
{
for index, testCase := range testCases {
t.Logf("\tTest %d: When trying to create a JsonValidator with %s", index, testCase.draft)
{
if testCase.valid {
if _, err := jsonvalidator.NewJsonValidator(testCase.draft); err != nil {
t.Errorf("\t%s\tShould be able to get a reference to a JsonValidator: %v", failed, err)
} else {
t.Logf("\t%s\tShould be able to get a reference to a JsonValidator", succeed)
}
} else {
if _, err := jsonvalidator.NewJsonValidator(testCase.draft); err == nil {
t.Errorf("\t%s\tShould not be able to get a reference to a JsonValidator", failed)
} else {
t.Logf("\t%s\tShould not be able to get a reference to a JsonValidator: %v", succeed, err)
}
}
}
}
}
}
func TestLoadSchema(t *testing.T) {
testCases := []struct {
description string
method string
path string
schema string
valid bool
}{
{
"the json boolean \"true\" as a schema",
"GET",
"/v1/a",
"true",
true,
},
{
"the json boolean \"false\" as a schema",
"GET",
"/v1/a",
"false",
true,
},
{
"empty json object as a schema",
"GET",
"/v1/a",
"{}",
true,
},
{
"a valid json schema as a schema",
"GET",
"/v1/a",
"{\"type\": \"string\"}",
true,
},
{
"a json object that contains only a non-standard keywords as a schema",
"GET",
"/v1/a",
"{\"someNonStandardKeyword\": 4}",
true,
},
{
"any json string as a schema",
"GET",
"/v1/a",
"'someJsonString'",
false,
},
{
"any json number as a schema",
"GET",
"/v1/a",
"45.7",
false,
},
{
"\"GET\" as method",
"GET",
"/v1/a",
"{}",
true,
},
{
"\"POST\" as method",
"POST",
"/v1/a",
"{}",
true,
},
{
"\"PUT\" as method",
"PUT",
"/v1/a",
"{}",
true,
},
{
"\"PATCH\" as method",
"PATCH",
"/v1/a",
"{}",
true,
},
{
"\"DELETE\" as method",
"DELETE",
"/v1/a",
"{}",
true,
},
{
"a schema that accepts only empty object or \"true\" using the enum keyword",
"GET",
"/v1/a",
`{
"enum": [{}, true]
}`,
true,
},
{
"a non-standard http method - \"GET1\" as method",
"GET1",
"/v1/a",
"{}",
false,
},
}
t.Log("Given the need to test loading of new json schema to JsonValidator")
{
jv, err := jsonvalidator.NewJsonValidator("draft-07")
if err != nil {
t.Fatalf("\t%s\tShould be able to create a new JsonValidator: %v", failed, err)
}
t.Logf("\t%s\tShould be able to create a new JsonValidator", succeed)
for index, testCase := range testCases {
t.Logf("\tTest %d: When trying to load %s", index, testCase.description)
{
err = jv.LoadSchema(testCase.path, testCase.method, []byte(testCase.schema))
if testCase.valid {
if err != nil {
t.Errorf("\t%s\tShould be able to Load schema: %v", failed, err)
} else {
t.Logf("\t%s\tShould be able to Load schema", succeed)
}
} else {
if err != nil {
t.Logf("\t%s\tShould not be able to Load schema: %v", succeed, err)
} else {
t.Errorf("\t%s\tShould not be able to Load schema", failed)
}
}
}
}
}
}
func
|
(t *testing.T) {
keywords := []string{"type", "enum", "const", "minLength", "maxLength", "pattern", "format", "multipleOf",
"minimum", "maximum", "exclusiveMinimum", "exclusiveMaximum", "properties", "patternProperties",
"additionalProperties", "required", "propertyNames", "minProperties", "maxProperties", "items", "contains",
"additionalItems", "minItems", "maxItems", "uniqueItems", "anyOf", "allOf", "oneOf", "not",
"if_then_else", "ref"}
testCases := make([]testCase, 0)
// Read all the test data from the files and append them to the main slice.
for _, keyword := range keywords {
testData := make([]testCase, 0)
rawTestData, err := readTestDataFromFile(keyword + ".json")
if err != nil {
t.Fatalf("Could not read test data from file: %v", err)
}
err = json.Unmarshal(rawTestData, &testData)
if err != nil {
t.Fatalf("Could not unmarshal test data to test cases slice, "+
"probably one or more cases is not in the correct format in %s.json: %v", keyword, err)
}
for index := range testData {
testData[index].Keyword = keyword
}
testCases = append(testCases, testData...)
}
t.Log("Given the need to test json validation against json schema according to method and endpoint")
{
jv, err := jsonvalidator.NewJsonValidator("draft-07")
if err != nil {
t.Fatalf("\t%s\tShould be able to create a new JsonValidator: %v", failed, err)
}
t.Logf("\t%s\tShould be able to create a new JsonValidator", succeed)
for i, testCase := range testCases {
subTest := func(t *testing.T) {
t.Logf("\t[%s] Test Schema %d: %s", testCase.Keyword, i, testCase.Descriptions)
{
for j, test := range testCase.Tests {
t.Logf("\t\tTest %d.%d: When trying to validate %s against the given schema", i, j, test.Description)
{
err = jv.LoadSchema(testCase.Path, testCase.Method, testCase.Schema)
if err != nil {
t.Errorf("\t\t%s\tShould be able to Load schema: %v", failed, err)
}
err = jv.Validate(testCase.Path, testCase.Method, test.Data)
if test.Valid {
if err != nil {
t.Errorf("\t\t%s\tData should be valid against the specified json schema: %v", failed, err)
} else {
t.Logf("\t\t%s\tData should be valid against the specified json schema", succeed)
}
} else {
if err != nil {
t.Logf("\t\t%s\tData should not be valid against the specified json schema: %v", succeed, err)
} else {
t.Errorf("\t\t%s\tData should not be valid against the specified json schema", failed)
}
}
}
}
}
t.Log()
}
t.Run(testCase.Keyword, subTest)
}
}
}
func readTestDataFromFile(fileName string) ([]byte, error) {
// Get the path of the current go file (including the path inside
// the project).
var absolutePath string
if _, filename, _, ok := runtime.Caller(0); ok {
absolutePath = path.Dir(filename)
}
// Open the meta-schema file.
file, err := os.Open(absolutePath + "/testdata/" + fileName)
if err != nil {
return nil, err
}
defer file.Close()
// Read the data from the file.
bytes, err := ioutil.ReadAll(file)
if err != nil {
return nil, err
}
return bytes, nil
}
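// Hypothetical usage sketch of the API exercised by the tests above:
//
//	jv, _ := jsonvalidator.NewJsonValidator("draft-07")
//	_ = jv.LoadSchema("/v1/a", "GET", []byte(`{"type": "string"}`))
//	err := jv.Validate("/v1/a", "GET", []byte(`"some string"`))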
|
TestValidate
|
dial_sync.go
|
package swarm
import (
"context"
"sync"
"github.com/libp2p/go-libp2p-core/peer"
)
// DialFunc is the type of function expected by DialSync.
type DialFunc func(context.Context, peer.ID) (*Conn, error)
// NewDialSync constructs a new DialSync
func NewDialSync(dfn DialFunc) *DialSync {
return &DialSync{
dials: make(map[peer.ID]*activeDial),
dialFunc: dfn,
}
}
// DialSync is a dial synchronization helper that ensures that at most one dial
// to any given peer is active at any given time.
type DialSync struct {
dials map[peer.ID]*activeDial
dialsLk sync.Mutex
dialFunc DialFunc
}
type activeDial struct {
id peer.ID
refCnt int
refCntLk sync.Mutex
cancel func()
err error
conn *Conn
waitch chan struct{}
ds *DialSync
}
func (ad *activeDial) wait(ctx context.Context) (*Conn, error) {
defer ad.decref()
select {
case <-ad.waitch:
return ad.conn, ad.err
case <-ctx.Done():
return nil, ctx.Err()
}
}
func (ad *activeDial) incref() {
ad.refCntLk.Lock()
defer ad.refCntLk.Unlock()
ad.refCnt++
}
func (ad *activeDial) decref() {
ad.refCntLk.Lock()
ad.refCnt--
maybeZero := (ad.refCnt <= 0)
ad.refCntLk.Unlock()
// make sure to always take locks in correct order.
if maybeZero {
ad.ds.dialsLk.Lock()
ad.refCntLk.Lock()
// check again after swapping lock order to make sure nobody else called
// incref in between
if ad.refCnt <= 0 {
ad.cancel()
delete(ad.ds.dials, ad.id)
}
ad.refCntLk.Unlock()
ad.ds.dialsLk.Unlock()
}
}
func (ad *activeDial) start(ctx context.Context) {
ad.conn, ad.err = ad.ds.dialFunc(ctx, ad.id)
close(ad.waitch)
ad.cancel()
}
func (ds *DialSync) getActiveDial(p peer.ID) *activeDial {
ds.dialsLk.Lock()
defer ds.dialsLk.Unlock()
actd, ok := ds.dials[p]
if !ok {
adctx, cancel := context.WithCancel(context.Background())
actd = &activeDial{
id: p,
cancel: cancel,
waitch: make(chan struct{}),
ds: ds,
}
ds.dials[p] = actd
go actd.start(adctx)
}
// increase ref count before dropping dialsLk
|
return actd
}
// DialLock initiates a dial to the given peer if there are none in progress
// then waits for the dial to that peer to complete.
func (ds *DialSync) DialLock(ctx context.Context, p peer.ID) (*Conn, error) {
return ds.getActiveDial(p).wait(ctx)
}
// CancelDial cancels all in-progress dials to the given peer.
func (ds *DialSync) CancelDial(p peer.ID) {
ds.dialsLk.Lock()
defer ds.dialsLk.Unlock()
if ad, ok := ds.dials[p]; ok {
ad.cancel()
}
}
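// Hypothetical usage sketch (the dial function body and peer ID are assumed):
//
//	ds := NewDialSync(func(ctx context.Context, p peer.ID) (*Conn, error) {
//		return dialPeer(ctx, p) // dialPeer is a placeholder
//	})
//	conn, err := ds.DialLock(ctx, somePeerID)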
|
actd.incref()
|
utils.py
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import contextlib
import copy
import importlib.util
import logging
import math
import os
import sys
import warnings
from collections import defaultdict
from itertools import accumulate
from typing import Callable, Dict, List, Optional
import numpy as np
import torch
import torch.nn.functional as F
from fairseq.logging.meters import safe_round
from fairseq.modules import gelu, gelu_accurate, sin, swish
from fairseq.modules.multihead_attention import MultiheadAttention
from torch import Tensor
try:
from amp_C import multi_tensor_l2norm
multi_tensor_l2norm_available = True
except ImportError:
multi_tensor_l2norm_available = False
logger = logging.getLogger(__name__)
def split_paths(paths: str) -> List[str]:
return paths.split(os.pathsep) if "://" not in paths else paths.split("|")
def load_ensemble_for_inference(filenames, task, model_arg_overrides=None):
from fairseq import checkpoint_utils
deprecation_warning(
"utils.load_ensemble_for_inference is deprecated. "
"Please use checkpoint_utils.load_model_ensemble instead."
)
return checkpoint_utils.load_model_ensemble(
filenames, arg_overrides=model_arg_overrides, task=task
)
def apply_to_sample(f, sample):
if hasattr(sample, '__len__') and len(sample) == 0:
return {}
def _apply(x):
if torch.is_tensor(x):
return f(x)
elif isinstance(x, dict):
return {key: _apply(value) for key, value in x.items()}
elif isinstance(x, list):
return [_apply(x) for x in x]
elif isinstance(x, tuple):
return tuple(_apply(x) for x in x)
elif isinstance(x, set):
return {_apply(x) for x in x}
else:
return x
return _apply(sample)
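# Usage note: apply_to_sample recursively applies f to every tensor nested in
# dicts, lists, tuples and sets, leaving non-tensor leaves untouched; see
# move_to_cuda and move_to_cpu below for concrete wrappers.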
def move_to_cuda(sample):
def _move_to_cuda(tensor):
return tensor.cuda()
return apply_to_sample(_move_to_cuda, sample)
def move_to_cpu(sample):
def _move_to_cpu(tensor):
# PyTorch has poor support for half tensors (float16) on CPU.
# Move any such tensors to float32.
if tensor.dtype in {torch.bfloat16, torch.float16}:
tensor = tensor.to(dtype=torch.float32)
return tensor.cpu()
return apply_to_sample(_move_to_cpu, sample)
def get_incremental_state(
module: MultiheadAttention,
incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]],
key: str,
) -> Optional[Dict[str, Optional[Tensor]]]:
"""Helper for getting incremental state for an nn.Module."""
return module.get_incremental_state(incremental_state, key)
def set_incremental_state(
module: MultiheadAttention,
incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]],
key: str,
value: Dict[str, Optional[Tensor]],
) -> Optional[Dict[str, Dict[str, Optional[Tensor]]]]:
"""Helper for setting incremental state for an nn.Module."""
if incremental_state is not None:
result = module.set_incremental_state(incremental_state, key, value)
if result is not None:
incremental_state = result
return incremental_state
def load_align_dict(replace_unk):
if replace_unk is None:
align_dict = None
elif isinstance(replace_unk, str) and len(replace_unk) > 0:
# Load alignment dictionary for unknown word replacement if it was passed as an argument.
align_dict = {}
with open(replace_unk, "r") as f:
for line in f:
cols = line.split()
align_dict[cols[0]] = cols[1]
else:
# No alignment dictionary provided but we still want to perform unknown word replacement by copying the
# original source word.
align_dict = {}
return align_dict
def print_embed_overlap(embed_dict, vocab_dict):
embed_keys = set(embed_dict.keys())
vocab_keys = set(vocab_dict.symbols)
overlap = len(embed_keys & vocab_keys)
logger.info("found {}/{} types in embedding file".format(overlap, len(vocab_dict)))
def parse_embedding(embed_path):
"""Parse embedding text file into a dictionary of word and embedding tensors.
The first line can have vocabulary size and dimension. The following lines
should contain word and embedding separated by spaces.
Example:
2 5
the -0.0230 -0.0264 0.0287 0.0171 0.1403
at -0.0395 -0.1286 0.0275 0.0254 -0.0932
"""
embed_dict = {}
with open(embed_path) as f_embed:
next(f_embed) # skip header
for line in f_embed:
pieces = line.rstrip().split(" ")
embed_dict[pieces[0]] = torch.Tensor(
[float(weight) for weight in pieces[1:]]
)
return embed_dict
def load_embedding(embed_dict, vocab, embedding):
for idx in range(len(vocab)):
token = vocab[idx]
if token in embed_dict:
embedding.weight.data[idx] = embed_dict[token]
return embedding
def replace_unk(hypo_str, src_str, alignment, align_dict, unk):
from fairseq import tokenizer
# Tokens are strings here
hypo_tokens = tokenizer.tokenize_line(hypo_str)
# TODO: Very rare cases where the replacement is '<eos>' should be handled gracefully
src_tokens = tokenizer.tokenize_line(src_str) + ["<eos>"]
for i, ht in enumerate(hypo_tokens):
if ht == unk:
src_token = src_tokens[alignment[i]]
# Either take the corresponding value in the aligned dictionary or just copy the original value.
hypo_tokens[i] = align_dict.get(src_token, src_token)
return " ".join(hypo_tokens)
def post_process_prediction(
hypo_tokens, src_str, alignment, align_dict, tgt_dict, remove_bpe=None, extra_symbols_to_ignore=None
):
hypo_str = tgt_dict.string(hypo_tokens, remove_bpe, extra_symbols_to_ignore=extra_symbols_to_ignore)
if align_dict is not None:
hypo_str = replace_unk(
hypo_str, src_str, alignment, align_dict, tgt_dict.unk_string()
)
if align_dict is not None or remove_bpe is not None:
# Convert back to tokens for evaluating with unk replacement or without BPE
# Note that the dictionary can be modified inside the method.
hypo_tokens = tgt_dict.encode_line(hypo_str, add_if_not_exist=True)
return hypo_tokens, hypo_str, alignment
def make_positions(tensor, padding_idx: int, onnx_trace: bool = False):
"""Replace non-padding symbols with their position numbers.
Position numbers begin at padding_idx+1. Padding symbols are ignored.
"""
# The series of casts and type-conversions here are carefully
# balanced to both work with ONNX export and XLA. In particular XLA
# prefers ints, cumsum defaults to output longs, and ONNX doesn't know
# how to handle the dtype kwarg in cumsum.
mask = tensor.ne(padding_idx).int()
return (torch.cumsum(mask, dim=1).type_as(mask) * mask).long() + padding_idx
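# Illustrative example (values assumed): with padding_idx=1,
#   make_positions(torch.tensor([[5, 6, 1], [1, 7, 1]]), 1)
# returns tensor([[2, 3, 1], [1, 2, 1]]): positions start at padding_idx + 1 and
# padding symbols keep the value padding_idx.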
def strip_pad(tensor, pad):
return tensor[tensor.ne(pad)]
def buffered_arange(max):
if not hasattr(buffered_arange, "buf"):
buffered_arange.buf = torch.LongTensor()
if max > buffered_arange.buf.numel():
buffered_arange.buf.resize_(max)
torch.arange(max, out=buffered_arange.buf)
return buffered_arange.buf[:max]
def convert_padding_direction(
src_tokens, padding_idx, right_to_left: bool = False, left_to_right: bool = False
):
assert right_to_left ^ left_to_right
pad_mask = src_tokens.eq(padding_idx)
if not pad_mask.any():
# no padding, return early
return src_tokens
if left_to_right and not pad_mask[:, 0].any():
# already right padded
return src_tokens
if right_to_left and not pad_mask[:, -1].any():
# already left padded
return src_tokens
max_len = src_tokens.size(1)
buffered = torch.empty(0).long()
if max_len > 0:
torch.arange(max_len, out=buffered)
range = buffered.type_as(src_tokens).expand_as(src_tokens)
num_pads = pad_mask.long().sum(dim=1, keepdim=True)
if right_to_left:
index = torch.remainder(range - num_pads, max_len)
else:
|
return src_tokens.gather(1, index)
def item(tensor):
if hasattr(tensor, "item"):
return tensor.item()
if hasattr(tensor, "__getitem__"):
return tensor[0]
return tensor
def multi_tensor_total_norm(grads, chunk_size=2048*32) -> torch.Tensor:
per_device_grads = {}
norms = []
for grad in grads:
device = grad.device
cur_device_grads = per_device_grads.get(device)
if cur_device_grads is None:
cur_device_grads = []
per_device_grads[device] = cur_device_grads
cur_device_grads.append(grad)
for device in per_device_grads.keys():
cur_device_grads = per_device_grads[device]
if device.type == "cuda":
# TODO(msb) return has_inf
has_inf = torch.zeros((1, 1), dtype=torch.int, device=device)
with torch.cuda.device(device):
norm = multi_tensor_l2norm(chunk_size, has_inf, [cur_device_grads], False)
norms.append(norm[0])
else:
norms += [torch.norm(g, p=2, dtype=torch.float32) for g in cur_device_grads]
total_norm = torch.norm(torch.stack(norms))
return total_norm
def clip_grad_norm_(params, max_norm, aggregate_norm_fn=None) -> torch.Tensor:
if isinstance(params, torch.Tensor):
params = [params]
params = list(params)
grads = [p.grad.detach() for p in filter(lambda p: p.grad is not None, params)]
if len(grads) == 0:
if len(params) > 0:
return params[0].new_tensor(0.)
else:
return torch.tensor(0.)
if len(grads) == 1:
total_norm = torch.norm(grads[0], p=2, dtype=torch.float32)
else:
if multi_tensor_l2norm_available:
total_norm = multi_tensor_total_norm(grads)
else:
warnings.warn(
"amp_C fused kernels unavailable, disabling multi_tensor_l2norm; "
"you may get better performance by installing NVIDIA's apex library"
)
total_norm = torch.norm(
torch.stack([torch.norm(g, p=2, dtype=torch.float32) for g in grads])
)
if aggregate_norm_fn is not None:
total_norm = aggregate_norm_fn(total_norm)
if max_norm > 0:
max_norm = float(max_norm)
clip_coef = (max_norm / (total_norm + 1e-6)).clamp_(max=1)
for g in grads:
g.mul_(clip_coef)
return total_norm
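# Usage note (a sketch): total = clip_grad_norm_(model.parameters(), max_norm=1.0)
# returns the pre-clipping gradient norm and rescales the gradients in place when
# max_norm > 0; with max_norm <= 0 it only computes and returns the norm.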
def fill_with_neg_inf(t):
"""FP16-compatible function that fills a tensor with -inf."""
return t.float().fill_(float("-inf")).type_as(t)
def _match_types(arg1, arg2):
"""Convert the numerical argument to the same type as the other argument"""
def upgrade(arg_number, arg_structure):
if isinstance(arg_structure, tuple):
return tuple([arg_number] * len(arg_structure))
elif isinstance(arg_structure, dict):
arg = copy.deepcopy(arg_structure)
for k in arg:
arg[k] = upgrade(arg_number, arg_structure[k])
return arg
else:
return arg_number
if isinstance(arg1, float) or isinstance(arg1, int):
return upgrade(arg1, arg2), arg2
elif isinstance(arg2, float) or isinstance(arg2, int):
return arg1, upgrade(arg2, arg1)
return arg1, arg2
def resolve_max_positions(*args):
"""Resolve max position constraints from multiple sources."""
def map_value_update(d1, d2):
updated_value = copy.deepcopy(d1)
for key in d2:
if key not in updated_value:
updated_value[key] = d2[key]
else:
updated_value[key] = min(d1[key], d2[key])
return updated_value
def nullsafe_min(l):
minim = None
for item in l:
if minim is None:
minim = item
elif item is not None and item < minim:
minim = item
return minim
max_positions = None
for arg in args:
if max_positions is None:
max_positions = arg
elif arg is not None:
max_positions, arg = _match_types(max_positions, arg)
if isinstance(arg, float) or isinstance(arg, int):
max_positions = min(max_positions, arg)
elif isinstance(arg, dict):
max_positions = map_value_update(max_positions, arg)
else:
max_positions = tuple(map(nullsafe_min, zip(max_positions, arg)))
return max_positions
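# Illustrative examples (values assumed):
#   resolve_max_positions(1024, (512, 256)) == (512, 256)
#   resolve_max_positions(None, 4096, 1024) == 1024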
def import_user_module(args):
module_path = getattr(args, "user_dir", None)
if module_path is not None:
module_path = os.path.abspath(args.user_dir)
if not os.path.exists(module_path):
fairseq_rel_path = os.path.join(
os.path.dirname(__file__), "..", args.user_dir
)
if os.path.exists(fairseq_rel_path):
module_path = fairseq_rel_path
module_parent, module_name = os.path.split(module_path)
if module_name not in sys.modules:
sys.path.insert(0, module_parent)
importlib.import_module(module_name)
def softmax(x, dim: int, onnx_trace: bool = False):
if onnx_trace:
return F.softmax(x.float(), dim=dim)
else:
return F.softmax(x, dim=dim, dtype=torch.float32)
def log_softmax(x, dim: int, onnx_trace: bool = False):
if onnx_trace:
return F.log_softmax(x.float(), dim=dim)
else:
return F.log_softmax(x, dim=dim, dtype=torch.float32)
def get_perplexity(loss, round=2, base=2):
if loss is None:
return 0.
try:
return safe_round(base ** loss, round)
except OverflowError:
return float('inf')
def deprecation_warning(message, stacklevel=3):
# don't use DeprecationWarning, since it's ignored by default
warnings.warn(message, stacklevel=stacklevel)
def get_activation_fn(activation: str) -> Callable:
""" Returns the activation function corresponding to `activation` """
if activation == "relu":
return F.relu
elif activation == "gelu":
return gelu
elif activation == "gelu_fast":
deprecation_warning(
"--activation-fn=gelu_fast has been renamed to gelu_accurate"
)
return gelu_accurate
elif activation == "gelu_accurate":
return gelu_accurate
elif activation == 'sin':
return sin
elif activation == 'swish':
return swish
elif activation == "tanh":
return torch.tanh
elif activation == "linear":
return lambda x: x
else:
raise RuntimeError("--activation-fn {} not supported".format(activation))
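# Example: get_activation_fn("gelu") returns fairseq.modules.gelu,
# get_activation_fn("linear") returns an identity lambda, and unknown names
# raise the RuntimeError above.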
def get_available_activation_fns() -> List:
return [
"relu",
"gelu",
"gelu_fast", # deprecated
"gelu_accurate",
"sin",
"swish",
"tanh",
"linear",
]
@contextlib.contextmanager
def eval(model):
is_training = model.training
model.eval()
yield
model.train(is_training)
def has_parameters(module):
try:
next(module.parameters())
return True
except StopIteration:
return False
def set_torch_seed(seed):
# Set seed based on args.seed and the update number so that we get
# reproducible results when resuming from checkpoints
assert isinstance(seed, int)
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
@contextlib.contextmanager
def with_torch_seed(seed):
assert isinstance(seed, int)
rng_state = torch.get_rng_state()
cuda_rng_state = torch.cuda.get_rng_state()
set_torch_seed(seed)
yield
torch.set_rng_state(rng_state)
torch.cuda.set_rng_state(cuda_rng_state)
def parse_alignment(line):
"""
Parses a single line from the alignment file.
Args:
line (str): String containing the alignment of the format:
<src_idx_1>-<tgt_idx_1> <src_idx_2>-<tgt_idx_2> ..
<src_idx_m>-<tgt_idx_m>. All indices are 0 indexed.
Returns:
torch.IntTensor: packed alignments of shape (2 * m).
"""
alignments = line.strip().split()
parsed_alignment = torch.IntTensor(2 * len(alignments))
for idx, alignment in enumerate(alignments):
src_idx, tgt_idx = alignment.split("-")
parsed_alignment[2 * idx] = int(src_idx)
parsed_alignment[2 * idx + 1] = int(tgt_idx)
return parsed_alignment
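# Illustrative example: parse_alignment("0-0 1-2") returns an IntTensor with
# values [0, 0, 1, 2], i.e. the (src, tgt) index pairs packed flat.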
def get_token_to_word_mapping(tokens, exclude_list):
n = len(tokens)
word_start = [int(token not in exclude_list) for token in tokens]
word_idx = list(accumulate(word_start))
token_to_word = {i: word_idx[i] for i in range(n)}
return token_to_word
def extract_hard_alignment(attn, src_sent, tgt_sent, pad, eos):
tgt_valid = ((tgt_sent != pad) & (tgt_sent != eos)).nonzero().squeeze(dim=-1)
src_invalid = ((src_sent == pad) | (src_sent == eos)).nonzero().squeeze(dim=-1)
src_token_to_word = get_token_to_word_mapping(src_sent, [eos, pad])
tgt_token_to_word = get_token_to_word_mapping(tgt_sent, [eos, pad])
alignment = []
if len(tgt_valid) != 0 and len(src_invalid) < len(src_sent):
attn_valid = attn[tgt_valid]
attn_valid[:, src_invalid] = float("-inf")
_, src_indices = attn_valid.max(dim=1)
for tgt_idx, src_idx in zip(tgt_valid, src_indices):
alignment.append(
(
src_token_to_word[src_idx.item()] - 1,
tgt_token_to_word[tgt_idx.item()] - 1,
)
)
return alignment
def new_arange(x, *size):
"""
Return a Tensor of `size` filled with a range function on the device of x.
If size is empty, using the size of the variable x.
"""
if len(size) == 0:
size = x.size()
return torch.arange(size[-1], device=x.device).expand(*size).contiguous()
def get_tpu_device(args):
import torch_xla.core.xla_model as xm
return xm.xla_device()
def logging_multiple_line_messages(msg):
msg_arr = msg.split("\n")
for line in msg_arr:
logger.info(line)
class CudaEnvironment(object):
def __init__(self):
cur_device = torch.cuda.current_device()
prop = torch.cuda.get_device_properties("cuda:{}".format(cur_device))
self.name = prop.name
self.major = prop.major
self.minor = prop.minor
self.total_memory_in_GB = prop.total_memory / 1024 / 1024 / 1024
@staticmethod
def pretty_print_cuda_env_list(cuda_env_list):
"""
Given a list of CudaEnvironments, pretty print them
"""
num_workers = len(cuda_env_list)
center = "CUDA environments for all {} workers".format(num_workers)
banner_len = 40 - len(center) // 2
first_line = "*" * banner_len + center + "*" * banner_len
msg_arr = [first_line]
for r, env in enumerate(cuda_env_list):
msg_arr.append(
"rank {:3d}: ".format(r)
+ "capabilities = {:2d}.{:<2d} ; ".format(env.major, env.minor)
+ "total memory = {:.3f} GB ; ".format(env.total_memory_in_GB)
+ "name = {:40s}".format(env.name)
)
msg_arr.append(first_line)
logging_multiple_line_messages("\n".join(msg_arr))
|
index = torch.remainder(range + num_pads, max_len)
|
extensions_v1beta1_pod_security_policy_spec.py
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: v1.14.7
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class ExtensionsV1beta1PodSecurityPolicySpec(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'allow_privilege_escalation': 'bool',
'allowed_csi_drivers': 'list[ExtensionsV1beta1AllowedCSIDriver]',
'allowed_capabilities': 'list[str]',
'allowed_flex_volumes': 'list[ExtensionsV1beta1AllowedFlexVolume]',
'allowed_host_paths': 'list[ExtensionsV1beta1AllowedHostPath]',
'allowed_proc_mount_types': 'list[str]',
'allowed_unsafe_sysctls': 'list[str]',
'default_add_capabilities': 'list[str]',
'default_allow_privilege_escalation': 'bool',
'forbidden_sysctls': 'list[str]',
'fs_group': 'ExtensionsV1beta1FSGroupStrategyOptions',
'host_ipc': 'bool',
'host_network': 'bool',
'host_pid': 'bool',
'host_ports': 'list[ExtensionsV1beta1HostPortRange]',
'privileged': 'bool',
'read_only_root_filesystem': 'bool',
'required_drop_capabilities': 'list[str]',
'run_as_group': 'ExtensionsV1beta1RunAsGroupStrategyOptions',
'run_as_user': 'ExtensionsV1beta1RunAsUserStrategyOptions',
'se_linux': 'ExtensionsV1beta1SELinuxStrategyOptions',
'supplemental_groups': 'ExtensionsV1beta1SupplementalGroupsStrategyOptions',
'volumes': 'list[str]'
}
attribute_map = {
'allow_privilege_escalation': 'allowPrivilegeEscalation',
'allowed_csi_drivers': 'allowedCSIDrivers',
'allowed_capabilities': 'allowedCapabilities',
'allowed_flex_volumes': 'allowedFlexVolumes',
'allowed_host_paths': 'allowedHostPaths',
'allowed_proc_mount_types': 'allowedProcMountTypes',
|
'allowed_unsafe_sysctls': 'allowedUnsafeSysctls',
'default_add_capabilities': 'defaultAddCapabilities',
'default_allow_privilege_escalation': 'defaultAllowPrivilegeEscalation',
'forbidden_sysctls': 'forbiddenSysctls',
'fs_group': 'fsGroup',
'host_ipc': 'hostIPC',
'host_network': 'hostNetwork',
'host_pid': 'hostPID',
'host_ports': 'hostPorts',
'privileged': 'privileged',
'read_only_root_filesystem': 'readOnlyRootFilesystem',
'required_drop_capabilities': 'requiredDropCapabilities',
'run_as_group': 'runAsGroup',
'run_as_user': 'runAsUser',
'se_linux': 'seLinux',
'supplemental_groups': 'supplementalGroups',
'volumes': 'volumes'
}
def __init__(self, allow_privilege_escalation=None, allowed_csi_drivers=None, allowed_capabilities=None, allowed_flex_volumes=None, allowed_host_paths=None, allowed_proc_mount_types=None, allowed_unsafe_sysctls=None, default_add_capabilities=None, default_allow_privilege_escalation=None, forbidden_sysctls=None, fs_group=None, host_ipc=None, host_network=None, host_pid=None, host_ports=None, privileged=None, read_only_root_filesystem=None, required_drop_capabilities=None, run_as_group=None, run_as_user=None, se_linux=None, supplemental_groups=None, volumes=None): # noqa: E501
"""ExtensionsV1beta1PodSecurityPolicySpec - a model defined in OpenAPI""" # noqa: E501
self._allow_privilege_escalation = None
self._allowed_csi_drivers = None
self._allowed_capabilities = None
self._allowed_flex_volumes = None
self._allowed_host_paths = None
self._allowed_proc_mount_types = None
self._allowed_unsafe_sysctls = None
self._default_add_capabilities = None
self._default_allow_privilege_escalation = None
self._forbidden_sysctls = None
self._fs_group = None
self._host_ipc = None
self._host_network = None
self._host_pid = None
self._host_ports = None
self._privileged = None
self._read_only_root_filesystem = None
self._required_drop_capabilities = None
self._run_as_group = None
self._run_as_user = None
self._se_linux = None
self._supplemental_groups = None
self._volumes = None
self.discriminator = None
if allow_privilege_escalation is not None:
self.allow_privilege_escalation = allow_privilege_escalation
if allowed_csi_drivers is not None:
self.allowed_csi_drivers = allowed_csi_drivers
if allowed_capabilities is not None:
self.allowed_capabilities = allowed_capabilities
if allowed_flex_volumes is not None:
self.allowed_flex_volumes = allowed_flex_volumes
if allowed_host_paths is not None:
self.allowed_host_paths = allowed_host_paths
if allowed_proc_mount_types is not None:
self.allowed_proc_mount_types = allowed_proc_mount_types
if allowed_unsafe_sysctls is not None:
self.allowed_unsafe_sysctls = allowed_unsafe_sysctls
if default_add_capabilities is not None:
self.default_add_capabilities = default_add_capabilities
if default_allow_privilege_escalation is not None:
self.default_allow_privilege_escalation = default_allow_privilege_escalation
if forbidden_sysctls is not None:
self.forbidden_sysctls = forbidden_sysctls
self.fs_group = fs_group
if host_ipc is not None:
self.host_ipc = host_ipc
if host_network is not None:
self.host_network = host_network
if host_pid is not None:
self.host_pid = host_pid
if host_ports is not None:
self.host_ports = host_ports
if privileged is not None:
self.privileged = privileged
if read_only_root_filesystem is not None:
self.read_only_root_filesystem = read_only_root_filesystem
if required_drop_capabilities is not None:
self.required_drop_capabilities = required_drop_capabilities
if run_as_group is not None:
self.run_as_group = run_as_group
self.run_as_user = run_as_user
self.se_linux = se_linux
self.supplemental_groups = supplemental_groups
if volumes is not None:
self.volumes = volumes
@property
def allow_privilege_escalation(self):
"""Gets the allow_privilege_escalation of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
allowPrivilegeEscalation determines if a pod can request to allow privilege escalation. If unspecified, defaults to true. # noqa: E501
:return: The allow_privilege_escalation of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: bool
"""
return self._allow_privilege_escalation
@allow_privilege_escalation.setter
def allow_privilege_escalation(self, allow_privilege_escalation):
"""Sets the allow_privilege_escalation of this ExtensionsV1beta1PodSecurityPolicySpec.
allowPrivilegeEscalation determines if a pod can request to allow privilege escalation. If unspecified, defaults to true. # noqa: E501
:param allow_privilege_escalation: The allow_privilege_escalation of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: bool
"""
self._allow_privilege_escalation = allow_privilege_escalation
@property
def allowed_csi_drivers(self):
"""Gets the allowed_csi_drivers of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
AllowedCSIDrivers is a whitelist of inline CSI drivers that must be explicitly set to be embedded within a pod spec. An empty value means no CSI drivers can run inline within a pod spec. # noqa: E501
:return: The allowed_csi_drivers of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: list[ExtensionsV1beta1AllowedCSIDriver]
"""
return self._allowed_csi_drivers
@allowed_csi_drivers.setter
def allowed_csi_drivers(self, allowed_csi_drivers):
"""Sets the allowed_csi_drivers of this ExtensionsV1beta1PodSecurityPolicySpec.
AllowedCSIDrivers is a whitelist of inline CSI drivers that must be explicitly set to be embedded within a pod spec. An empty value means no CSI drivers can run inline within a pod spec. # noqa: E501
:param allowed_csi_drivers: The allowed_csi_drivers of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: list[ExtensionsV1beta1AllowedCSIDriver]
"""
self._allowed_csi_drivers = allowed_csi_drivers
@property
def allowed_capabilities(self):
"""Gets the allowed_capabilities of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
allowedCapabilities is a list of capabilities that can be requested to add to the container. Capabilities in this field may be added at the pod author's discretion. You must not list a capability in both allowedCapabilities and requiredDropCapabilities. # noqa: E501
:return: The allowed_capabilities of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: list[str]
"""
return self._allowed_capabilities
@allowed_capabilities.setter
def allowed_capabilities(self, allowed_capabilities):
"""Sets the allowed_capabilities of this ExtensionsV1beta1PodSecurityPolicySpec.
allowedCapabilities is a list of capabilities that can be requested to add to the container. Capabilities in this field may be added at the pod author's discretion. You must not list a capability in both allowedCapabilities and requiredDropCapabilities. # noqa: E501
:param allowed_capabilities: The allowed_capabilities of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: list[str]
"""
self._allowed_capabilities = allowed_capabilities
@property
def allowed_flex_volumes(self):
"""Gets the allowed_flex_volumes of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
allowedFlexVolumes is a whitelist of allowed Flexvolumes. Empty or nil indicates that all Flexvolumes may be used. This parameter is effective only when the usage of the Flexvolumes is allowed in the \"volumes\" field. # noqa: E501
:return: The allowed_flex_volumes of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: list[ExtensionsV1beta1AllowedFlexVolume]
"""
return self._allowed_flex_volumes
@allowed_flex_volumes.setter
def allowed_flex_volumes(self, allowed_flex_volumes):
"""Sets the allowed_flex_volumes of this ExtensionsV1beta1PodSecurityPolicySpec.
allowedFlexVolumes is a whitelist of allowed Flexvolumes. Empty or nil indicates that all Flexvolumes may be used. This parameter is effective only when the usage of the Flexvolumes is allowed in the \"volumes\" field. # noqa: E501
:param allowed_flex_volumes: The allowed_flex_volumes of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: list[ExtensionsV1beta1AllowedFlexVolume]
"""
self._allowed_flex_volumes = allowed_flex_volumes
@property
def allowed_host_paths(self):
"""Gets the allowed_host_paths of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
allowedHostPaths is a white list of allowed host paths. Empty indicates that all host paths may be used. # noqa: E501
:return: The allowed_host_paths of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: list[ExtensionsV1beta1AllowedHostPath]
"""
return self._allowed_host_paths
@allowed_host_paths.setter
def allowed_host_paths(self, allowed_host_paths):
"""Sets the allowed_host_paths of this ExtensionsV1beta1PodSecurityPolicySpec.
allowedHostPaths is a white list of allowed host paths. Empty indicates that all host paths may be used. # noqa: E501
:param allowed_host_paths: The allowed_host_paths of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: list[ExtensionsV1beta1AllowedHostPath]
"""
self._allowed_host_paths = allowed_host_paths
@property
def allowed_proc_mount_types(self):
"""Gets the allowed_proc_mount_types of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
AllowedProcMountTypes is a whitelist of allowed ProcMountTypes. Empty or nil indicates that only the DefaultProcMountType may be used. This requires the ProcMountType feature flag to be enabled. # noqa: E501
:return: The allowed_proc_mount_types of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: list[str]
"""
return self._allowed_proc_mount_types
@allowed_proc_mount_types.setter
def allowed_proc_mount_types(self, allowed_proc_mount_types):
"""Sets the allowed_proc_mount_types of this ExtensionsV1beta1PodSecurityPolicySpec.
AllowedProcMountTypes is a whitelist of allowed ProcMountTypes. Empty or nil indicates that only the DefaultProcMountType may be used. This requires the ProcMountType feature flag to be enabled. # noqa: E501
:param allowed_proc_mount_types: The allowed_proc_mount_types of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: list[str]
"""
self._allowed_proc_mount_types = allowed_proc_mount_types
@property
def allowed_unsafe_sysctls(self):
"""Gets the allowed_unsafe_sysctls of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
allowedUnsafeSysctls is a list of explicitly allowed unsafe sysctls, defaults to none. Each entry is either a plain sysctl name or ends in \"*\" in which case it is considered as a prefix of allowed sysctls. Single * means all unsafe sysctls are allowed. Kubelet has to whitelist all allowed unsafe sysctls explicitly to avoid rejection. Examples: e.g. \"foo/*\" allows \"foo/bar\", \"foo/baz\", etc. e.g. \"foo.*\" allows \"foo.bar\", \"foo.baz\", etc. # noqa: E501
:return: The allowed_unsafe_sysctls of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: list[str]
"""
return self._allowed_unsafe_sysctls
@allowed_unsafe_sysctls.setter
def allowed_unsafe_sysctls(self, allowed_unsafe_sysctls):
"""Sets the allowed_unsafe_sysctls of this ExtensionsV1beta1PodSecurityPolicySpec.
allowedUnsafeSysctls is a list of explicitly allowed unsafe sysctls, defaults to none. Each entry is either a plain sysctl name or ends in \"*\" in which case it is considered as a prefix of allowed sysctls. Single * means all unsafe sysctls are allowed. Kubelet has to whitelist all allowed unsafe sysctls explicitly to avoid rejection. Examples: e.g. \"foo/*\" allows \"foo/bar\", \"foo/baz\", etc. e.g. \"foo.*\" allows \"foo.bar\", \"foo.baz\", etc. # noqa: E501
:param allowed_unsafe_sysctls: The allowed_unsafe_sysctls of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: list[str]
"""
self._allowed_unsafe_sysctls = allowed_unsafe_sysctls
@property
def default_add_capabilities(self):
"""Gets the default_add_capabilities of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
defaultAddCapabilities is the default set of capabilities that will be added to the container unless the pod spec specifically drops the capability. You may not list a capability in both defaultAddCapabilities and requiredDropCapabilities. Capabilities added here are implicitly allowed, and need not be included in the allowedCapabilities list. # noqa: E501
:return: The default_add_capabilities of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: list[str]
"""
return self._default_add_capabilities
@default_add_capabilities.setter
def default_add_capabilities(self, default_add_capabilities):
"""Sets the default_add_capabilities of this ExtensionsV1beta1PodSecurityPolicySpec.
defaultAddCapabilities is the default set of capabilities that will be added to the container unless the pod spec specifically drops the capability. You may not list a capability in both defaultAddCapabilities and requiredDropCapabilities. Capabilities added here are implicitly allowed, and need not be included in the allowedCapabilities list. # noqa: E501
:param default_add_capabilities: The default_add_capabilities of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: list[str]
"""
self._default_add_capabilities = default_add_capabilities
@property
def default_allow_privilege_escalation(self):
"""Gets the default_allow_privilege_escalation of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
defaultAllowPrivilegeEscalation controls the default setting for whether a process can gain more privileges than its parent process. # noqa: E501
:return: The default_allow_privilege_escalation of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: bool
"""
return self._default_allow_privilege_escalation
@default_allow_privilege_escalation.setter
def default_allow_privilege_escalation(self, default_allow_privilege_escalation):
"""Sets the default_allow_privilege_escalation of this ExtensionsV1beta1PodSecurityPolicySpec.
defaultAllowPrivilegeEscalation controls the default setting for whether a process can gain more privileges than its parent process. # noqa: E501
:param default_allow_privilege_escalation: The default_allow_privilege_escalation of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: bool
"""
self._default_allow_privilege_escalation = default_allow_privilege_escalation
@property
def forbidden_sysctls(self):
"""Gets the forbidden_sysctls of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
forbiddenSysctls is a list of explicitly forbidden sysctls, defaults to none. Each entry is either a plain sysctl name or ends in \"*\" in which case it is considered as a prefix of forbidden sysctls. Single * means all sysctls are forbidden. Examples: e.g. \"foo/*\" forbids \"foo/bar\", \"foo/baz\", etc. e.g. \"foo.*\" forbids \"foo.bar\", \"foo.baz\", etc. # noqa: E501
:return: The forbidden_sysctls of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: list[str]
"""
return self._forbidden_sysctls
@forbidden_sysctls.setter
def forbidden_sysctls(self, forbidden_sysctls):
"""Sets the forbidden_sysctls of this ExtensionsV1beta1PodSecurityPolicySpec.
forbiddenSysctls is a list of explicitly forbidden sysctls, defaults to none. Each entry is either a plain sysctl name or ends in \"*\" in which case it is considered as a prefix of forbidden sysctls. Single * means all sysctls are forbidden. Examples: e.g. \"foo/*\" forbids \"foo/bar\", \"foo/baz\", etc. e.g. \"foo.*\" forbids \"foo.bar\", \"foo.baz\", etc. # noqa: E501
:param forbidden_sysctls: The forbidden_sysctls of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: list[str]
"""
self._forbidden_sysctls = forbidden_sysctls
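    # Minimal sketch of the wildcard semantics documented above (the sysctl
    # names are illustrative assumptions):
    #
    #   spec.forbidden_sysctls = ["kernel.msg*", "net.ipv4.ip_forward"]
    #
    # "kernel.msg*" forbids every sysctl with that prefix, the plain entry
    # forbids exactly "net.ipv4.ip_forward", and a single "*" would forbid
    # all sysctls.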
@property
def fs_group(self):
"""Gets the fs_group of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:return: The fs_group of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: ExtensionsV1beta1FSGroupStrategyOptions
"""
return self._fs_group
@fs_group.setter
def fs_group(self, fs_group):
"""Sets the fs_group of this ExtensionsV1beta1PodSecurityPolicySpec.
:param fs_group: The fs_group of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: ExtensionsV1beta1FSGroupStrategyOptions
"""
if fs_group is None:
raise ValueError("Invalid value for `fs_group`, must not be `None`") # noqa: E501
self._fs_group = fs_group
@property
def host_ipc(self):
"""Gets the host_ipc of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
hostIPC determines if the policy allows the use of HostIPC in the pod spec. # noqa: E501
:return: The host_ipc of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: bool
"""
return self._host_ipc
@host_ipc.setter
def host_ipc(self, host_ipc):
"""Sets the host_ipc of this ExtensionsV1beta1PodSecurityPolicySpec.
hostIPC determines if the policy allows the use of HostIPC in the pod spec. # noqa: E501
:param host_ipc: The host_ipc of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: bool
"""
self._host_ipc = host_ipc
@property
def host_network(self):
"""Gets the host_network of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
hostNetwork determines if the policy allows the use of HostNetwork in the pod spec. # noqa: E501
:return: The host_network of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: bool
"""
return self._host_network
@host_network.setter
def host_network(self, host_network):
"""Sets the host_network of this ExtensionsV1beta1PodSecurityPolicySpec.
hostNetwork determines if the policy allows the use of HostNetwork in the pod spec. # noqa: E501
:param host_network: The host_network of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: bool
"""
self._host_network = host_network
@property
def host_pid(self):
"""Gets the host_pid of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
hostPID determines if the policy allows the use of HostPID in the pod spec. # noqa: E501
:return: The host_pid of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: bool
"""
return self._host_pid
@host_pid.setter
def host_pid(self, host_pid):
"""Sets the host_pid of this ExtensionsV1beta1PodSecurityPolicySpec.
hostPID determines if the policy allows the use of HostPID in the pod spec. # noqa: E501
:param host_pid: The host_pid of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: bool
"""
self._host_pid = host_pid
@property
def host_ports(self):
"""Gets the host_ports of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
hostPorts determines which host port ranges are allowed to be exposed. # noqa: E501
:return: The host_ports of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: list[ExtensionsV1beta1HostPortRange]
"""
return self._host_ports
@host_ports.setter
def host_ports(self, host_ports):
"""Sets the host_ports of this ExtensionsV1beta1PodSecurityPolicySpec.
hostPorts determines which host port ranges are allowed to be exposed. # noqa: E501
:param host_ports: The host_ports of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: list[ExtensionsV1beta1HostPortRange]
"""
self._host_ports = host_ports
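    # Hedged usage sketch; it assumes ExtensionsV1beta1HostPortRange exposes
    # `min` and `max` keyword arguments like other generated models:
    #
    #   from kubernetes.client import ExtensionsV1beta1HostPortRange  # assumed import path
    #   spec.host_ports = [ExtensionsV1beta1HostPortRange(min=8000, max=8080)]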
@property
def privileged(self):
"""Gets the privileged of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
privileged determines if a pod can request to be run as privileged. # noqa: E501
:return: The privileged of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: bool
"""
return self._privileged
@privileged.setter
def privileged(self, privileged):
"""Sets the privileged of this ExtensionsV1beta1PodSecurityPolicySpec.
privileged determines if a pod can request to be run as privileged. # noqa: E501
:param privileged: The privileged of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: bool
"""
self._privileged = privileged
@property
def read_only_root_filesystem(self):
"""Gets the read_only_root_filesystem of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
readOnlyRootFilesystem when set to true will force containers to run with a read only root file system. If the container specifically requests to run with a non-read only root file system the PSP should deny the pod. If set to false the container may run with a read only root file system if it wishes but it will not be forced to. # noqa: E501
:return: The read_only_root_filesystem of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: bool
"""
return self._read_only_root_filesystem
@read_only_root_filesystem.setter
def read_only_root_filesystem(self, read_only_root_filesystem):
"""Sets the read_only_root_filesystem of this ExtensionsV1beta1PodSecurityPolicySpec.
readOnlyRootFilesystem when set to true will force containers to run with a read only root file system. If the container specifically requests to run with a non-read only root file system the PSP should deny the pod. If set to false the container may run with a read only root file system if it wishes but it will not be forced to. # noqa: E501
:param read_only_root_filesystem: The read_only_root_filesystem of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: bool
"""
self._read_only_root_filesystem = read_only_root_filesystem
@property
def required_drop_capabilities(self):
"""Gets the required_drop_capabilities of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
requiredDropCapabilities are the capabilities that will be dropped from the container. These are required to be dropped and cannot be added. # noqa: E501
:return: The required_drop_capabilities of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: list[str]
"""
return self._required_drop_capabilities
@required_drop_capabilities.setter
def required_drop_capabilities(self, required_drop_capabilities):
"""Sets the required_drop_capabilities of this ExtensionsV1beta1PodSecurityPolicySpec.
requiredDropCapabilities are the capabilities that will be dropped from the container. These are required to be dropped and cannot be added. # noqa: E501
:param required_drop_capabilities: The required_drop_capabilities of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: list[str]
"""
self._required_drop_capabilities = required_drop_capabilities
@property
def run_as_group(self):
"""Gets the run_as_group of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:return: The run_as_group of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: ExtensionsV1beta1RunAsGroupStrategyOptions
"""
return self._run_as_group
@run_as_group.setter
def run_as_group(self, run_as_group):
"""Sets the run_as_group of this ExtensionsV1beta1PodSecurityPolicySpec.
:param run_as_group: The run_as_group of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: ExtensionsV1beta1RunAsGroupStrategyOptions
"""
self._run_as_group = run_as_group
@property
def run_as_user(self):
"""Gets the run_as_user of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:return: The run_as_user of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: ExtensionsV1beta1RunAsUserStrategyOptions
"""
return self._run_as_user
@run_as_user.setter
def run_as_user(self, run_as_user):
"""Sets the run_as_user of this ExtensionsV1beta1PodSecurityPolicySpec.
:param run_as_user: The run_as_user of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: ExtensionsV1beta1RunAsUserStrategyOptions
"""
if run_as_user is None:
raise ValueError("Invalid value for `run_as_user`, must not be `None`") # noqa: E501
self._run_as_user = run_as_user
@property
def se_linux(self):
"""Gets the se_linux of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:return: The se_linux of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: ExtensionsV1beta1SELinuxStrategyOptions
"""
return self._se_linux
@se_linux.setter
def se_linux(self, se_linux):
"""Sets the se_linux of this ExtensionsV1beta1PodSecurityPolicySpec.
:param se_linux: The se_linux of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: ExtensionsV1beta1SELinuxStrategyOptions
"""
if se_linux is None:
raise ValueError("Invalid value for `se_linux`, must not be `None`") # noqa: E501
self._se_linux = se_linux
@property
def supplemental_groups(self):
"""Gets the supplemental_groups of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:return: The supplemental_groups of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: ExtensionsV1beta1SupplementalGroupsStrategyOptions
"""
return self._supplemental_groups
@supplemental_groups.setter
def supplemental_groups(self, supplemental_groups):
"""Sets the supplemental_groups of this ExtensionsV1beta1PodSecurityPolicySpec.
:param supplemental_groups: The supplemental_groups of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: ExtensionsV1beta1SupplementalGroupsStrategyOptions
"""
if supplemental_groups is None:
raise ValueError("Invalid value for `supplemental_groups`, must not be `None`") # noqa: E501
self._supplemental_groups = supplemental_groups
@property
def volumes(self):
"""Gets the volumes of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
volumes is a white list of allowed volume plugins. Empty indicates that no volumes may be used. To allow all volumes you may use '*'. # noqa: E501
:return: The volumes of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:rtype: list[str]
"""
return self._volumes
@volumes.setter
def volumes(self, volumes):
"""Sets the volumes of this ExtensionsV1beta1PodSecurityPolicySpec.
volumes is a white list of allowed volume plugins. Empty indicates that no volumes may be used. To allow all volumes you may use '*'. # noqa: E501
:param volumes: The volumes of this ExtensionsV1beta1PodSecurityPolicySpec. # noqa: E501
:type: list[str]
"""
self._volumes = volumes
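    # Hedged example of the volume whitelist semantics described above; the
    # plugin names are illustrative, not an exhaustive or authoritative list:
    #
    #   spec.volumes = ["configMap", "secret", "emptyDir"]  # allow only these plugins
    #   spec.volumes = ["*"]                                # allow every volume plugin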
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
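        # Walk the declared openapi_types and serialize each attribute,
        # recursively calling to_dict() on nested models and on models held
        # inside lists or dicts.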
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ExtensionsV1beta1PodSecurityPolicySpec):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
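# Minimal construction sketch for this model. The setters above reject `None`
# for fs_group, run_as_user, se_linux and supplemental_groups, so those four
# fields are required. The keyword-argument constructor, the strategy option
# classes and the `rule` values shown here are assumptions for illustration:
#
#   from kubernetes import client
#
#   spec = client.ExtensionsV1beta1PodSecurityPolicySpec(
#       privileged=False,
#       fs_group=client.ExtensionsV1beta1FSGroupStrategyOptions(rule="RunAsAny"),
#       run_as_user=client.ExtensionsV1beta1RunAsUserStrategyOptions(rule="MustRunAsNonRoot"),
#       se_linux=client.ExtensionsV1beta1SELinuxStrategyOptions(rule="RunAsAny"),
#       supplemental_groups=client.ExtensionsV1beta1SupplementalGroupsStrategyOptions(rule="RunAsAny"),
#       volumes=["configMap", "secret", "emptyDir"],
#   )
#   print(spec.to_dict())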