file_name (stringlengths 3–137) | prefix (stringlengths 0–918k) | suffix (stringlengths 0–962k) | middle (stringlengths 0–812k) |
---|---|---|---|
term_hashmap.rs | use murmurhash32;
use self::murmurhash32::murmurhash2;
use super::{Addr, MemoryArena};
use crate::postings::stacker::memory_arena::store;
use crate::postings::UnorderedTermId;
use byteorder::{ByteOrder, NativeEndian};
use std::iter;
use std::mem;
use std::slice;
/// Returns the actual memory size in bytes
/// required to create a table of size `2^num_bits`.
pub fn compute_table_size(num_bits: usize) -> usize {
(1 << num_bits) * mem::size_of::<KeyValue>()
}
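// A worked example (sketch; the exact byte count depends on `KeyValue`'s layout,
// assumed to be 16 bytes here): compute_table_size(10) == (1 << 10) * 16 == 16_384.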
/// `KeyValue` is the item stored in the hash table.
/// The key is actually a `BytesRef` object stored in an external heap.
/// The `value_addr` also points to an address in the heap.
///
/// The key and the value are actually stored contiguously.
/// For this reason, the (start, stop) information is redundant
/// and could be simplified in the future.
#[derive(Copy, Clone)]
struct KeyValue {
key_value_addr: Addr,
hash: u32,
unordered_term_id: UnorderedTermId,
}
impl Default for KeyValue {
fn | () -> Self {
KeyValue {
key_value_addr: Addr::null_pointer(),
hash: 0u32,
unordered_term_id: UnorderedTermId::default(),
}
}
}
impl KeyValue {
fn is_empty(self) -> bool {
self.key_value_addr.is_null()
}
}
/// Customized `HashMap` with string keys
///
/// This `HashMap` takes strings as keys. Keys are
/// stored in a user-defined heap.
///
/// The quirky API has the benefit of avoiding
/// both computing the hash of the key twice and
/// copying the key, as long as there is no insert.
///
pub struct TermHashMap {
table: Box<[KeyValue]>,
pub heap: MemoryArena,
mask: usize,
occupied: Vec<usize>,
len: usize,
}
struct QuadraticProbing {
hash: usize,
i: usize,
mask: usize,
}
impl QuadraticProbing {
fn compute(hash: usize, mask: usize) -> QuadraticProbing {
QuadraticProbing { hash, i: 0, mask }
}
#[inline]
fn next_probe(&mut self) -> usize {
self.i += 1;
(self.hash + self.i) & self.mask
}
}
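// Note: despite its name, `QuadraticProbing::next_probe` yields the linear sequence
// (hash + 1) & mask, (hash + 2) & mask, ...; the power-of-two mask wraps every probe
// back into the table.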
pub struct Iter<'a> {
hashmap: &'a TermHashMap,
inner: slice::Iter<'a, usize>,
}
impl<'a> Iterator for Iter<'a> {
type Item = (&'a [u8], Addr, UnorderedTermId);
fn next(&mut self) -> Option<Self::Item> {
self.inner.next().cloned().map(move |bucket: usize| {
let kv = self.hashmap.table[bucket];
let (key, offset): (&'a [u8], Addr) = self.hashmap.get_key_value(kv.key_value_addr);
(key, offset, kv.unordered_term_id)
})
}
}
impl TermHashMap {
pub fn new(num_bucket_power_of_2: usize) -> TermHashMap {
let heap = MemoryArena::new();
let table_size = 1 << num_bucket_power_of_2;
let table: Vec<KeyValue> = iter::repeat(KeyValue::default()).take(table_size).collect();
TermHashMap {
table: table.into_boxed_slice(),
heap,
mask: table_size - 1,
occupied: Vec::with_capacity(table_size / 2),
len: 0,
}
}
fn probe(&self, hash: u32) -> QuadraticProbing {
QuadraticProbing::compute(hash as usize, self.mask)
}
pub fn mem_usage(&self) -> usize {
self.table.len() * mem::size_of::<KeyValue>()
}
fn is_saturated(&self) -> bool {
self.table.len() < self.occupied.len() * 3
}
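    // `mutate_or_create` resizes once more than a third of the buckets are
    // occupied, which keeps probe sequences short.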
#[inline(always)]
fn get_key_value(&self, addr: Addr) -> (&[u8], Addr) {
let data = self.heap.slice_from(addr);
let key_bytes_len = NativeEndian::read_u16(data) as usize;
let key_bytes: &[u8] = &data[2..][..key_bytes_len];
(key_bytes, addr.offset(2u32 + key_bytes_len as u32))
}
#[inline(always)]
fn get_value_addr_if_key_match(&self, target_key: &[u8], addr: Addr) -> Option<Addr> {
let (stored_key, value_addr) = self.get_key_value(addr);
if stored_key == target_key {
Some(value_addr)
} else {
None
}
}
fn set_bucket(&mut self, hash: u32, key_value_addr: Addr, bucket: usize) -> UnorderedTermId {
self.occupied.push(bucket);
let unordered_term_id = self.len as UnorderedTermId;
self.len += 1;
self.table[bucket] = KeyValue {
key_value_addr,
hash,
unordered_term_id,
};
unordered_term_id
}
pub fn iter(&self) -> Iter<'_> {
Iter {
inner: self.occupied.iter(),
hashmap: &self,
}
}
fn resize(&mut self) {
let new_len = self.table.len() * 2;
let mask = new_len - 1;
self.mask = mask;
let new_table = vec![KeyValue::default(); new_len].into_boxed_slice();
let old_table = mem::replace(&mut self.table, new_table);
for old_pos in self.occupied.iter_mut() {
let key_value: KeyValue = old_table[*old_pos];
let mut probe = QuadraticProbing::compute(key_value.hash as usize, mask);
loop {
let bucket = probe.next_probe();
if self.table[bucket].is_empty() {
*old_pos = bucket;
self.table[bucket] = key_value;
break;
}
}
}
}
/// `mutate_or_create` creates a new entry for a given key if it does not exist,
/// or updates the existing entry.
///
/// The actual logic for this update is defined in the `updater`
/// argument.
///
/// If the key is not present, `updater` will receive `None` and
/// will be in charge of returning a default value.
/// If the key already has an associated value, it will be passed
/// `Some(previous_value)`.
pub fn mutate_or_create<S, V, TMutator>(
&mut self,
key: S,
mut updater: TMutator,
) -> UnorderedTermId
where
S: AsRef<[u8]>,
V: Copy + 'static,
TMutator: FnMut(Option<V>) -> V,
{
if self.is_saturated() {
self.resize();
}
let key_bytes: &[u8] = key.as_ref();
let hash = murmurhash2(key.as_ref());
let mut probe = self.probe(hash);
loop {
let bucket = probe.next_probe();
let kv: KeyValue = self.table[bucket];
if kv.is_empty() {
// The key does not exist yet.
let val = updater(None);
let num_bytes =
std::mem::size_of::<u16>() + key_bytes.len() + std::mem::size_of::<V>();
let key_addr = self.heap.allocate_space(num_bytes);
{
let data = self.heap.slice_mut(key_addr, num_bytes);
NativeEndian::write_u16(data, key_bytes.len() as u16);
let stop = 2 + key_bytes.len();
data[2..stop].copy_from_slice(key_bytes);
store(&mut data[stop..], val);
}
return self.set_bucket(hash, key_addr, bucket);
} else if kv.hash == hash {
if let Some(val_addr) =
self.get_value_addr_if_key_match(key_bytes, kv.key_value_addr)
{
let v = self.heap.read(val_addr);
let new_v = updater(Some(v));
self.heap.write_at(val_addr, new_v);
return kv.unordered_term_id;
}
}
}
}
}
#[cfg(test)]
mod tests {
use super::TermHashMap;
use std::collections::HashMap;
#[test]
fn test_hash_map() {
let mut hash_map: TermHashMap = TermHashMap::new(18);
{
hash_map.mutate_or_create("abc", |opt_val: Option<u32>| {
assert_eq!(opt_val, None);
3u32
});
}
{
hash_map.mutate_or_create("abcd", |opt_val: Option<u32>| {
assert_eq!(opt_val, None);
4u32
});
}
{
hash_map.mutate_or_create("abc", |opt_val: Option<u32>| {
assert_eq!(opt_val, Some(3u32));
5u32
});
}
let mut vanilla_hash_map = HashMap::new();
let mut iter_values = hash_map.iter();
while let Some((key, addr, _)) = iter_values.next() {
let val: u32 = hash_map.heap.read(addr);
vanilla_hash_map.insert(key.to_owned(), val);
}
assert_eq!(vanilla_hash_map.len(), 2);
}
}
| default |
get-user.decorater.ts | import { createParamDecorator, ExecutionContext } from '@nestjs/common'
import { User } from 'src/users/user.schema'
export const GetUser = createParamDecorator(
(data, context: ExecutionContext): User => {
const req = context.switchToHttp().getRequest()
return req.user
}, | ) | |
utils.py | from pdfminer.utils import PDFDocEncoding
from pdfminer.psparser import PSLiteral
from pdfminer.pdftypes import PDFObjRef
from decimal import Decimal, ROUND_HALF_UP
import numbers
from operator import itemgetter
import itertools
from functools import lru_cache as cache
DEFAULT_X_TOLERANCE = 3
DEFAULT_Y_TOLERANCE = 3
def cluster_list(xs, tolerance=0):
tolerance = decimalize(tolerance)
if tolerance == Decimal(0):
return [[x] for x in sorted(xs)]
if len(xs) < 2:
return [[x] for x in sorted(xs)]
groups = []
xs = list(sorted(xs))
current_group = [xs[0]]
last = xs[0]
for x in xs[1:]:
if x <= (last + tolerance):
current_group.append(x)
else:
groups.append(current_group)
current_group = [x]
last = x
groups.append(current_group)
return groups
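# A minimal sketch of the grouping rule (illustrative values):
# cluster_list([1, 2, 5, 6], tolerance=1) -> [[1, 2], [5, 6]],
# since 2 <= 1 + 1 extends the first group while 5 > 2 + 1 starts a new one.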
def make_cluster_dict(values, tolerance):
tolerance = decimalize(tolerance)
clusters = cluster_list(set(values), tolerance)
nested_tuples = [
[(val, i) for val in value_cluster] for i, value_cluster in enumerate(clusters)
]
cluster_dict = dict(itertools.chain(*nested_tuples))
return cluster_dict
def cluster_objects(objs, attr, tolerance):
if isinstance(attr, (str, int)):
attr_getter = itemgetter(attr)
else:
attr_getter = attr
objs = to_list(objs)
values = map(attr_getter, objs)
cluster_dict = make_cluster_dict(values, tolerance)
get_0, get_1 = itemgetter(0), itemgetter(1)
cluster_tuples = sorted(
((obj, cluster_dict.get(attr_getter(obj))) for obj in objs), key=get_1
)
grouped = itertools.groupby(cluster_tuples, key=get_1)
clusters = [list(map(get_0, v)) for k, v in grouped]
return clusters
def decode_text(s):
"""
Decodes a PDFDocEncoding string to Unicode.
Adds py3 compatibility to pdfminer's version.
"""
if type(s) == bytes and s.startswith(b"\xfe\xff"):
return str(s[2:], "utf-16be", "ignore")
else:
ords = (ord(c) if type(c) == str else c for c in s)
return "".join(PDFDocEncoding[o] for o in ords)
def decode_psl_list(_list):
return [
decode_text(value.name) if isinstance(value, PSLiteral) else value
for value in _list
]
def resolve(x):
if type(x) == PDFObjRef:
return x.resolve()
else:
return x
def get_dict_type(d):
if type(d) is not dict:
return None
t = d.get("Type")
if type(t) is PSLiteral:
return decode_text(t.name)
else:
return t
def resolve_all(x):
"""
Recursively resolves the given object and all the internals.
"""
t = type(x)
if t == PDFObjRef:
resolved = x.resolve()
# Avoid infinite recursion
if get_dict_type(resolved) == "Page":
return x
return resolve_all(resolved)
elif t in (list, tuple):
return t(resolve_all(v) for v in x)
elif t == dict:
if get_dict_type(x) == "Annot":
exceptions = ["Parent"]
else:
exceptions = []
return dict((k, v if k in exceptions else resolve_all(v)) for k, v in x.items())
else:
return x
@cache(maxsize=int(10e4))
def _decimalize(v, q=None):
# Convert int-like
if isinstance(v, numbers.Integral):
return Decimal(int(v))
# Convert float-like
elif isinstance(v, numbers.Real):
if q is not None:
return Decimal(repr(v)).quantize(Decimal(repr(q)), rounding=ROUND_HALF_UP)
else:
return Decimal(repr(v))
else:
raise ValueError(f"Cannot convert {v} to Decimal.")
def decimalize(v, q=None):
# If already a decimal, just return itself
if type(v) == Decimal:
return v
# If tuple/list passed, bulk-convert
if isinstance(v, (tuple, list)):
return type(v)(decimalize(x, q) for x in v)
else:
return _decimalize(v, q)
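# Illustrative calls: decimalize(1.15, q=0.1) -> Decimal('1.2') via ROUND_HALF_UP,
# and decimalize((1, 2.5)) -> (Decimal('1'), Decimal('2.5')), converting element-wise.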
def is_dataframe(collection):
cls = collection.__class__
name = ".".join([cls.__module__, cls.__name__])
return name == "pandas.core.frame.DataFrame"
def to_list(collection):
if is_dataframe(collection):
return collection.to_dict("records") # pragma: nocover
else:
return list(collection)
def dedupe_chars(chars, tolerance=1):
"""
Removes duplicate chars — those sharing the same text, fontname, size,
and positioning (within `tolerance`) as other characters in the set.
"""
key = itemgetter("fontname", "size", "upright", "text")
pos_key = itemgetter("doctop", "x0")
t = decimalize(tolerance)
def yield_unique_chars(chars):
sorted_chars = sorted(chars, key=key)
for grp, grp_chars in itertools.groupby(sorted_chars, key=key):
for y_cluster in cluster_objects(grp_chars, "doctop", t):
for x_cluster in cluster_objects(y_cluster, "x0", t):
yield sorted(x_cluster, key=pos_key)[0]
deduped = yield_unique_chars(chars)
return sorted(deduped, key=chars.index)
def collate_line(line_chars, tolerance=DEFAULT_X_TOLERANCE):
tolerance = decimalize(tolerance)
coll = ""
last_x1 = None
for char in sorted(line_chars, key=itemgetter("x0")):
if (last_x1 is not None) and (char["x0"] > (last_x1 + tolerance)):
coll += " "
last_x1 = char["x1"]
coll += char["text"]
return coll
def objects_to_rect(objects):
return {
"x0": min(map(itemgetter("x0"), objects)),
"x1": max(map(itemgetter("x1"), objects)),
"top": min(map(itemgetter("top"), objects)),
"bottom": max(map(itemgetter("bottom"), objects)),
}
def objects_to_bbox(objects):
return (
min(map(itemgetter("x0"), objects)),
min(map(itemgetter("top"), objects)),
max(map(itemgetter("x1"), objects)),
max(map(itemgetter("bottom"), objects)),
)
obj_to_bbox = itemgetter("x0", "top", "x1", "bottom")
def bbox_to_rect(bbox):
return {"x0": bbox[0], "top": bbox[1], "x1": bbox[2], "bottom": bbox[3]}
DEFAULT_WORD_EXTRACTION_SETTINGS = dict(
x_tolerance=DEFAULT_X_TOLERANCE,
y_tolerance=DEFAULT_Y_TOLERANCE,
keep_blank_chars=False,
use_text_flow=False,
horizontal_ltr=True, # Should words be read left-to-right?
vertical_ttb=True, # Should vertical words be read top-to-bottom?
extra_attrs=[],
)
class WordExtractor:
def __init__(self, **settings):
for s, val in settings.items():
if s not in DEFAULT_WORD_EXTRACTION_SETTINGS:
raise ValueError(f"{s} is not a valid WordExtractor parameter")
if s in ["x_tolerance", "y_tolerance"]:
val = decimalize(val)
setattr(self, s, val)
def merge_chars(self, ordered_chars):
x0, top, x1, bottom = objects_to_bbox(ordered_chars)
upright = ordered_chars[0]["upright"]
direction = 1 if (self.horizontal_ltr if upright else self.vertical_ttb) else -1
word = {
"text": "".join(map(itemgetter("text"), ordered_chars)),
"x0": x0,
"x1": x1,
"top": top,
"bottom": bottom,
"upright": upright,
"direction": direction,
}
for key in self.extra_attrs:
word[key] = ordered_chars[0][key]
return word
def char_begins_new_word(self, current_chars, next_char):
upright = current_chars[0]["upright"]
intraline_tol = self.x_tolerance if upright else self.y_tolerance
interline_tol = self.y_tolerance if upright else self.x_tolerance
word_x0, word_top, word_x1, word_bottom = objects_to_bbox(current_chars)
return (
(next_char["x0"] > word_x1 + intraline_tol)
or (next_char["x1"] < word_x0 - intraline_tol)
or (next_char["top"] > word_bottom + interline_tol)
or (next_char["bottom"] < word_top - interline_tol)
)
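    # Reading the four tests above: a character starts a new word when it falls
    # beyond the current word's right, left, bottom, or top edge by more than the
    # relevant tolerance (intraline along the text axis, interline across it).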
def iter_chars_to_words(self, chars):
current_word = []
for char in chars:
if not self.keep_blank_chars and char["text"].isspace():
if current_word:
yield current_word
current_word = []
elif current_word and self.char_begins_new_word(current_word, char):
yield current_word
current_word = [char]
else:
current_word.append(char)
if current_word:
yield current_word
def iter_sort_chars(self, chars):
def upright_key(x):
return -int(x["upright"])
for upright_cluster in cluster_objects(chars, upright_key, 0):
upright = upright_cluster[0]["upright"]
cluster_key = "doctop" if upright else "x0"
# Cluster by line
subclusters = cluster_objects(
upright_cluster, cluster_key, self.y_tolerance
)
for sc in subclusters:
# Sort within line
sort_key = "x0" if upright else "doctop"
sc = sorted(sc, key=itemgetter(sort_key))
# Reverse order if necessary
if not (self.horizontal_ltr if upright else self.vertical_ttb):
sc = reversed(sc)
yield from sc
def iter_extract(self, chars):
if not self.use_text_flow:
chars = self.iter_sort_chars(chars)
grouping_key = itemgetter("upright", *self.extra_attrs)
grouped = itertools.groupby(chars, grouping_key)
for keyvals, char_group in grouped:
for word_chars in self.iter_chars_to_words(char_group):
yield self.merge_chars(word_chars)
def extract(self, chars):
return list(self.iter_extract(chars))
def extract_words(chars, **kwargs):
settings = dict(DEFAULT_WORD_EXTRACTION_SETTINGS)
settings.update(kwargs)
return WordExtractor(**settings).extract(chars)
def extract_text(
chars, x_tolerance=DEFAULT_X_TOLERANCE, y_tolerance=DEFAULT_Y_TOLERANCE
):
if len(chars) == 0:
return None
chars = to_list(chars)
doctop_clusters = cluster_objects(chars, "doctop", y_tolerance)
lines = (collate_line(line_chars, x_tolerance) for line_chars in doctop_clusters)
coll = "\n".join(lines)
return coll
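# Sketch of the pipeline: chars are clustered into lines by "doctop" (within
# y_tolerance), each line is collated left-to-right with spaces inserted at gaps
# wider than x_tolerance, and the lines are joined with newlines.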
collate_chars = extract_text
def filter_objects(objs, fn):
if isinstance(objs, dict):
return dict((k, filter_objects(v, fn)) for k, v in objs.items())
initial_type = type(objs)
objs = to_list(objs)
filtered = filter(fn, objs)
return initial_type(filtered)
def get_bbox_overlap(a, b):
a_left, a_top, a_right, a_bottom = decimalize(a)
b_left, b_top, b_right, b_bottom = decimalize(b)
o_left = max(a_left, b_left)
o_right = min(a_right, b_right)
o_bottom = min(a_bottom, b_bottom)
o_top = max(a_top, b_top)
o_width = o_right - o_left
o_height = o_bottom - o_top
if o_height >= 0 and o_width >= 0 and o_height + o_width > 0:
return (o_left, o_top, o_right, o_bottom)
else:
return None
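# Illustrative boxes: get_bbox_overlap((0, 0, 10, 10), (5, 5, 20, 20)) -> (5, 5, 10, 10);
# boxes that do not intersect at all return None.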
def calculate_area(bbox):
left, top, right, bottom = bbox
if left > right or top > bottom:
raise ValueError(f"{bbox} has a negative width or height.")
return (right - left) * (bottom - top)
def clip_obj(obj, bbox):
bbox = decimalize(bbox)
overlap = get_bbox_overlap(obj_to_bbox(obj), bbox)
if overlap is None:
return None
dims = bbox_to_rect(overlap)
copy = dict(obj)
for attr in ["x0", "top", "x1", "bottom"]:
copy[attr] = dims[attr]
if dims["top"] != obj["bottom"] or dims["top"] != obj["bottom"]:
diff = dims["top"] - obj["top"]
copy["doctop"] = obj["doctop"] + diff
copy["width"] = copy["x1"] - copy["x0"]
copy["height"] = copy["bottom"] - copy["top"]
return copy
def intersects_bbox(objs, bbox):
"""
Filters objs to only those intersecting the bbox
"""
initial_type = type(objs)
objs = to_list(objs)
matching = [
obj for obj in objs if get_bbox_overlap(obj_to_bbox(obj), bbox) is not None
]
return initial_type(matching)
def within_bbox(objs, bbox):
"""
Filters objs to only those fully within the bbox
"""
if isinstance(objs, dict):
return dict((k, within_bbox(v, bbox)) for k, v in objs.items())
initial_type = type(objs)
objs = to_list(objs)
matching = [
obj
for obj in objs
if get_bbox_overlap(obj_to_bbox(obj), bbox) == obj_to_bbox(obj)
]
return initial_type(matching)
def crop_to_bbox(objs, bbox):
"""
Filters objs to only those intersecting the bbox,
and crops the extent of the objects to the bbox.
"""
if isinstance(objs, dict):
return dict((k, crop_to_bbox(v, bbox)) for k, v in objs.items())
initial_type = type(objs)
objs = to_list(objs)
cropped = list(filter(None, (clip_obj(obj, bbox) for obj in objs)))
return initial_type(cropped)
def move_object(obj, axis, value):
assert axis in ("h", "v")
if axis == "h":
new_items = (
("x0", obj["x0"] + value),
("x1", obj["x1"] + value),
)
if axis == "v":
new_items = [
("top", obj["top"] + value),
("bottom", obj["bottom"] + value),
]
if "doctop" in obj:
new_items += [("doctop", obj["doctop"] + value)]
if "y0" in obj:
new_items += [
("y0", obj["y0"] - value),
("y1", obj["y1"] - value),
]
return obj.__class__(tuple(obj.items()) + tuple(new_items))
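# e.g. move_object({"x0": 1, "x1": 4}, "h", 2) -> {"x0": 3, "x1": 6} (plain-dict
# input assumed; later pairs win when the class is rebuilt from the merged items).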
def snap_objects(objs, attr, tolerance):
axis = {"x0": "h", "x1": "h", "top": "v", "bottom": "v"}[attr]
clusters = cluster_objects(objs, attr, tolerance)
avgs = [sum(map(itemgetter(attr), objs)) / len(objs) for objs in clusters]
snapped_clusters = [
[move_object(obj, axis, avg - obj[attr]) for obj in cluster]
for cluster, avg in zip(clusters, avgs)
]
return list(itertools.chain(*snapped_clusters))
def resize_object(obj, key, value):
assert key in ("x0", "x1", "top", "bottom")
old_value = obj[key]
diff = value - old_value
new_items = [
(key, value),
]
if key == "x0":
assert value <= obj["x1"]
new_items.append(("width", obj["x1"] - value))
elif key == "x1":
assert value >= obj["x0"]
new_items.append(("width", value - obj["x0"]))
elif key == "top":
assert value <= obj["bottom"]
new_items.append(("doctop", obj["doctop"] + diff))
new_items.append(("height", obj["height"] - diff))
if "y1" in obj:
new_items.append(("y1", obj["y1"] - diff))
elif key == "bottom":
assert value >= obj["top"]
new_items.append(("height", obj["height"] + diff))
if "y0" in obj:
new_items.append(("y0", obj["y0"] - diff))
return obj.__class__(tuple(obj.items()) + tuple(new_items))
def curve_to_edges(curve):
point_pairs = zip(curve["points"], curve["points"][1:])
return [
{
"x0": min(p0[0], p1[0]),
"x1": max(p0[0], p1[0]),
"top": min(p0[1], p1[1]),
"doctop": min(p0[1], p1[1]) + (curve["doctop"] - curve["top"]),
"bottom": max(p0[1], p1[1]),
"width": abs(p0[0] - p1[0]),
"height": abs(p0[1] - p1[1]),
"orientation": "v" if p0[0] == p1[0] else ("h" if p0[1] == p1[1] else None),
}
for p0, p1 in point_pairs
]
def rec | ct):
top, bottom, left, right = [dict(rect) for _ in range(4)]
top.update(
{
"object_type": "rect_edge",
"height": decimalize(0),
"y0": rect["y1"],
"bottom": rect["top"],
"orientation": "h",
}
)
bottom.update(
{
"object_type": "rect_edge",
"height": decimalize(0),
"y1": rect["y0"],
"top": rect["top"] + rect["height"],
"doctop": rect["doctop"] + rect["height"],
"orientation": "h",
}
)
left.update(
{
"object_type": "rect_edge",
"width": decimalize(0),
"x1": rect["x0"],
"orientation": "v",
}
)
right.update(
{
"object_type": "rect_edge",
"width": decimalize(0),
"x0": rect["x1"],
"orientation": "v",
}
)
return [top, bottom, left, right]
def line_to_edge(line):
edge = dict(line)
edge["orientation"] = "h" if (line["top"] == line["bottom"]) else "v"
return edge
def obj_to_edges(obj):
return {
"line": lambda x: [line_to_edge(x)],
"rect": rect_to_edges,
"rect_edge": rect_to_edges,
"curve": curve_to_edges,
}[obj["object_type"]](obj)
def filter_edges(edges, orientation=None, edge_type=None, min_length=1):
if orientation not in ("v", "h", None):
raise ValueError("Orientation must be 'v' or 'h'")
def test(e):
dim = "height" if e["orientation"] == "v" else "width"
et_correct = e["object_type"] == edge_type if edge_type is not None else True
orient_correct = orientation is None or e["orientation"] == orientation
return et_correct and orient_correct and (e[dim] >= min_length)
edges = filter(test, edges)
return list(edges)
| t_to_edges(re |
main.rs | #![no_std]
#![no_main]
extern crate alloc;
use alloc::string::String;
use contract::{
contract_api::{runtime, system},
unwrap_or_revert::UnwrapOrRevert,
};
use types::{ApiError, ContractRef, URef, U512};
const POS_BOND: &str = "bond";
const POS_UNBOND: &str = "unbond";
const COMMAND_BOND: &str = "bond";
const COMMAND_UNBOND: &str = "unbond";
fn bond(pos: &ContractRef, amount: &U512, source: URef) {
runtime::call_contract::<_, ()>(pos.clone(), (POS_BOND, *amount, source));
}
fn unbond(pos: &ContractRef, amount: Option<U512>) {
runtime::call_contract::<_, ()>(pos.clone(), (POS_UNBOND, amount));
}
#[no_mangle]
pub extern "C" fn call() {
let command: String = runtime::get_arg(0)
.unwrap_or_revert_with(ApiError::MissingArgument)
.unwrap_or_revert_with(ApiError::InvalidArgument);
let pos_pointer = system::get_proof_of_stake();
if command == COMMAND_BOND {
let rewards_purse: URef = runtime::get_arg(1)
.unwrap_or_revert_with(ApiError::MissingArgument)
.unwrap_or_revert_with(ApiError::InvalidArgument);
let available_reward = runtime::get_arg(2)
.unwrap_or_revert_with(ApiError::MissingArgument)
.unwrap_or_revert_with(ApiError::InvalidArgument);
// Attempt to bond using the rewards purse - should not be possible | }
} | bond(&pos_pointer, &available_reward, rewards_purse);
} else if command == COMMAND_UNBOND {
unbond(&pos_pointer, None); |
fr-MQ.js | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
export default [
[
['minuit', 'midi', 'mat.', 'ap.m.', 'soir', 'nuit'], ,
['minuit', 'midi', 'du matin', 'de l’après-midi', 'du soir', 'du matin']
],
[
['minuit', 'midi', 'mat.', 'ap.m.', 'soir', 'nuit'], ,
['minuit', 'midi', 'matin', 'après-midi', 'soir', 'nuit']
],
[
'00:00', '12:00', ['04:00', '12:00'], ['12:00', '18:00'], ['18:00', '24:00'],
['00:00', '04:00'] | //# sourceMappingURL=fr-MQ.js.map | ]
]; |
city_bus_api_real_time_near_stop1_parameters.go | // Code generated by go-swagger; DO NOT EDIT.
package city_bus
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"net/http"
"time"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
cr "github.com/go-openapi/runtime/client"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
)
// NewCityBusAPIRealTimeNearStop1Params creates a new CityBusAPIRealTimeNearStop1Params object,
// with the default timeout for this client.
//
// Default values are not hydrated, since defaults are normally applied by the API server side.
//
// To enforce default values in parameter, use SetDefaults or WithDefaults.
func NewCityBusAPIRealTimeNearStop1Params() *CityBusAPIRealTimeNearStop1Params {
return &CityBusAPIRealTimeNearStop1Params{
timeout: cr.DefaultTimeout,
}
}
// NewCityBusAPIRealTimeNearStop1ParamsWithTimeout creates a new CityBusAPIRealTimeNearStop1Params object
// with the ability to set a timeout on a request.
func NewCityBusAPIRealTimeNearStop1ParamsWithTimeout(timeout time.Duration) *CityBusAPIRealTimeNearStop1Params {
return &CityBusAPIRealTimeNearStop1Params{
timeout: timeout,
}
}
// NewCityBusAPIRealTimeNearStop1ParamsWithContext creates a new CityBusAPIRealTimeNearStop1Params object
// with the ability to set a context for a request.
func NewCityBusAPIRealTimeNearStop1ParamsWithContext(ctx context.Context) *CityBusAPIRealTimeNearStop1Params {
return &CityBusAPIRealTimeNearStop1Params{
Context: ctx,
}
}
// NewCityBusAPIRealTimeNearStop1ParamsWithHTTPClient creates a new CityBusAPIRealTimeNearStop1Params object
// with the ability to set a custom HTTPClient for a request.
func | (client *http.Client) *CityBusAPIRealTimeNearStop1Params {
return &CityBusAPIRealTimeNearStop1Params{
HTTPClient: client,
}
}
/* CityBusAPIRealTimeNearStop1Params contains all the parameters to send to the API endpoint
for the city bus Api real time near stop 1 operation.
Typically these are written to a http.Request.
*/
type CityBusAPIRealTimeNearStop1Params struct {
/* DollarFilter.
Filter
*/
DollarFilter *string
/* DollarFormat.
Specify the source format
*/
DollarFormat string
/* DollarOrderby.
Sort order
*/
DollarOrderby *string
/* DollarSelect.
Select fields
*/
DollarSelect *string
/* DollarSkip.
Skip the first N records
*/
DollarSkip *string
/* DollarTop.
Take the first N records
Default: 30
*/
DollarTop *int64
/* City.
The city to query
*/
City string
/* RouteName.
Route name in Traditional Chinese, e.g. '307'
*/
RouteName string
/* Health.
Add the parameter '?health=true' to query the health status of this API service
*/
Health *string
timeout time.Duration
Context context.Context
HTTPClient *http.Client
}
// WithDefaults hydrates default values in the city bus Api real time near stop 1 params (not the query body).
//
// All values with no default are reset to their zero value.
func (o *CityBusAPIRealTimeNearStop1Params) WithDefaults() *CityBusAPIRealTimeNearStop1Params {
o.SetDefaults()
return o
}
// SetDefaults hydrates default values in the city bus Api real time near stop 1 params (not the query body).
//
// All values with no default are reset to their zero value.
func (o *CityBusAPIRealTimeNearStop1Params) SetDefaults() {
var (
dollarTopDefault = int64(30)
)
val := CityBusAPIRealTimeNearStop1Params{
DollarTop: &dollarTopDefault,
}
val.timeout = o.timeout
val.Context = o.Context
val.HTTPClient = o.HTTPClient
*o = val
}
// WithTimeout adds the timeout to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) WithTimeout(timeout time.Duration) *CityBusAPIRealTimeNearStop1Params {
o.SetTimeout(timeout)
return o
}
// SetTimeout adds the timeout to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) SetTimeout(timeout time.Duration) {
o.timeout = timeout
}
// WithContext adds the context to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) WithContext(ctx context.Context) *CityBusAPIRealTimeNearStop1Params {
o.SetContext(ctx)
return o
}
// SetContext adds the context to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) SetContext(ctx context.Context) {
o.Context = ctx
}
// WithHTTPClient adds the HTTPClient to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) WithHTTPClient(client *http.Client) *CityBusAPIRealTimeNearStop1Params {
o.SetHTTPClient(client)
return o
}
// SetHTTPClient adds the HTTPClient to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) SetHTTPClient(client *http.Client) {
o.HTTPClient = client
}
// WithDollarFilter adds the dollarFilter to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) WithDollarFilter(dollarFilter *string) *CityBusAPIRealTimeNearStop1Params {
o.SetDollarFilter(dollarFilter)
return o
}
// SetDollarFilter adds the dollarFilter to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) SetDollarFilter(dollarFilter *string) {
o.DollarFilter = dollarFilter
}
// WithDollarFormat adds the dollarFormat to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) WithDollarFormat(dollarFormat string) *CityBusAPIRealTimeNearStop1Params {
o.SetDollarFormat(dollarFormat)
return o
}
// SetDollarFormat adds the dollarFormat to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) SetDollarFormat(dollarFormat string) {
o.DollarFormat = dollarFormat
}
// WithDollarOrderby adds the dollarOrderby to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) WithDollarOrderby(dollarOrderby *string) *CityBusAPIRealTimeNearStop1Params {
o.SetDollarOrderby(dollarOrderby)
return o
}
// SetDollarOrderby adds the dollarOrderby to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) SetDollarOrderby(dollarOrderby *string) {
o.DollarOrderby = dollarOrderby
}
// WithDollarSelect adds the dollarSelect to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) WithDollarSelect(dollarSelect *string) *CityBusAPIRealTimeNearStop1Params {
o.SetDollarSelect(dollarSelect)
return o
}
// SetDollarSelect adds the dollarSelect to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) SetDollarSelect(dollarSelect *string) {
o.DollarSelect = dollarSelect
}
// WithDollarSkip adds the dollarSkip to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) WithDollarSkip(dollarSkip *string) *CityBusAPIRealTimeNearStop1Params {
o.SetDollarSkip(dollarSkip)
return o
}
// SetDollarSkip adds the dollarSkip to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) SetDollarSkip(dollarSkip *string) {
o.DollarSkip = dollarSkip
}
// WithDollarTop adds the dollarTop to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) WithDollarTop(dollarTop *int64) *CityBusAPIRealTimeNearStop1Params {
o.SetDollarTop(dollarTop)
return o
}
// SetDollarTop adds the dollarTop to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) SetDollarTop(dollarTop *int64) {
o.DollarTop = dollarTop
}
// WithCity adds the city to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) WithCity(city string) *CityBusAPIRealTimeNearStop1Params {
o.SetCity(city)
return o
}
// SetCity adds the city to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) SetCity(city string) {
o.City = city
}
// WithRouteName adds the routeName to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) WithRouteName(routeName string) *CityBusAPIRealTimeNearStop1Params {
o.SetRouteName(routeName)
return o
}
// SetRouteName adds the routeName to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) SetRouteName(routeName string) {
o.RouteName = routeName
}
// WithHealth adds the health to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) WithHealth(health *string) *CityBusAPIRealTimeNearStop1Params {
o.SetHealth(health)
return o
}
// SetHealth adds the health to the city bus Api real time near stop 1 params
func (o *CityBusAPIRealTimeNearStop1Params) SetHealth(health *string) {
o.Health = health
}
// WriteToRequest writes these params to a swagger request
func (o *CityBusAPIRealTimeNearStop1Params) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
if err := r.SetTimeout(o.timeout); err != nil {
return err
}
var res []error
if o.DollarFilter != nil {
// query param $filter
var qrDollarFilter string
if o.DollarFilter != nil {
qrDollarFilter = *o.DollarFilter
}
qDollarFilter := qrDollarFilter
if qDollarFilter != "" {
if err := r.SetQueryParam("$filter", qDollarFilter); err != nil {
return err
}
}
}
// query param $format
qrDollarFormat := o.DollarFormat
qDollarFormat := qrDollarFormat
if qDollarFormat != "" {
if err := r.SetQueryParam("$format", qDollarFormat); err != nil {
return err
}
}
if o.DollarOrderby != nil {
// query param $orderby
var qrDollarOrderby string
if o.DollarOrderby != nil {
qrDollarOrderby = *o.DollarOrderby
}
qDollarOrderby := qrDollarOrderby
if qDollarOrderby != "" {
if err := r.SetQueryParam("$orderby", qDollarOrderby); err != nil {
return err
}
}
}
if o.DollarSelect != nil {
// query param $select
var qrDollarSelect string
if o.DollarSelect != nil {
qrDollarSelect = *o.DollarSelect
}
qDollarSelect := qrDollarSelect
if qDollarSelect != "" {
if err := r.SetQueryParam("$select", qDollarSelect); err != nil {
return err
}
}
}
if o.DollarSkip != nil {
// query param $skip
var qrDollarSkip string
if o.DollarSkip != nil {
qrDollarSkip = *o.DollarSkip
}
qDollarSkip := qrDollarSkip
if qDollarSkip != "" {
if err := r.SetQueryParam("$skip", qDollarSkip); err != nil {
return err
}
}
}
if o.DollarTop != nil {
// query param $top
var qrDollarTop int64
if o.DollarTop != nil {
qrDollarTop = *o.DollarTop
}
qDollarTop := swag.FormatInt64(qrDollarTop)
if qDollarTop != "" {
if err := r.SetQueryParam("$top", qDollarTop); err != nil {
return err
}
}
}
// path param City
if err := r.SetPathParam("City", o.City); err != nil {
return err
}
// path param RouteName
if err := r.SetPathParam("RouteName", o.RouteName); err != nil {
return err
}
if o.Health != nil {
// query param health
var qrHealth string
if o.Health != nil {
qrHealth = *o.Health
}
qHealth := qrHealth
if qHealth != "" {
if err := r.SetQueryParam("health", qHealth); err != nil {
return err
}
}
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
| NewCityBusAPIRealTimeNearStop1ParamsWithHTTPClient |
analytics.py | from import_reqs import *
from app import app
@app.route('/enterprise/product=<id>', methods=['GET'])
def get_advanced_analytics(id):
"""
get_advanced_analytics(id): serves the company dashboard, where enterprise
users view the advanced analytics of a product.
"""
try:
id_token = request.headers['Authorization']
claims = auth.verify_id_token(id_token)
uid = claims['uid']
if claims['Enterprise'] is True:
|
else:
return (jsonify("You are not authorized to view this specific enterprise analytics page."), 403)
except Exception as e:
return f"An Error Occured: {e}" | todo = ADVANCED_ANALYTICS.document(id).get().to_dict()
if todo['company_id'] == uid:
return jsonify(todo), 200
else:
return (jsonify({"Access Denied"}), 403) |
gaia_apps.js | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
'use strict';
var GaiaApps = {
normalizeName: function(name) {
return name.replace(/[- ]+/g, '').toLowerCase();
},
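  // e.g. normalizeName('Smart Collections') above returns 'smartcollections'.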
getInstalledApps: function() {
let req = navigator.mozApps.mgmt.getAll();
req.onsuccess = function() {
marionetteScriptFinished(req.result);
}
},
getRunningApps: function() {
let manager = window.wrappedJSObject.AppWindowManager || window.wrappedJSObject.WindowManager;
let runningApps = manager.getRunningApps();
// Return a simplified version of the runningApps object which can be
// JSON-serialized.
let apps = {};
for (let app in runningApps) {
let anApp = {};
for (let key in runningApps[app]) {
if (['name', 'origin', 'manifest'].indexOf(key) > -1) {
anApp[key] = runningApps[app][key];
}
}
apps[app] = anApp;
}
return apps;
},
getRunningAppOrigin: function(name) {
let manager = window.wrappedJSObject.AppWindowManager || window.wrappedJSObject.WindowManager;
let runningApps = manager.getRunningApps();
let origin;
for (let property in runningApps) {
if (runningApps[property].name == name) {
origin = property;
}
}
return origin;
},
getPermission: function(appName, permissionName) {
GaiaApps.locateWithName(appName, function(app) {
console.log("Getting permission '" + permissionName + "' for " + appName);
var mozPerms = navigator.mozPermissionSettings;
var result = mozPerms.get(
permissionName, app.manifestURL, app.origin, false
);
marionetteScriptFinished(result);
});
},
setPermission: function(appName, permissionName, value) {
GaiaApps.locateWithName(appName, function(app) {
console.log("Setting permission '" + permissionName + "' for " +
appName + "to '" + value + "'");
var mozPerms = navigator.mozPermissionSettings;
mozPerms.set(
permissionName, value, app.manifestURL, app.origin, false
);
marionetteScriptFinished();
});
},
sendLocateResponse: function(aCallback, app, appName, entryPoint) {
var callback = aCallback || marionetteScriptFinished;
if (callback === marionetteScriptFinished) {
var result = false;
if (typeof(app) === 'object') {
result = {
name: app.manifest.name,
origin: app.origin,
entryPoint: entryPoint || null,
normalizedName: appName
};
}
callback(result);
} else {
callback(app, appName, entryPoint);
}
},
locateWithName: function(name, aCallback) {
var callback = aCallback || marionetteScriptFinished;
let apps = window.wrappedJSObject.Applications.installedApps;
let normalizedSearchName = GaiaApps.normalizeName(name);
for (let manifestURL in apps) {
let app = apps[manifestURL];
let origin = null;
let entryPoints = app.manifest.entry_points;
if (entryPoints) {
for (let ep in entryPoints) {
let currentEntryPoint = entryPoints[ep];
let appName = currentEntryPoint.name;
if (normalizedSearchName === GaiaApps.normalizeName(appName)) {
return GaiaApps.sendLocateResponse(callback, app, appName, ep);
}
}
} else {
let appName = app.manifest.name;
if (normalizedSearchName === GaiaApps.normalizeName(appName)) {
return GaiaApps.sendLocateResponse(callback, app, appName);
}
}
}
callback(false);
},
locateWithManifestURL: function(manifestURL, entryPoint, aCallback) {
var callback = aCallback || marionetteScriptFinished;
var app = window.wrappedJSObject.Applications.getByManifestURL(manifestURL);
var appName;
if (entryPoint) {
if (app.manifest.entry_points[entryPoint]) {
appName = app.manifest.entry_points[entryPoint].name;
} else {
app = null;
}
} else {
appName = app.manifest.name;
}
GaiaApps.sendLocateResponse(callback, app, appName, entryPoint);
},
// Returns the number of running apps.
numRunningApps: function() {
let count = 0;
let manager = window.wrappedJSObject.AppWindowManager || window.wrappedJSObject.WindowManager;
let runningApps = manager.getRunningApps();
for (let origin in runningApps) {
count++;
}
return count;
},
// Kills the specified app.
kill: function(aOrigin, aCallback) {
var callback = aCallback || marionetteScriptFinished;
let manager = window.wrappedJSObject.AppWindowManager || window.wrappedJSObject.WindowManager;
let runningApps = manager.getRunningApps();
if (!runningApps.hasOwnProperty(aOrigin)) {
callback(false);
}
else {
window.addEventListener('appterminated', function gt_onAppTerminated() {
window.removeEventListener('appterminated', gt_onAppTerminated);
waitFor(
function() {
console.log("app with origin '" + aOrigin + "' has terminated");
callback(true);
},
function() {
let runningApps = manager.getRunningApps();
return !runningApps.hasOwnProperty(aOrigin);
}
);
});
console.log("terminating app with origin '" + aOrigin + "'");
manager.kill(aOrigin);
}
},
// Kills all running apps, except the homescreen.
killAll: function() {
let originsToClose = [];
let that = this;
let manager = window.wrappedJSObject.AppWindowManager || window.wrappedJSObject.WindowManager;
let runningApps = manager.getRunningApps();
for (let origin in runningApps) {
if (origin.indexOf('homescreen') == -1) {
originsToClose.push(origin);
}
}
if (!originsToClose.length) {
marionetteScriptFinished(true);
return;
}
originsToClose.slice(0).forEach(function(origin) {
GaiaApps.kill(origin, function() {});
});
// Even after the 'appterminated' event has been fired for an app,
// it can still exist in the apps list, so wait until 1 or fewer
// apps are running (since we don't close the homescreen app).
waitFor(
function() { marionetteScriptFinished(true); },
function() { return that.numRunningApps() <= 1; }
);
},
launch: function(app, appName, entryPoint) {
if (app) {
let manager = window.wrappedJSObject.AppWindowManager || window.wrappedJSObject.WindowManager;
let runningApps = manager.getRunningApps();
let origin = GaiaApps.getRunningAppOrigin(appName);
let sendResponse = function() {
let app = runningApps[origin];
let result = {
frame: (app.browser) ? app.browser.element : app.frame.firstChild,
src: (app.browser) ? app.browser.element.src : app.iframe.src,
name: app.name,
origin: origin};
marionetteScriptFinished(result);
};
if (manager.getDisplayedApp() == origin) {
console.log("app with origin '" + origin + "' is already running");
sendResponse();
} else {
window.addEventListener('appopen', function appOpen() {
window.removeEventListener('appopen', appOpen);
waitFor(
function() {
console.log("app with origin '" + origin + "' has launched");
sendResponse(); | origin = GaiaApps.getRunningAppOrigin(appName);
return manager.getDisplayedApp() == origin;
}
);
});
console.log("launching app with name '" + appName + "'");
app.launch(entryPoint || null);
}
} else {
marionetteScriptFinished(false);
}
},
// Launches app with the specified name (e.g., 'Calculator'); returns an
// object with the app frame if successful, false if the app can't be
// found, or times out if the app frame can't be found after launching the
// app.
launchWithName: function(name) {
GaiaApps.locateWithName(name, this.launch);
},
// Launches app with the specified manifestURL; returns an
// object with the app frame if successful, false if the app can't be
// found, or times out if the app frame can't be found after launching the
// app.
//
// This is preferred over launchWithName because localized builds have
// different names.
launchWithManifestURL: function(manifestURL, entryPoint) {
GaiaApps.locateWithManifestURL(manifestURL, entryPoint, this.launch);
},
close: function(app, appName, entryPoint) {
if (app) {
let origin = GaiaApps.getRunningAppOrigin(appName);
GaiaApps.kill(origin);
} else {
marionetteScriptFinished(false);
}
},
// Closes app with the specified name (e.g., 'Calculator'); returns nothing
closeWithName: function(name) {
GaiaApps.locateWithName(name, this.close);
},
closeWithManifestURL: function(manifestURL, entryPoint) {
GaiaApps.locateWithManifestURL(manifestURL, entryPoint, this.close);
},
/**
* Returns the currently displayed app.
*/
displayedApp: function() {
let manager = window.wrappedJSObject.AppWindowManager || window.wrappedJSObject.WindowManager;
let runningApps = manager.getRunningApps();
let origin = manager.getDisplayedApp();
console.log("app with origin '" + origin + "' is displayed");
let app = runningApps[origin];
let result = {
frame: (app.browser) ? app.browser.element : app.frame.firstChild,
src: (app.browser) ? app.browser.element.src : app.iframe.src,
name: app.name,
origin: origin};
marionetteScriptFinished(result);
},
/**
* Uninstalls the app with the specified name.
*/
uninstallWithName: function(name) {
GaiaApps.locateWithName(name, function uninstall(app) {
navigator.mozApps.mgmt.uninstall(app);
marionetteScriptFinished(false);
});
}
}; | },
function() { |
whiteblind.rs | use crate::attribute::{Attribute, AttributeName, AttributeCommon};
use crate::character::character_common_data::CharacterCommonData;
use crate::common::item_config_type::{ItemConfig, ItemConfigType};
use crate::common::WeaponType;
use crate::weapon::weapon_base_atk::WeaponBaseATKFamily;
use crate::weapon::weapon_common_data::WeaponCommonData;
use crate::weapon::weapon_effect::WeaponEffect;
use crate::weapon::weapon_static_data::WeaponStaticData;
use crate::weapon::weapon_sub_stat::WeaponSubStatFamily;
use crate::weapon::{WeaponConfig, WeaponName};
use crate::weapon::weapon_trait::WeaponTrait;
pub struct WhiteblindEffect {
stack: f64,
}
impl WhiteblindEffect {
pub fn new(config: &WeaponConfig) -> WhiteblindEffect {
match *config {
WeaponConfig::Whiteblind { stack } => WhiteblindEffect {
stack
},
_ => WhiteblindEffect {
stack: 0.0
}
}
}
}
impl<T: Attribute> WeaponEffect<T> for WhiteblindEffect {
fn apply(&self, data: &WeaponCommonData, attribute: &mut T) {
let value = (data.refine as f64 * 0.015 + 0.045) * self.stack;
attribute.add_atk_percentage("白影剑被动等效", value);
attribute.add_def_percentage("白影剑被动等效", value);
}
}
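// Sanity check on the numbers (sketch): per stack the bonus is refine * 1.5% + 4.5%,
// i.e. 6% at refine 1 up to 12% at refine 5, matching the steps quoted in the effect
// text; at 4 stacks and refine 1 that is a 24% boost to both ATK and DEF.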
pub struct Whiteblind;
impl WeaponTrai | eblind {
const META_DATA: WeaponStaticData = WeaponStaticData {
name: WeaponName::Whiteblind,
weapon_type: WeaponType::Claymore,
weapon_sub_stat: Some(WeaponSubStatFamily::DEF113),
weapon_base: WeaponBaseATKFamily::ATK510,
star: 4,
#[cfg(not(target_family = "wasm"))]
effect: Some("注能之锋:普通攻击和重击命中后,攻击力和防御力提高6%/7.5%/9%/10.5%/12%。该效果持续6秒,最多叠加4层,每0.5秒只能触发一次。"),
#[cfg(not(target_family = "wasm"))]
chs: "白影剑"
};
#[cfg(not(target_family = "wasm"))]
const CONFIG_DATA: Option<&'static [ItemConfig]> = Some(&[
ItemConfig {
name: "stack",
title: "被动等效层数",
config: ItemConfigType::Float {
min: 0.0,
max: 4.0,
default: 0.0
}
}
]);
fn get_effect<A: Attribute>(_character: &CharacterCommonData, config: &WeaponConfig) -> Option<Box<dyn WeaponEffect<A>>> {
Some(Box::new(WhiteblindEffect::new(config)))
}
}
| t for Whit |
test_consistent_dot.py | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import oneflow as flow
import oneflow.unittest
from oneflow.test_utils.automated_test_util import *
@autotest(n=1, check_graph=False)
def do_test_dot_impl(test_case, placement, sbp):
k = random(100, 1000) * 8
x = random_tensor(ndim=1, dim0=k).to_global(placement=placement, sbp=sbp)
y = random_tensor(ndim=1, dim0=k).to_global(placement=placement, sbp=sbp)
z = torch.dot(x, y)
return z
class TestDotConsistent(flow.unittest.TestCase):
@globaltest
def test_dot(test_case):
for placement in all_placement():
for sbp in all_sbp(placement, max_dim=1):
do_test_dot_impl(test_case, placement, sbp)
if __name__ == "__main__":
unittest.main() | ||
db.go | //go:generate go-assets-builder -p mysql -o assets.go ../../data/repository
package mysql
import (
"database/sql"
"io/ioutil"
"time"
"github.com/fireworq/fireworq/config"
_ "github.com/go-sql-driver/mysql" // initialize the driver
"github.com/rs/zerolog/log"
)
var schema []string
func init() {
schema = []string{
"/data/repository/mysql/schema/queue.sql",
"/data/repository/mysql/schema/queue_throttle.sql",
"/data/repository/mysql/schema/routing.sql",
"/data/repository/mysql/schema/config_revision.sql",
}
}
// Dsn returns the data source name of the storage specified in the
// configuration.
func Dsn() string {
dsn := config.Get("repository_mysql_dsn")
if dsn != "" {
return dsn
}
return config.Get("mysql_dsn")
}
// NewDB creates an instance of DB handler.
func NewDB() (*sql.DB, error) {
dsn := Dsn()
log.Info().Msgf("Connecting database %s ...", dsn)
db, err := sql.Open("mysql", dsn)
if err != nil {
return nil, err
}
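	// Capping each connection's lifetime just below the server's wait_timeout is a
	// guard (intent sketched here) against the pool reusing a connection that MySQL
	// has already closed on its side.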
func() {
var timeout int
if db.QueryRow("SELECT @@SESSION.wait_timeout").Scan(&timeout) != nil {
return
}
t := timeout - 1
if t < 1 {
t = 1
}
log.Debug().Msgf("wait_timeout: %d", timeout)
db.SetConnMaxLifetime(time.Duration(t) * time.Second)
}()
for _, path := range schema {
f, err := Assets.Open(path)
if err != nil {
log.Panic().Msg(err.Error()) | query, err := ioutil.ReadAll(f)
f.Close()
if err != nil {
log.Panic().Msg(err.Error())
}
_, err = db.Exec(string(query))
if err != nil {
return nil, err
}
}
return db, nil
} | }
|
mod.rs | use ndarray::*;
use num_traits::Zero;
pub mod raw;
pub mod unary;
use tract_core::internal::*;
pub fn space_to_batch_nd(pb: &crate::tfpb::node_def::NodeDef) -> TractResult<Box<Op>> {
let datum_type = pb.get_attr_datum_type("T")?;
Ok(Box::new(raw::SpaceToBatch::new(datum_type)))
}
pub fn batch_to_space_nd(pb: &crate::tfpb::node_def::NodeDef) -> TractResult<Box<Op>> {
let datum_type = pb.get_attr_datum_type("T")?;
Ok(Box::new(raw::BatchToSpace::new(datum_type)))
}
fn space_to_batch<T: Copy + Datum + Zero>(
input: Arc<Tensor>,
block_shape: &ArrayView1<i32>,
paddings: &ArrayView2<i32>,
) -> TractResult<Arc<Tensor>> |
fn batch_to_space<T: Copy + Datum + Zero>(
input: Arc<Tensor>,
block_shape: &ArrayView1<i32>,
crops: &ArrayView2<i32>,
) -> TractResult<Arc<Tensor>> {
let data = input.into_tensor().into_array()?;
let input_shape = data.shape().to_vec();
let crops: ArrayView2<i32> = crops.view().into_dimensionality()?;
let block_size = block_shape.iter().map(|a| *a as usize).product::<usize>();
// block_dim_1 .. block_dim_n, batches/block_size, dim_1, .. dim_n, chan_1, .., chan_n
let mut unflatten_blocked_shape = vec![];
unflatten_blocked_shape.extend(block_shape.iter().map(|a| *a as usize));
let batches = data.shape()[0] / block_size;
unflatten_blocked_shape.push(batches);
unflatten_blocked_shape.extend(&data.shape()[1..]);
let data = data.into_shape(&*unflatten_blocked_shape)?;
let mut permuted_axes = vec![block_shape.len()];
let mut padded_shape = vec![batches];
for i in 0..block_shape.shape()[0] {
permuted_axes.push(block_shape.len() + 1 + i);
permuted_axes.push(i);
padded_shape.push(block_shape[i] as usize * input_shape[i + 1]);
}
permuted_axes.extend((1 + block_shape.len() * 2)..data.ndim());
padded_shape.extend(&input_shape[1 + block_shape.len()..]);
let data = data.permuted_axes(permuted_axes);
let data: Vec<T> = data.into_iter().map(|x| *x).collect();
let data = ::ndarray::ArrayD::from_shape_vec(padded_shape, data)?;
let mut data = data;
for (i, crop) in crops.outer_iter().enumerate() {
if crop[0] != 0 || crop[1] != 0 {
let end = data.shape()[1 + i] as usize;
let range = (crop[0] as usize)..(end - crop[1] as usize);
data = data.slice_axis(Axis(i + 1), range.into()).map(|x| *x).to_owned();
}
}
Ok(data.into_arc_tensor())
}
#[cfg(test)]
mod tests {
#![allow(non_snake_case)]
use super::raw::{BatchToSpace, SpaceToBatch};
use super::*;
use tract_core::ops::InferenceOp;
// https://www.tensorflow.org/api_docs/python/tf/space_to_batch_nd
#[test]
fn space_to_batch_nd_1() {
assert_eq!(
SpaceToBatch::new(i32::datum_type())
.eval(tvec![
rctensor4(&[[[[1i32], [2]], [[3], [4]]]]),
rctensor1(&[2, 2]),
rctensor2(&[[0, 0], [0, 0]]),
])
.unwrap(),
tvec![rctensor4(&[[[[1i32]]], [[[2]]], [[[3]]], [[[4]]]])],
)
}
#[test]
fn space_to_batch_nd_2() {
assert_eq!(
SpaceToBatch::new(i32::datum_type())
.eval(tvec![
rctensor4(&[[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]]]),
rctensor1(&[2, 2]),
rctensor2(&[[0, 0], [0, 0]]),
])
.unwrap(),
tvec![rctensor4(&[[[[1i32, 2, 3]]], [[[4, 5, 6]]], [[[7, 8, 9]]], [[[10, 11, 12]]],]),],
)
}
#[test]
fn space_to_batch_nd_3() {
assert_eq!(
SpaceToBatch::new(i32::datum_type())
.eval(tvec![
rctensor4(&[[
[[1], [2], [3], [4]],
[[5], [6], [7], [8]],
[[9], [10], [11], [12]],
[[13], [14], [15], [16]],
]]),
rctensor1(&[2, 2]),
rctensor2(&[[0, 0], [0, 0]]),
])
.unwrap(),
tvec![rctensor4(&[
[[[1], [3]], [[9], [11]]],
[[[2], [4]], [[10], [12]]],
[[[5], [7]], [[13], [15]]],
[[[6], [8]], [[14], [16]]],
])],
)
}
#[test]
fn space_to_batch_nd_4() {
assert_eq!(
SpaceToBatch::new(i32::datum_type())
.eval(tvec![
rctensor4(&[
[[[1], [2], [3], [4]], [[5], [6], [7], [8]]],
[[[9], [10], [11], [12]], [[13], [14], [15], [16]]],
]),
rctensor1(&[2, 2]),
rctensor2(&[[0, 0], [2, 0]]),
])
.unwrap(),
tvec![rctensor4(&[
[[[0], [1], [3]]],
[[[0], [9], [11]]],
[[[0], [2], [4]]],
[[[0], [10], [12]]],
[[[0], [5], [7]]],
[[[0], [13], [15]]],
[[[0], [6], [8]]],
[[[0], [14], [16]]],
]),],
)
}
#[test]
fn space_to_batch_nd_infer_1() {
let op = SpaceToBatch::new(f32::datum_type());
let data = TensorFact::dt_shape(DatumType::F32, shapefact!(1, 4, 16));
let block_shape = TensorFact::from(Tensor::from(arr1(&[2])));
let paddings = TensorFact::from(Tensor::from(arr2(&[[0.to_dim(), 0.to_dim()]])));
let any = TensorFact::default();
let (_, outputs) =
op.infer_facts(tvec!(&data, &block_shape, &paddings), tvec!(&any)).unwrap();
assert_eq!(outputs[0], TensorFact::dt_shape(DatumType::F32, shapefact!(2, 2, 16)));
}
#[test]
fn space_to_batch_nd_infer_2() {
let op = SpaceToBatch::new(f32::datum_type());
let data = TensorFact::dt_shape(DatumType::F32, shapefact!(1, (TDim::s() - 4), 16));
let block_shape = TensorFact::from(Tensor::from(arr1(&[2])));
let paddings = TensorFact::from(Tensor::from(arr2(&[[0.to_dim(), (TDim::s() % 2)]])));
let any = TensorFact::default();
let (_, outputs) =
op.infer_facts(tvec!(&data, &block_shape, &paddings), tvec!(&any)).unwrap();
assert_eq!(
outputs[0],
TensorFact::dt_shape(
DatumType::F32,
shapefact!(2, ((TDim::s() + TDim::s() % 2 - 4) / 2), 16)
)
);
}
#[test]
fn batch_to_space_nd_1() {
assert_eq!(
BatchToSpace::new(i32::datum_type())
.eval(tvec![
rctensor4(&[[[[1]]], [[[2]]], [[[3]]], [[[4]]]]),
rctensor1(&[2, 2]),
rctensor2(&[[0, 0], [0, 0]]),
])
.unwrap(),
tvec![rctensor4(&[[[[1], [2]], [[3], [4]]]])]
)
}
#[test]
fn batch_to_space_nd_2() {
assert_eq!(
BatchToSpace::new(i32::datum_type())
.eval(tvec![
rctensor4(&[[[[1i32, 2, 3]]], [[[4, 5, 6]]], [[[7, 8, 9]]], [[[10, 11, 12]]],]),
rctensor1(&[2, 2]),
rctensor2(&[[0, 0], [0, 0]]),
])
.unwrap(),
tvec![rctensor4(&[[[[1i32, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]]])]
)
}
#[test]
fn batch_to_space_nd_3() {
assert_eq!(
BatchToSpace::new(i32::datum_type())
.eval(tvec![
rctensor4(&[
[[[1i32], [3]], [[9], [11]]],
[[[2], [4]], [[10], [12]]],
[[[5], [7]], [[13], [15]]],
[[[6], [8]], [[14], [16]]],
]),
rctensor1(&[2, 2]),
rctensor2(&[[0, 0], [0, 0]]),
])
.unwrap(),
tvec![rctensor4(&[[
[[1i32], [2], [3], [4]],
[[5], [6], [7], [8]],
[[9], [10], [11], [12]],
[[13], [14], [15], [16]],
]])
.into(),]
)
}
#[test]
fn batch_to_space_nd_4() {
assert_eq!(
BatchToSpace::new(i32::datum_type())
.eval(tvec![
rctensor4(&[
[[[0i32], [1], [3]]],
[[[0], [9], [11]]],
[[[0], [2], [4]]],
[[[0], [10], [12]]],
[[[0], [5], [7]]],
[[[0], [13], [15]]],
[[[0], [6], [8]]],
[[[0], [14], [16]]],
]),
rctensor1(&[2, 2]),
rctensor2(&[[0, 0], [2, 0]]),
])
.unwrap(),
tvec![rctensor4(&[
[[[1], [2], [3], [4]], [[5], [6], [7], [8]]],
[[[9], [10], [11], [12]], [[13], [14], [15], [16]]],
])
.into(),]
)
}
}
| {
let mut data = input.into_tensor().into_array::<T>()?;
for (ix, pad) in paddings.view().outer_iter().enumerate() {
if pad[0] != 0 {
let mut pad_shape = data.shape().to_vec();
pad_shape[ix + 1] = pad[0] as usize;
let tmp = ::ndarray::stack(
::ndarray::Axis(ix + 1),
&[::ndarray::ArrayD::zeros(pad_shape).view(), data.view()],
)?;
data = tmp;
}
if pad[1] != 0 {
let mut pad_shape = data.shape().to_vec();
pad_shape[ix + 1] = pad[1] as usize;
let tmp = ::ndarray::stack(
::ndarray::Axis(ix + 1),
&[data.view(), ::ndarray::ArrayD::zeros(pad_shape).view()],
)?;
data = tmp;
}
}
let mut reshaped = vec![data.shape()[0]];
let block_size = block_shape.iter().map(|a| *a as usize).product::<usize>();
let mut final_shape = vec![block_size * data.shape()[0]];
for (m, &block_shape_dim) in block_shape.iter().enumerate() {
reshaped.push(data.shape()[m + 1] / block_shape_dim as usize);
reshaped.push(block_shape_dim as usize);
final_shape.push(data.shape()[m + 1] / block_shape_dim as usize);
}
reshaped.extend(&data.shape()[block_shape.len() + 1..]);
final_shape.extend(&data.shape()[block_shape.len() + 1..]);
let data = data.into_shape(reshaped)?;
let mut permuted_axes: Vec<_> = (0..block_shape.len()).map(|x| 2 * x + 2).collect();
permuted_axes.push(0);
permuted_axes.extend((0..block_shape.len()).map(|x| 2 * x + 1));
permuted_axes.extend((block_shape.len() * 2 + 1)..data.ndim());
let data = data.permuted_axes(permuted_axes);
let data: Vec<T> = data.into_iter().map(|x| *x).collect();
let data = ::ndarray::ArrayD::from_shape_vec(final_shape, data)?;
Ok(data.into_arc_tensor())
} |
PhotonJet_Pt_10_14TeV_TuneCUETP8M1_cfi.py | import FWCore.ParameterSet.Config as cms
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.Pythia8CUEP8M1Settings_cfi import *
generator = cms.EDFilter("Pythia8GeneratorFilter",
pythiaHepMCVerbosity = cms.untracked.bool(False),
maxEventsToPrint = cms.untracked.int32(0),
pythiaPylistVerbosity = cms.untracked.int32(0),
filterEfficiency = cms.untracked.double(1.0),
comEnergy = cms.double(14000.0),
PythiaParameters = cms.PSet(
pythia8CommonSettingsBlock,
pythia8CUEP8M1SettingsBlock,
processParameters = cms.vstring(
'PromptPhoton:all = on',
'PhaseSpace:pTHatMin = 10.',
),
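# The 10 GeV pT-hat floor corresponds to the "Pt_10" tag in this fragment's
# name, just as comEnergy = 14000.0 corresponds to "14TeV".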
parameterSets = cms.vstring('pythia8CommonSettings',
'pythia8CUEP8M1Settings',
'processParameters',
)
)
)
metrics.go | /*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package metrics
import (
"sync"
"time"
"github.com/golang/glog"
"github.com/prometheus/client_golang/prometheus"
kubecontainer "k8s.io/kubernetes/pkg/kubelet/container"
)
const (
KubeletSubsystem = "kubelet"
PodWorkerLatencyKey = "pod_worker_latency_microseconds"
PodStartLatencyKey = "pod_start_latency_microseconds"
CgroupManagerOperationsKey = "cgroup_manager_latency_microseconds"
PodWorkerStartLatencyKey = "pod_worker_start_latency_microseconds"
PLEGRelistLatencyKey = "pleg_relist_latency_microseconds"
PLEGRelistIntervalKey = "pleg_relist_interval_microseconds"
EvictionStatsAgeKey = "eviction_stats_age_microseconds"
VolumeStatsCapacityBytesKey = "volume_stats_capacity_bytes"
VolumeStatsAvailableBytesKey = "volume_stats_available_bytes"
VolumeStatsUsedBytesKey = "volume_stats_used_bytes"
VolumeStatsInodesKey = "volume_stats_inodes"
VolumeStatsInodesFreeKey = "volume_stats_inodes_free"
VolumeStatsInodesUsedKey = "volume_stats_inodes_used"
// Metrics keys of remote runtime operations
RuntimeOperationsKey = "runtime_operations"
RuntimeOperationsLatencyKey = "runtime_operations_latency_microseconds"
RuntimeOperationsErrorsKey = "runtime_operations_errors"
)
var (
ContainersPerPodCount = prometheus.NewSummary(
prometheus.SummaryOpts{
Subsystem: KubeletSubsystem,
Name: "containers_per_pod_count",
Help: "The number of containers per pod.",
},
)
PodWorkerLatency = prometheus.NewSummaryVec(
prometheus.SummaryOpts{
Subsystem: KubeletSubsystem,
Name: PodWorkerLatencyKey,
Help: "Latency in microseconds to sync a single pod. Broken down by operation type: create, update, or sync",
},
[]string{"operation_type"},
)
PodStartLatency = prometheus.NewSummary(
prometheus.SummaryOpts{
Subsystem: KubeletSubsystem,
Name: PodStartLatencyKey,
Help: "Latency in microseconds for a single pod to go from pending to running. Broken down by podname.",
},
)
CgroupManagerLatency = prometheus.NewSummaryVec(
prometheus.SummaryOpts{
Subsystem: KubeletSubsystem,
Name: CgroupManagerOperationsKey,
Help: "Latency in microseconds for cgroup manager operations. Broken down by method.",
},
[]string{"operation_type"},
)
PodWorkerStartLatency = prometheus.NewSummary(
prometheus.SummaryOpts{
Subsystem: KubeletSubsystem,
Name: PodWorkerStartLatencyKey,
Help: "Latency in microseconds from seeing a pod to starting a worker.",
},
)
PLEGRelistLatency = prometheus.NewSummary(
prometheus.SummaryOpts{
Subsystem: KubeletSubsystem,
Name: PLEGRelistLatencyKey,
Help: "Latency in microseconds for relisting pods in PLEG.",
},
)
PLEGRelistInterval = prometheus.NewSummary(
prometheus.SummaryOpts{
Subsystem: KubeletSubsystem,
Name: PLEGRelistIntervalKey,
Help: "Interval in microseconds between relisting in PLEG.",
},
)
// Metrics of remote runtime operations.
RuntimeOperations = prometheus.NewCounterVec(
prometheus.CounterOpts{
Subsystem: KubeletSubsystem,
Name: RuntimeOperationsKey,
Help: "Cumulative number of runtime operations by operation type.",
},
[]string{"operation_type"},
)
RuntimeOperationsLatency = prometheus.NewSummaryVec(
prometheus.SummaryOpts{
Subsystem: KubeletSubsystem,
Name: RuntimeOperationsLatencyKey,
Help: "Latency in microseconds of runtime operations. Broken down by operation type.",
},
[]string{"operation_type"},
)
RuntimeOperationsErrors = prometheus.NewCounterVec(
prometheus.CounterOpts{
Subsystem: KubeletSubsystem,
Name: RuntimeOperationsErrorsKey,
Help: "Cumulative number of runtime operation errors by operation type.",
},
[]string{"operation_type"},
)
EvictionStatsAge = prometheus.NewSummaryVec(
prometheus.SummaryOpts{
Subsystem: KubeletSubsystem,
Name: EvictionStatsAgeKey,
Help: "Time between when stats are collected, and when pod is evicted based on those stats by eviction signal",
},
[]string{"eviction_signal"},
)
VolumeStatsCapacityBytes = prometheus.NewGaugeVec(
prometheus.GaugeOpts{
Subsystem: KubeletSubsystem,
Name: VolumeStatsCapacityBytesKey,
Help: "Capacity in bytes of the volume",
},
[]string{"namespace", "persistentvolumeclaim"},
)
VolumeStatsAvailableBytes = prometheus.NewGaugeVec(
prometheus.GaugeOpts{
Subsystem: KubeletSubsystem,
Name: VolumeStatsAvailableBytesKey,
Help: "Number of available bytes in the volume",
},
[]string{"namespace", "persistentvolumeclaim"},
)
VolumeStatsUsedBytes = prometheus.NewGaugeVec(
prometheus.GaugeOpts{
Subsystem: KubeletSubsystem,
Name: VolumeStatsUsedBytesKey,
Help: "Number of used bytes in the volume",
},
[]string{"namespace", "persistentvolumeclaim"},
)
VolumeStatsInodes = prometheus.NewGaugeVec(
prometheus.GaugeOpts{
Subsystem: KubeletSubsystem,
Name: VolumeStatsInodesKey,
Help: "Maximum number of inodes in the volume",
},
[]string{"namespace", "persistentvolumeclaim"},
)
VolumeStatsInodesFree = prometheus.NewGaugeVec(
prometheus.GaugeOpts{
Subsystem: KubeletSubsystem,
Name: VolumeStatsInodesFreeKey,
Help: "Number of free inodes in the volume",
},
[]string{"namespace", "persistentvolumeclaim"},
)
VolumeStatsInodesUsed = prometheus.NewGaugeVec(
prometheus.GaugeOpts{
Subsystem: KubeletSubsystem,
Name: VolumeStatsInodesUsedKey,
Help: "Number of used inodes in the volume",
},
[]string{"namespace", "persistentvolumeclaim"},
)
)
var registerMetrics sync.Once
// Register all metrics.
func Register(containerCache kubecontainer.RuntimeCache) {
// Register the metrics.
registerMetrics.Do(func() {
prometheus.MustRegister(PodWorkerLatency)
prometheus.MustRegister(PodStartLatency)
prometheus.MustRegister(CgroupManagerLatency)
prometheus.MustRegister(PodWorkerStartLatency)
prometheus.MustRegister(ContainersPerPodCount)
prometheus.MustRegister(newPodAndContainerCollector(containerCache))
prometheus.MustRegister(PLEGRelistLatency)
prometheus.MustRegister(PLEGRelistInterval)
prometheus.MustRegister(RuntimeOperations)
prometheus.MustRegister(RuntimeOperationsLatency)
prometheus.MustRegister(RuntimeOperationsErrors)
prometheus.MustRegister(EvictionStatsAge)
prometheus.MustRegister(VolumeStatsCapacityBytes)
prometheus.MustRegister(VolumeStatsAvailableBytes)
prometheus.MustRegister(VolumeStatsUsedBytes)
prometheus.MustRegister(VolumeStatsInodes)
prometheus.MustRegister(VolumeStatsInodesFree)
prometheus.MustRegister(VolumeStatsInodesUsed)
})
}
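// Register is typically invoked once during startup; a sketch of a call site
// (runtimeCache is a placeholder name for the kubelet's RuntimeCache):
//
//	metrics.Register(runtimeCache)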
// Gets the time since the specified start in microseconds.
func SinceInMicroseconds(start time.Time) float64 {
return float64(time.Since(start).Nanoseconds() / time.Microsecond.Nanoseconds())
}
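// Example (sketch): timing an operation and recording it in one of the
// summaries above; "sync" is an illustrative operation_type label value.
//
//	start := time.Now()
//	// ... perform the pod sync ...
//	PodWorkerLatency.WithLabelValues("sync").Observe(SinceInMicroseconds(start))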
func newPodAndContainerCollector(containerCache kubecontainer.RuntimeCache) *podAndContainerCollector {
return &podAndContainerCollector{
containerCache: containerCache,
}
}
// Custom collector for current pod and container counts.
type podAndContainerCollector struct {
// Cache for accessing information about running containers.
containerCache kubecontainer.RuntimeCache
}
// TODO(vmarmol): Split by source?
var (
runningPodCountDesc = prometheus.NewDesc(
prometheus.BuildFQName("", KubeletSubsystem, "running_pod_count"),
"Number of pods currently running",
nil, nil)
runningContainerCountDesc = prometheus.NewDesc(
prometheus.BuildFQName("", KubeletSubsystem, "running_container_count"),
"Number of containers currently running",
nil, nil)
)
func (pc *podAndContainerCollector) Describe(ch chan<- *prometheus.Desc) {
ch <- runningPodCountDesc
ch <- runningContainerCountDesc
}
func (pc *podAndContainerCollector) Collect(ch chan<- prometheus.Metric) {
runningPods, err := pc.containerCache.GetPods()
if err != nil {
glog.Warningf("Failed to get running container information while collecting metrics: %v", err)
return
}
runningContainers := 0
for _, p := range runningPods {
runningContainers += len(p.Containers)
}
ch <- prometheus.MustNewConstMetric(
runningPodCountDesc,
prometheus.GaugeValue,
float64(len(runningPods)))
ch <- prometheus.MustNewConstMetric(
runningContainerCountDesc,
prometheus.GaugeValue,
float64(runningContainers))
}
lib.rs | #[allow(dead_code)]
#[allow(unknown_lints)]
#[allow(clippy::all)]
#[allow(renamed_and_removed_lints)]
#[allow(bare_trait_objects)]
#[allow(deprecated)]
mod protos {
include!(concat!(env!("OUT_DIR"), "/protos/mod.rs"));
use raft_proto::eraftpb;
}
pub use protos::*;
#[cfg(feature = "prost-codec")]
pub mod prost_adapt {
use crate::backup::{error, ClusterIdError, Error};
use crate::import_kvpb::{write_engine_request, WriteBatch, WriteEngineRequest, WriteHead};
use crate::import_sstpb::{upload_request, SstMeta, UploadRequest};
use crate::{errorpb, kvrpcpb};
impl UploadRequest {
pub fn set_data(&mut self, v: Vec<u8>) {
self.chunk = Some(upload_request::Chunk::Data(v));
}
pub fn get_data(&self) -> &[u8] {
match &self.chunk {
Some(upload_request::Chunk::Data(v)) => v,
_ => &[],
}
}
pub fn set_meta(&mut self, v: SstMeta) {
self.chunk = Some(upload_request::Chunk::Meta(v));
}
pub fn get_meta(&self) -> &SstMeta {
match &self.chunk {
Some(upload_request::Chunk::Meta(v)) => v,
_ => SstMeta::default_ref(),
}
}
pub fn has_meta(&self) -> bool {
match self.chunk {
Some(upload_request::Chunk::Meta(_)) => true,
_ => false,
}
}
}
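// Sketch of how the chunk accessors read at a call site (values are
// illustrative):
//
//     let mut up = UploadRequest::default();
//     up.set_data(vec![1, 2, 3]);
//     assert!(!up.has_meta());
//     assert_eq!(up.get_data(), [1, 2, 3]);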
impl WriteEngineRequest {
pub fn set_head(&mut self, v: WriteHead) {
self.chunk = Some(write_engine_request::Chunk::Head(v));
}
pub fn get_head(&self) -> &WriteHead {
match &self.chunk {
Some(write_engine_request::Chunk::Head(v)) => v,
_ => WriteHead::default_ref(),
}
}
pub fn has_head(&self) -> bool {
match self.chunk {
Some(write_engine_request::Chunk::Head(_)) => true,
_ => false,
}
}
pub fn set_batch(&mut self, v: WriteBatch) {
self.chunk = Some(write_engine_request::Chunk::Batch(v));
}
pub fn get_batch(&self) -> &WriteBatch {
match &self.chunk {
Some(write_engine_request::Chunk::Batch(v)) => v,
_ => WriteBatch::default_ref(),
}
}
pub fn has_batch(&self) -> bool {
match self.chunk {
Some(write_engine_request::Chunk::Batch(_)) => true,
_ => false,
}
}
pub fn take_batch(&mut self) -> WriteBatch {
if self.has_batch() {
match self.chunk.take() {
Some(write_engine_request::Chunk::Batch(v)) => v,
_ => unreachable!(),
}
} else {
WriteBatch::default()
}
}
}
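// The same pattern applies to the head/batch oneof, e.g. (sketch):
//
//     let mut req = WriteEngineRequest::default();
//     req.set_head(WriteHead::default());
//     assert!(req.has_head() && !req.has_batch());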
impl Error {
pub fn set_region_error(&mut self, v: errorpb::Error) {
self.detail = Some(error::Detail::RegionError(v));
}
pub fn set_kv_error(&mut self, v: kvrpcpb::KeyError) {
self.detail = Some(error::Detail::KvError(v));
}
pub fn set_cluster_id_error(&mut self, v: ClusterIdError) {
self.detail = Some(error::Detail::ClusterIdError(v));
}
pub fn get_region_error(&self) -> &errorpb::Error {
match &self.detail {
Some(error::Detail::RegionError(v)) => v,
_ => errorpb::Error::default_ref(),
}
}
pub fn get_kv_error(&self) -> &kvrpcpb::KeyError {
match &self.detail {
Some(error::Detail::KvError(v)) => v,
_ => kvrpcpb::KeyError::default_ref(),
}
}
pub fn get_cluster_id_error(&self) -> &ClusterIdError {
match &self.detail {
Some(error::Detail::ClusterIdError(v)) => v,
_ => ClusterIdError::default_ref(),
}
}
pub fn has_region_error(&self) -> bool {
match self.detail {
Some(error::Detail::RegionError(_)) => true,
_ => false,
}
}
pub fn has_kv_error(&self) -> bool {
match self.detail {
Some(error::Detail::KvError(_)) => true,
_ => false,
}
}
pub fn has_cluster_id_error(&self) -> bool {
match self.detail {
Some(error::Detail::ClusterIdError(_)) => true,
_ => false,
}
}
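// The mut_* accessors below emulate rust-protobuf's lazy initialization: if
// the oneof does not currently hold the requested variant, a default value is
// installed first and a mutable reference into it is returned.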
pub fn mut_region_error(&mut self) -> &mut errorpb::Error {
if let Some(error::Detail::RegionError(_)) = self.detail {
} else {
self.detail = Some(error::Detail::RegionError(errorpb::Error::default()));
}
match self.detail {
Some(error::Detail::RegionError(ref mut v)) => v,
_ => unreachable!(),
}
}
pub fn mut_kv_error(&mut self) -> &mut kvrpcpb::KeyError {
if let Some(error::Detail::KvError(_)) = self.detail {
} else {
self.detail = Some(error::Detail::KvError(kvrpcpb::KeyError::default()));
}
match self.detail {
Some(error::Detail::KvError(ref mut v)) => v,
_ => unreachable!(),
}
}
pub fn mut_cluster_id_error(&mut self) -> &mut ClusterIdError {
if let Some(error::Detail::ClusterIdError(_)) = self.detail {
} else {
self.detail = Some(error::Detail::ClusterIdError(ClusterIdError::default()));
}
match self.detail {
Some(error::Detail::ClusterIdError(ref mut v)) => v,
_ => unreachable!(),
}
}
}
}
pub mod cdc_adapt {
#[cfg(not(feature = "prost-codec"))]
pub mod pb {
impl ::std::fmt::Debug for crate::cdcpb::Event_oneof_event {
#[allow(unused_variables)]
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
let mut buf = String::new();
match self {
crate::cdcpb::Event_oneof_event::Entries(v) => ::protobuf::PbPrint::fmt(v, "Entries", &mut buf),
crate::cdcpb::Event_oneof_event::Admin(v) => ::protobuf::PbPrint::fmt(v, "Admin", &mut buf),
crate::cdcpb::Event_oneof_event::Error(v) => ::protobuf::PbPrint::fmt(v, "Error", &mut buf),
crate::cdcpb::Event_oneof_event::ResolvedTs(v) => ::protobuf::PbPrint::fmt(v, "ResolvedTs", &mut buf),
crate::cdcpb::Event_oneof_event::LongTxn(v) => ::protobuf::PbPrint::fmt(v, "Long", &mut buf),
}
write!(f, "{}", buf)
}
}
#[allow(dead_code)]
fn assert_fmt_debug() {
fn require_impl_debug<T: ::std::fmt::Debug>(_: T) {}
require_impl_debug(crate::cdcpb::Event_oneof_event::Entries(::std::default::Default::default()));
require_impl_debug(crate::cdcpb::ChangeDataEvent::default());
}
}
#[cfg(feature = "prost-codec")]
pub mod prost {
#[allow(dead_code)]
fn assert_fmt_debug() {
fn require_impl_debug<T: ::std::fmt::Debug>(_: T) {}
require_impl_debug(crate::cdcpb::event::Event::Entries(::std::default::Default::default()));
require_impl_debug(crate::cdcpb::ChangeDataEvent::default());
}
}
}
compat.py | # -*- coding: utf-8 -*-
import warnings
import aesara_theano_fallback
from aesara_theano_fallback import aesara as theano
import aesara_theano_fallback.tensor as tt
from aesara_theano_fallback import sparse as ts
from aesara_theano_fallback import change_flags, ifelse, USE_AESARA
from aesara_theano_fallback.tensor import slinalg
from aesara_theano_fallback.graph import basic, op, params_type, fg
from inspect import getmro
if USE_AESARA:
from aesara.scan.utils import until as scan_until
else:
try:
from theano.scan.utils import until as scan_until
except ModuleNotFoundError:
from theano.scan_module.scan_utils import until as scan_until
__all__ = [
"theano",
"tt",
"ts",
"slinalg",
"ifelse",
"Apply",
"COp",
"Op",
"Params",
"ParamsType",
"Node",
"change_flags",
"floatX",
"evaluator",
"scan_until",
"USE_AESARA",
]
# Suppress third-party deprecation warnings
warnings.filterwarnings("ignore", category=DeprecationWarning, module="pymc3")
warnings.filterwarnings("ignore", category=DeprecationWarning, module="theano")
warnings.filterwarnings("ignore", category=DeprecationWarning, module="aesara")
# Set double precision
floatX = "float64"
# Compatibility imports
Node = basic.Node
Apply = basic.Apply
Op = op.Op
COp = op.ExternalCOp
Params = params_type.Params
ParamsType = params_type.ParamsType
MissingInputError = fg.MissingInputError
theano.config.floatX = floatX
# This helps prevent defaulting to float32
theano.config.cast_policy = "numpy+floatX"
def is_tensor(*objs):
"""Return ``True`` if any of ``objs`` is a ``Theano`` object."""
for obj in objs:
for c in getmro(type(obj)):
if c is Node:
return True
return False
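# For example, is_tensor(1.0, tt.dscalar()) should return True, since a
# symbolic variable's MRO includes the graph-level Node base class.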
def evaluator(**kwargs):
"""
Return a function to evaluate theano tensors.
Works inside a `pymc3` model if a `point` is provided.
Lazily imports `pymc3` to minimize overhead.
"""
# Store the kwargs
kwargs_point = kwargs.get("point", None)
kwargs_model = kwargs.get("model", None)
if kwargs_point is not None:
# User provided a point
import pymc3 as pm
import pymc3_ext as pmx
point = kwargs_point
model = kwargs_model
if model is None:
model = pm.Model.get_context()
def get_val(x):
if is_tensor(x):
return pmx.eval_in_model(x, model=model, point=point)
else:
return x
else:
# No point provided
def get_val(x):
if is_tensor(x):
try:
# Try to directly evaluate it
return x.eval()
except MissingInputError as e:
# That didn't work. Perhaps we are in a pymc3 model
# context, but the user didn't provide a point?
import pymc3 as pm
import pymc3_ext as pmx
try:
model = kwargs_model
if model is None:
model = pm.Model.get_context()
except TypeError:
raise ValueError(
"Missing input for variable {}, and no pymc3 model found.".format(
x
)
)
# Warn the user that we're using the test point
warnings.warn(
"Detected pymc3 model context, but no point provided. "
"Evaluating at test_point."
)
return pmx.eval_in_model(
x, model=model, point=model.test_point
)
else:
return x
return get_val
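# Usage sketch: `evaluator()` returns a callable. Outside a model context it
# just calls `.eval()`; inside a pymc3 model, a point can be supplied
# (`model` and `some_tensor` below are placeholders):
#
#     f = evaluator(point=model.test_point, model=model)
#     value = f(some_tensor)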
|
chinaformat.ts | export let chinaformat ={
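// Flat lookup table from Chinese administrative names (country / province /
// city / district) to [longitude, latitude] pairs stored as strings, e.g.
// chinaformat["北京"] yields ["116.405285", "39.904989"].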
"中国": [
"116.3683244",
"39.915085"
],
"北京": [
"116.405285",
"39.904989"
],
"北京市": [
"116.405285",
"39.904989"
],
"东城区": [
"116.41005",
"39.93157"
],
"西城区": [
"116.36003",
"39.9305"
],
"朝阳区": [
"125.2883",
"43.83339"
],
"丰台区": [
"116.28625",
"39.8585"
],
"石景山区": [
"116.2229",
"39.90564"
],
"海淀区": [
"116.29812",
"39.95931"
],
"门头沟区": [
"116.10137",
"39.94043"
],
"房山区": [
"116.14257",
"39.74786"
],
"通州区": [
"121.07293",
"32.0676"
],
"顺义区": [
"116.65417",
"40.1302"
],
"昌平区": [
"116.2312",
"40.22072"
],
"大兴区": [
"116.34149",
"39.72668"
],
"怀柔区": [
"116.63168",
"40.31602"
],
"平谷区": [
"117.12133",
"40.14056"
],
"密云县": [
"116.84295",
"40.37618"
],
"延庆县": [
"115.97494",
"40.45672"
],
"天津": [
"117.190182",
"39.125596"
],
"天津市": [
"117.190182",
"39.125596"
],
"和平区": [
"120.88349",
"24.17477"
],
"河东区": [
"118.41055",
"35.08803"
],
"河西区": [
"117.22327",
"39.10959"
],
"南开区": [
"117.15074",
"39.13821"
],
"河北区": [
"117.19697",
"39.14816"
],
"红桥区": [
"117.15145",
"39.16715"
],
"东丽区": [
"117.31436",
"39.0863"
],
"西青区": [
"117.00927",
"39.14123"
],
"津南区": [
"117.38537",
"38.99139"
],
"北辰区": [
"117.13217",
"39.22131"
],
"武清区": [
"117.04443",
"39.38415"
],
"宝坻区": [
"117.3103",
"39.71761"
],
"滨海新区": [
"117.70162",
"39.02668"
],
"宁河县": [
"117.8255",
"39.33048"
],
"静海县": [
"116.97436",
"38.94582"
],
"蓟县": [
"117.40799",
"40.04567"
],
"河北省": [
"114.502461",
"38.045474"
],
"石家庄市": [
"114.502461",
"38.045474"
],
"长安区": [
"108.94586",
"34.15559"
],
"桥西区": [
"114.86962",
"40.81945"
],
"新华区": [
"113.29402",
"33.7373"
],
"井陉矿区": [
"114.06518",
"38.06705"
],
"裕华区": [
"114.53115",
"38.00604"
],
"藁城区": [
"114.84671",
"38.02162"
],
"鹿泉区": [
"114.31347",
"38.08782"
],
"栾城区": [
"114.64834",
"37.90022"
],
"井陉县": [
"114.14257",
"38.03688"
],
"正定县": [
"114.57296",
"38.14445"
],
"行唐县": [
"114.55316",
"38.43654"
],
"灵寿县": [
"114.38259",
"38.30845"
],
"高邑县": [
"114.61142",
"37.61556"
],
"深泽县": [
"115.20358",
"38.18353"
],
"赞皇县": [
"114.38775",
"37.66135"
],
"无极县": [
"114.97509",
"38.17653"
],
"平山县": [
"114.186",
"38.25994"
],
"元氏县": [
"114.52539",
"37.76668"
],
"赵县": [
"114.77612",
"37.75628"
],
"辛集市": [
"115.20626",
"37.94079"
],
"晋州市": [
"115.04348",
"38.03135"
],
"新乐市": [
"114.68985",
"38.34417"
],
"唐山市": [
"118.175393",
"39.635113"
],
"路南区": [
"118.15431",
"39.62505"
],
"路北区": [
"118.20079",
"39.62436"
],
"古冶区": [
"118.45803",
"39.71993"
],
"开平区": [
"118.26171",
"39.67128"
],
"丰南区": [
"118.11282",
"39.56483"
],
"丰润区": [
"118.12976",
"39.8244"
],
"曹妃甸区": [
"118.460379",
"39.273070"
],
"滦县": [
"118.70346",
"39.74056"
],
"滦南县": [
"118.6741",
"39.5039"
],
"乐亭县": [
"118.9125",
"39.42561"
],
"迁西县": [
"118.31616",
"40.14587"
],
"玉田县": [
"117.7388",
"39.90049"
],
"遵化市": [
"117.96444",
"40.18741"
],
"迁安市": [
"118.70068",
"39.99833"
],
"秦皇岛市": [
"119.586579",
"39.942531"
],
"海港区": [
"119.61046",
"39.9345"
],
"山海关区": [
"119.77563",
"39.97869"
],
"北戴河区": [
"119.48388",
"39.83408"
],
"青龙满族自治县": [
"118.95242",
"40.40743"
],
"昌黎县": [
"119.16595",
"39.70884"
],
"抚宁县": [
"119.24487",
"39.87538"
],
"卢龙县": [
"118.89288",
"39.89176"
],
"邯郸市": [
"114.490686",
"36.612273"
],
"邯山区": [
"114.48375",
"36.60006"
],
"丛台区": [
"114.49343",
"36.61847"
],
"复兴区": [
"114.45928",
"36.61134"
],
"峰峰矿区": [
"114.21148",
"36.41937"
],
"邯郸县": [
"114.53103",
"36.59385"
],
"临漳县": [
"114.6195",
"36.33461"
],
"成安县": [
"114.66995",
"36.44411"
],
"大名县": [
"115.15362",
"36.27994"
],
"涉县": [
"113.69183",
"36.58072"
],
"磁县": [
"114.37387",
"36.37392"
],
"肥乡县": [
"114.79998",
"36.54807"
],
"永年县": [
"114.48925",
"36.78356"
],
"邱县": [
"115.17407",
"36.82082"
],
"鸡泽县": [
"114.8742",
"36.92374"
],
"广平县": [
"114.94653",
"36.48046"
],
"馆陶县": [
"115.29913",
"36.53719"
],
"魏县": [
"114.93518",
"36.36171"
],
"曲周县": [
"114.95196",
"36.77671"
],
"武安市": [
"114.20153",
"36.69281"
],
"邢台市": [
"114.508851",
"37.0682"
],
"桥东区": [
"114.8943",
"40.78844"
],
"邢台县": [
"114.56575",
"37.0456"
],
"临城县": [
"114.50387",
"37.43977"
],
"内丘县": [
"114.51212",
"37.28671"
],
"柏乡县": [
"114.69332",
"37.48242"
],
"隆尧县": [
"114.77615",
"37.35351"
],
"任县": [
"114.6842",
"37.12575"
],
"南和县": [
"114.68371",
"37.00488"
],
"宁晋县": [
"114.92117",
"37.61696"
],
"巨鹿县": [
"115.03524",
"37.21801"
],
"新河县": [
"115.24987",
"37.52718"
],
"广宗县": [
"115.14254",
"37.0746"
],
"平乡县": [
"115.03002",
"37.06317"
],
"威县": [
"115.2637",
"36.9768"
],
"清河县": [
"115.66479",
"37.07122"
],
"临西县": [
"115.50097",
"36.87078"
],
"南宫市": [
"115.39068",
"37.35799"
],
"沙河市": [
"114.4981",
"36.8577"
],
"保定市": [
"115.482331",
"38.867657"
],
"新市区": [
"120.295138",
"23.07897"
],
"北市区": [
"115.49715",
"38.88322"
],
"南市区": [
"115.52859",
"38.85455"
],
"满城县": [
"115.32296",
"38.94972"
],
"清苑县": [
"115.49267",
"38.76709"
],
"涞水县": [
"115.71517",
"39.39404"
],
"阜平县": [
"114.19683",
"38.84763"
],
"徐水县": [
"115.65829",
"39.02099"
],
"定兴县": [
"115.80786",
"39.26312"
],
"唐县": [
"114.98516",
"38.74513"
],
"高阳县": [
"115.7788",
"38.70003"
],
"容城县": [
"115.87158",
"39.0535"
],
"涞源县": [
"114.69128",
"39.35388"
],
"望都县": [
"115.1567",
"38.70996"
],
"安新县": [
"115.93557",
"38.93532"
],
"易县": [
"115.4981",
"39.34885"
],
"曲阳县": [
"114.70123",
"38.62154"
],
"蠡县": [
"115.57717",
"38.48974"
],
"顺平县": [
"115.1347",
"38.83854"
],
"博野县": [
"115.47033",
"38.4564"
],
"雄县": [
"116.10873",
"38.99442"
],
"涿州市": [
"115.98062",
"39.48622"
],
"定州市": [
"114.9902",
"38.51623"
],
"安国市": [
"115.32321",
"38.41391"
],
"高碑店市": [
"115.87368",
"39.32655"
],
"张家口市": [
"114.884091",
"40.811901"
],
"宣化区": [
"115.06543",
"40.60957"
],
"下花园区": [
"115.28744",
"40.50236"
],
"宣化县": [
"115.15497",
"40.56618"
],
"张北县": [
"114.71432",
"41.15977"
],
"康保县": [
"114.60031",
"41.85225"
],
"沽源县": [
"115.68859",
"41.66959"
],
"尚义县": [
"113.97134",
"41.07782"
],
"蔚县": [
"114.58892",
"39.84067"
],
"阳原县": [
"114.15051",
"40.10361"
],
"怀安县": [
"114.38559",
"40.67425"
],
"万全县": [
"114.7405",
"40.76694"
],
"怀来县": [
"115.51773",
"40.41536"
],
"涿鹿县": [
"115.22403",
"40.37636"
],
"赤城县": [
"115.83187",
"40.91438"
],
"崇礼县": [
"115.27993",
"40.97519"
],
"承德市": [
"117.939152",
"40.976204"
],
"双桥区": [
"117.9432",
"40.97466"
],
"双滦区": [
"117.74487",
"40.95375"
],
"鹰手营子矿区": [
"117.65985",
"40.54744"
],
"承德县": [
"118.17639",
"40.76985"
],
"兴隆县": [
"117.50073",
"40.41709"
],
"平泉县": [
"118.70196",
"41.01839"
],
"滦平县": [
"117.33276",
"40.94148"
],
"隆化县": [
"117.7297",
"41.31412"
],
"丰宁满族自治县": [
"116.6492",
"41.20481"
],
"宽城满族自治县": [
"118.49176",
"40.60829"
],
"围场满族蒙古族自治县": [
"117.7601",
"41.94368"
],
"沧州市": [
"116.857461",
"38.310582"
],
"运河区": [
"116.85706",
"38.31352"
],
"沧县": [
"116.87817",
"38.29361"
],
"青县": [
"116.80316",
"38.58345"
],
"东光县": [
"116.53668",
"37.8857"
],
"海兴县": [
"117.49758",
"38.13958"
],
"盐山县": [
"117.23092",
"38.05647"
],
"肃宁县": [
"115.82971",
"38.42272"
],
"南皮县": [
"116.70224",
"38.04109"
],
"吴桥县": [
"116.3847",
"37.62546"
],
"献县": [
"116.12695",
"38.19228"
],
"孟村回族自治县": [
"117.10412",
"38.05338"
],
"泊头市": [
"116.57824",
"38.08359"
],
"任丘市": [
"116.1033",
"38.71124"
],
"黄骅市": [
"117.33883",
"38.3706"
],
"河间市": [
"116.0993",
"38.44549"
],
"廊坊市": [
"116.713873",
"39.529244"
],
"安次区": [
"116.70308",
"39.52057"
],
"广阳区": [
"116.71069",
"39.52278"
],
"固安县": [
"116.29916",
"39.43833"
],
"永清县": [
"116.50091",
"39.32069"
],
"香河县": [
"117.00634",
"39.76133"
],
"大城县": [
"116.65353",
"38.70534"
],
"文安县": [
"116.45846",
"38.87325"
],
"大厂回族自治县": [
"116.98916",
"39.88649"
],
"霸州市": [
"116.39154",
"39.12569"
],
"三河市": [
"117.07229",
"39.98358"
],
"衡水市": [
"115.665993",
"37.735097"
],
"桃城区": [
"115.67529",
"37.73499"
],
"枣强县": [
"115.72576",
"37.51027"
],
"武邑县": [
"115.88748",
"37.80181"
],
"武强县": [
"115.98226",
"38.04138"
],
"饶阳县": [
"115.72558",
"38.23529"
],
"安平县": [
"115.51876",
"38.23388"
],
"故城县": [
"115.97076",
"37.34773"
],
"景县": [
"116.26904",
"37.6926"
],
"阜城县": [
"116.14431",
"37.86881"
],
"冀州市": [
"115.57934",
"37.55082"
],
"深州市": [
"115.55993",
"38.00109"
],
"山西省": [
"112.549248",
"37.857014"
],
"太原市": [
"112.549248",
"37.857014"
],
"小店区": [
"112.56878",
"37.73565"
],
"迎泽区": [
"112.56338",
"37.86326"
],
"杏花岭区": [
"112.56237",
"37.88429"
],
"尖草坪区": [
"112.48709",
"37.94193"
],
"万柏林区": [
"112.51553",
"37.85923"
],
"晋源区": [
"112.47985",
"37.72479"
],
"清徐县": [
"112.35888",
"37.60758"
],
"阳曲县": [
"112.67861",
"38.05989"
],
"娄烦县": [
"111.79473",
"38.06689"
],
"古交市": [
"112.16918",
"37.90983"
],
"大同市": [
"113.295259",
"40.09031"
],
"城区": [
"115.36503",
"22.7789"
],
"矿区": [
"113.55677",
"37.86895"
],
"南郊区": [
"113.14947",
"40.00539"
],
"新荣区": [
"113.13504",
"40.25618"
],
"阳高县": [
"113.75012",
"40.36256"
],
"天镇县": [
"114.0931",
"40.42299"
],
"广灵县": [
"114.28204",
"39.76082"
],
"灵丘县": [
"114.23672",
"39.44043"
],
"浑源县": [
"113.69552",
"39.69962"
],
"左云县": [
"112.70266",
"40.01336"
],
"大同县": [
"113.61212",
"40.04012"
],
"阳泉市": [
"113.583285",
"37.861188"
],
"郊区": [
"117.80868",
"30.91976"
],
"平定县": [
"113.65789",
"37.78601"
],
"盂县": [
"113.41235",
"38.08579"
],
"长治市": [
"113.113556",
"36.191112"
],
"长治县": [
"113.04791",
"36.04722"
],
"襄垣县": [
"113.05157",
"36.53527"
],
"屯留县": [
"112.89196",
"36.31579"
],
"平顺县": [
"113.43603",
"36.20005"
],
"黎城县": [
"113.38766",
"36.50301"
],
"壶关县": [
"113.207",
"36.11301"
],
"长子县": [
"112.87731",
"36.12125"
],
"武乡县": [
"112.86343",
"36.83687"
],
"沁县": [
"112.69863",
"36.75628"
],
"沁源县": [
"112.33758",
"36.50008"
],
"潞城市": [
"113.22888",
"36.33414"
],
"晋城市": [
"112.851274",
"35.497553"
],
"沁水县": [
"112.1871",
"35.69102"
],
"阳城县": [
"112.41485",
"35.48614"
],
"陵川县": [
"113.2806",
"35.77532"
],
"泽州县": [
"112.83947",
"35.50789"
],
"高平市": [
"112.92288",
"35.79705"
],
"朔州市": [
"112.433387",
"39.331261"
],
"朔城区": [
"112.43189",
"39.31982"
],
"平鲁区": [
"112.28833",
"39.51155"
],
"山阴县": [
"112.81662",
"39.52697"
],
"应县": [
"113.19052",
"39.55279"
],
"右玉县": [
"112.46902",
"39.99011"
],
"怀仁县": [
"113.10009",
"39.82806"
],
"晋中市": [
"112.736465",
"37.696495"
],
"榆次区": [
"112.70788",
"37.6978"
],
"榆社县": [
"112.97558",
"37.0721"
],
"左权县": [
"113.37918",
"37.08235"
],
"和顺县": [
"113.56988",
"37.32963"
],
"昔阳县": [
"113.70517",
"37.61863"
],
"寿阳县": [
"113.17495",
"37.88899"
],
"太谷县": [
"112.55246",
"37.42161"
],
"祁县": [
"112.33358",
"37.3579"
],
"平遥县": [
"112.17553",
"37.1892"
],
"灵石县": [
"111.7774",
"36.84814"
],
"介休市": [
"111.91824",
"37.02771"
],
"运城市": [
"111.003957",
"35.022778"
],
"盐湖区": [
"110.99827",
"35.0151"
],
"临猗县": [
"110.77432",
"35.14455"
],
"万荣县": [
"110.83657",
"35.41556"
],
"闻喜县": [
"111.22265",
"35.35553"
],
"稷山县": [
"110.97924",
"35.59993"
],
"新绛县": [
"111.22509",
"35.61566"
],
"绛县": [
"111.56668",
"35.49096"
],
"垣曲县": [
"111.67166",
"35.29923"
],
"夏县": [
"111.21966",
"35.14121"
],
"平陆县": [
"111.21704",
"34.83772"
],
"芮城县": [
"110.69455",
"34.69384"
],
"永济市": [
"110.44537",
"34.86556"
],
"河津市": [
"110.7116",
"35.59478"
],
"忻州市": [
"112.733538",
"38.41769"
],
"忻府区": [
"112.74603",
"38.40414"
],
"定襄县": [
"112.95733",
"38.47387"
],
"五台县": [
"113.25256",
"38.72774"
],
"代县": [
"112.95913",
"39.06717"
],
"繁峙县": [
"113.26303",
"39.18886"
],
"宁武县": [
"112.30423",
"39.00211"
],
"静乐县": [
"111.94158",
"38.3602"
],
"神池县": [
"112.20541",
"39.09"
],
"五寨县": [
"111.8489",
"38.90757"
],
"岢岚县": [
"111.57388",
"38.70452"
],
"河曲县": [
"111.13821",
"39.38439"
],
"保德县": [
"111.08656",
"39.02248"
],
"偏关县": [
"111.50863",
"39.43609"
],
"原平市": [
"112.70584",
"38.73181"
],
"临汾市": [
"111.517973",
"36.08415"
],
"尧都区": [
"111.5787",
"36.08298"
],
"曲沃县": [
"111.47525",
"35.64119"
],
"翼城县": [
"111.7181",
"35.73881"
],
"襄汾县": [
"111.44204",
"35.87711"
],
"洪洞县": [
"111.67501",
"36.25425"
],
"古县": [
"111.92041",
"36.26688"
],
"安泽县": [
"112.24981",
"36.14803"
],
"浮山县": [
"111.84744",
"35.96854"
],
"吉县": [
"110.68148",
"36.09873"
],
"乡宁县": [
"110.84652",
"35.97072"
],
"大宁县": [
"110.75216",
"36.46624"
],
"隰县": [
"110.93881",
"36.69258"
],
"永和县": [
"110.63168",
"36.7584"
],
"蒲县": [
"111.09674",
"36.41243"
],
"汾西县": [
"111.56811",
"36.65063"
],
"侯马市": [
"111.37207",
"35.61903"
],
"霍州市": [
"111.755",
"36.5638"
],
"吕梁市": [
"111.134335",
"37.524366"
],
"离石区": [
"111.15059",
"37.5177"
],
"文水县": [
"112.02829",
"37.43841"
],
"交城县": [
"112.1585",
"37.5512"
],
"兴县": [
"111.12692",
"38.46321"
],
"临县": [
"110.99282",
"37.95271"
],
"柳林县": [
"110.88922",
"37.42932"
],
"石楼县": [
"110.8352",
"36.99731"
],
"岚县": [
"111.67627",
"38.27874"
],
"方山县": [
"111.24011",
"37.88979"
],
"中阳县": [
"111.1795",
"37.35715"
],
"交口县": [
"111.18103",
"36.98213"
],
"孝义市": [
"111.77362",
"37.14414"
],
"汾阳市": [
"111.7882",
"37.26605"
],
"内蒙古自治区": [
"111.670801",
"40.818311"
],
"呼和浩特市": [
"111.670801",
"40.818311"
],
"新城区": [
"108.9608",
"34.26641"
],
"回民区": [
"111.62402",
"40.80827"
],
"玉泉区": [
"111.67456",
"40.75227"
],
"赛罕区": [
"111.70224",
"40.79207"
],
"土默特左旗": [
"111.14898",
"40.72229"
],
"托克托县": [
"111.19101",
"40.27492"
],
"和林格尔县": [
"111.82205",
"40.37892"
],
"清水河县": [
"111.68316",
"39.9097"
],
"武川县": [
"111.45785",
"41.09289"
],
"包头市": [
"109.840405",
"40.658168"
],
"东河区": [
"110.0462",
"40.58237"
],
"昆都仑区": [
"109.83862",
"40.64175"
],
"青山区": [
"114.39117",
"30.63427"
],
"石拐区": [
"110.27322",
"40.67297"
],
"白云鄂博矿区": [
"109.97367",
"41.76968"
],
"九原区": [
"109.96496",
"40.60554"
],
"土默特右旗": [
"110.52417",
"40.5688"
],
"固阳县": [
"110.06372",
"41.01851"
],
"达尔罕茂明安联合旗": [
"110.43258",
"41.69875"
],
"乌海市": [
"106.825563",
"39.673734"
],
"海勃湾区": [
"106.8222",
"39.66955"
],
"海南区": [
"106.88656",
"39.44128"
],
"乌达区": [
"106.72723",
"39.505"
],
"赤峰市": [
"118.956806",
"42.275317"
],
"红山区": [
"118.95755",
"42.24312"
],
"元宝山区": [
"119.28921",
"42.04005"
],
"松山区": [
"121.577206",
"25.049698"
],
"阿鲁科尔沁旗": [
"120.06527",
"43.87988"
],
"巴林左旗": [
"119.38012",
"43.97031"
],
"巴林右旗": [
"118.66461",
"43.53387"
],
"林西县": [
"118.04733",
"43.61165"
],
"克什克腾旗": [
"117.54562",
"43.26501"
],
"翁牛特旗": [
"119.03042",
"42.93147"
],
"喀喇沁旗": [
"118.70144",
"41.92917"
],
"宁城县": [
"119.34375",
"41.59661"
],
"敖汉旗": [
"119.92163",
"42.29071"
],
"通辽市": [
"122.263119",
"43.617429"
],
"科尔沁区": [
"122.25573",
"43.62257"
],
"科尔沁左翼中旗": [
"123.31912",
"44.13014"
],
"科尔沁左翼后旗": [
"122.35745",
"42.94897"
],
"开鲁县": [
"121.31884",
"43.60003"
],
"库伦旗": [
"121.776",
"42.72998"
],
"奈曼旗": [
"120.66348",
"42.84527"
],
"扎鲁特旗": [
"120.91507",
"44.55592"
],
"霍林郭勒市": [
"119.65429",
"45.53454"
],
"鄂尔多斯市": [
"109.99029",
"39.817179"
],
"东胜区": [
"109.96289",
"39.82236"
],
"达拉特旗": [
"110.03317",
"40.4001"
],
"准格尔旗": [
"111.23645",
"39.86783"
],
"鄂托克前旗": [
"107.48403",
"38.18396"
],
"鄂托克旗": [
"107.98226",
"39.09456"
],
"杭锦旗": [
"108.72934",
"39.84023"
],
"乌审旗": [
"108.8461",
"38.59092"
],
"伊金霍洛旗": [
"109.74908",
"39.57393"
],
"呼伦贝尔市": [
"119.758168",
"49.215333"
],
"海拉尔区": [
"119.7364",
"49.2122"
],
"扎赉诺尔区": [
"117.792702",
"49.486943"
],
"阿荣旗": [
"123.45941",
"48.12581"
],
"莫力达瓦达斡尔族自治旗": [
"124.51498",
"48.48055"
],
"鄂伦春自治旗": [
"123.72604",
"50.59777"
],
"鄂温克族自治旗": [
"119.7565",
"49.14284"
],
"陈巴尔虎旗": [
"119.42434",
"49.32684"
],
"新巴尔虎左旗": [
"118.26989",
"48.21842"
],
"新巴尔虎右旗": [
"116.82366",
"48.66473"
],
"满洲里市": [
"117.47946",
"49.58272"
],
"牙克石市": [
"120.7117",
"49.2856"
],
"扎兰屯市": [
"122.73757",
"48.01363"
],
"额尔古纳市": [
"120.19094",
"50.24249"
],
"根河市": [
"121.52197",
"50.77996"
],
"巴彦淖尔市": [
"107.416959",
"40.757402"
],
"临河区": [
"107.42668",
"40.75827"
],
"五原县": [
"108.26916",
"41.09631"
],
"磴口县": [
"107.00936",
"40.33062"
],
"乌拉特前旗": [
"108.65219",
"40.73649"
],
"乌拉特中旗": [
"108.52587",
"41.56789"
],
"乌拉特后旗": [
"106.98971",
"41.43151"
],
"杭锦后旗": [
"107.15133",
"40.88627"
],
"乌兰察布市": [
"113.114543",
"41.034126"
],
"集宁区": [
"113.11452",
"41.0353"
],
"卓资县": [
"112.57757",
"40.89414"
],
"化德县": [
"114.01071",
"41.90433"
],
"商都县": [
"113.57772",
"41.56213"
],
"兴和县": [
"113.83395",
"40.87186"
],
"凉城县": [
"112.49569",
"40.53346"
],
"察哈尔右翼前旗": [
"113.22131",
"40.7788"
],
"察哈尔右翼中旗": [
"112.63537",
"41.27742"
],
"察哈尔右翼后旗": [
"113.19216",
"41.43554"
],
"四子王旗": [
"111.70654",
"41.53312"
],
"丰镇市": [
"113.10983",
"40.4369"
],
"兴安盟": [
"122.070317",
"46.076268"
],
"乌兰浩特市": [
"122.06378",
"46.06235"
],
"阿尔山市": [
"119.94317",
"47.17716"
],
"科尔沁右翼前旗": [
"121.95269",
"46.0795"
],
"科尔沁右翼中旗": [
"121.46807",
"45.05605"
],
"扎赉特旗": [
"122.91229",
"46.7267"
],
"突泉县": [
"121.59396",
"45.38187"
],
"锡林郭勒盟": [
"116.090996",
"43.944018"
],
"二连浩特市": [
"111.98297",
"43.65303"
],
"锡林浩特市": [
"116.08603",
"43.93341"
],
"阿巴嘎旗": [
"114.96826",
"44.02174"
],
"苏尼特左旗": [
"113.6506",
"43.85687"
],
"苏尼特右旗": [
"112.65741",
"42.7469"
],
"东乌珠穆沁旗": [
"116.97293",
"45.51108"
],
"西乌珠穆沁旗": [
"117.60983",
"44.59623"
],
"太仆寺旗": [
"115.28302",
"41.87727"
],
"镶黄旗": [
"113.84472",
"42.23927"
],
"正镶白旗": [
"115.00067",
"42.30712"
],
"正蓝旗": [
"116.00363",
"42.25229"
],
"多伦县": [
"116.48565",
"42.203"
],
"阿拉善盟": [
"105.706422",
"38.844814"
],
"阿拉善左旗": [
"105.67532",
"38.8293"
],
"阿拉善右旗": [
"101.66705",
"39.21533"
],
"额济纳旗": [
"101.06887",
"41.96755"
],
"辽宁省": [
"123.429096",
"41.796767"
],
"沈阳市": [
"123.429096",
"41.796767"
],
"沈河区": [
"123.45871",
"41.79625"
],
"大东区": [
"123.46997",
"41.80539"
],
"皇姑区": [
"123.42527",
"41.82035"
],
"铁西区": [
"124.37369",
"43.17456"
],
"苏家屯区": [
"123.34405",
"41.66475"
],
"浑南区": [
"123.457707",
"41.719450"
],
"沈北新区": [
"123.52658",
"42.05297"
],
"于洪区": [
"123.30807",
"41.794"
],
"辽中县": [
"122.72659",
"41.51302"
],
"康平县": [
"123.35446",
"42.75081"
],
"法库县": [
"123.41214",
"42.50608"
],
"新民市": [
"122.82867",
"41.99847"
],
"大连市": [
"121.618622",
"38.91459"
],
"中山区": [
"121.739132",
"25.133991"
],
"西岗区": [
"121.61238",
"38.91469"
],
"沙河口区": [
"121.58017",
"38.90536"
],
"甘井子区": [
"121.56567",
"38.95017"
],
"旅顺口区": [
"121.26202",
"38.85125"
],
"金州区": [
"121.71893",
"39.1004"
],
"长海县": [
"122.58859",
"39.27274"
],
"瓦房店市": [
"121.98104",
"39.62843"
],
"普兰店市": [
"121.96316",
"39.39465"
],
"庄河市": [
"122.96725",
"39.68815"
],
"鞍山市": [
"122.995632",
"41.110626"
],
"铁东区": [
"124.40976",
"43.16241"
],
"立山区": [
"123.02948",
"41.15008"
],
"千山区": [
"122.96048",
"41.07507"
],
"台安县": [
"122.43585",
"41.41265"
],
"岫岩满族自治县": [
"123.28875",
"40.27996"
],
"海城市": [
"122.68457",
"40.88142"
],
"抚顺市": [
"123.921109",
"41.875956"
],
"新抚区": [
"123.91264",
"41.86205"
],
"东洲区": [
"124.03759",
"41.8519"
],
"望花区": [
"123.78283",
"41.85532"
],
"顺城区": [
"123.94506",
"41.88321"
],
"抚顺县": [
"124.17755",
"41.71217"
],
"新宾满族自治县": [
"125.04049",
"41.73409"
],
"清原满族自治县": [
"124.92807",
"42.10221"
],
"本溪市": [
"123.770519",
"41.297909"
],
"平山区": [
"123.76892",
"41.2997"
],
"溪湖区": [
"123.76764",
"41.32921"
],
"明山区": [
"123.81746",
"41.30827"
],
"南芬区": [
"123.74523",
"41.1006"
],
"本溪满族自治县": [
"124.12741",
"41.30059"
],
"桓仁满族自治县": [
"125.36062",
"41.26798"
],
"丹东市": [
"124.383044",
"40.124296"
],
"元宝区": [
"124.39575",
"40.13651"
],
"振兴区": [
"124.36035",
"40.10489"
],
"振安区": [
"124.42816",
"40.15826"
],
"宽甸满族自治县": [
"124.78247",
"40.73187"
],
"东港市": [
"124.16287",
"39.86256"
],
"凤城市": [
"124.06671",
"40.45302"
],
"锦州市": [
"121.135742",
"41.119269"
],
"古塔区": [
"121.12832",
"41.11725"
],
"凌河区": [
"121.15089",
"41.11496"
],
"太和区": [
"121.10354",
"41.10929"
],
"黑山县": [
"122.12081",
"41.69417"
],
"义县": [
"121.24035",
"41.53458"
],
"凌海市": [
"121.35705",
"41.1737"
],
"北镇市": [
"121.79858",
"41.59537"
],
"营口市": [
"122.235151",
"40.667432"
],
"站前区": [
"122.25896",
"40.67266"
],
"西市区": [
"122.20641",
"40.6664"
],
"鲅鱼圈区": [
"122.13266",
"40.26865"
],
"老边区": [
"122.37996",
"40.6803"
],
"盖州市": [
"122.35464",
"40.40446"
],
"大石桥市": [
"122.50927",
"40.64567"
],
"阜新市": [
"121.648962",
"42.011796"
],
"海州区": [
"119.13128",
"34.56986"
],
"新邱区": [
"121.79251",
"42.09181"
],
"太平区": [
"120.718523",
"24.126472"
],
"清河门区": [
"121.4161",
"41.78309"
],
"细河区": [
"121.68013",
"42.02533"
],
"阜新蒙古族自治县": [
"121.75787",
"42.0651"
],
"彰武县": [
"122.54022",
"42.38625"
],
"辽阳市": [
"123.18152",
"41.269402"
],
"白塔区": [
"123.1747",
"41.27025"
],
"文圣区": [
"123.18521",
"41.26267"
],
"宏伟区": [
"123.1929",
"41.21852"
],
"弓长岭区": [
"123.41963",
"41.15181"
],
"太子河区": [
"123.18182",
"41.25337"
],
"辽阳县": [
"123.10574",
"41.20542"
],
"灯塔市": [
"123.33926",
"41.42612"
],
"盘锦市": [
"122.06957",
"41.124484"
],
"双台子区": [
"122.06011",
"41.1906"
],
"兴隆台区": [
"122.07529",
"41.12402"
],
"大洼县": [
"122.08239",
"41.00244"
],
"盘山县": [
"121.99777",
"41.23805"
],
"铁岭市": [
"123.844279",
"42.290585"
],
"银州区": [
"123.8573",
"42.29507"
],
"清河区": [
"119.00778",
"33.59949"
],
"铁岭县": [
"123.77325",
"42.22498"
],
"西丰县": [
"124.7304",
"42.73756"
],
"昌图县": [
"124.11206",
"42.78428"
],
"调兵山市": [
"123.56689",
"42.4675"
],
"开原市": [
"124.03945",
"42.54585"
],
"朝阳市": [
"120.451176",
"41.576758"
],
"双塔区": [
"120.45385",
"41.566"
],
"龙城区": [
"120.43719",
"41.59264"
],
"朝阳县": [
"120.17401",
"41.4324"
],
"建平县": [
"119.64392",
"41.40315"
],
"喀喇沁左翼蒙古族自治县": [
"119.74185",
"41.12801"
],
"北票市": [
"120.76977",
"41.80196"
],
"凌源市": [
"119.40148",
"41.24558"
],
"葫芦岛市": [
"120.856394",
"40.755572"
],
"连山区": [
"120.86393",
"40.75554"
],
"龙港区": [
"120.94866",
"40.71919"
],
"南票区": [
"120.74978",
"41.10707"
],
"绥中县": [
"120.34451",
"40.32552"
],
"建昌县": [
"119.8377",
"40.82448"
],
"兴城市": [
"120.72537",
"40.61492"
],
"金普新区": [
"121.789627",
"39.055451"
],
"金州新区": [
"121.784821",
"39.052252"
],
"普湾新区": [
"121.812812",
"39.330093"
],
"保税区": [
"121.94289",
"39.224614"
],
"吉林省": [
"125.3245",
"43.886841"
],
"长春市": [
"125.3245",
"43.886841"
],
"南关区": [
"125.35035",
"43.86401"
],
"宽城区": [
"125.32635",
"43.90182"
],
"二道区": [
"125.37429",
"43.86501"
],
"绿园区": [
"125.25582",
"43.88045"
],
"双阳区": [
"125.65631",
"43.52803"
],
"九台区": [
"125.8395",
"44.15163"
],
"农安县": [
"125.18481",
"44.43265"
],
"榆树市": [
"126.55688",
"44.82523"
],
"德惠市": [
"125.70538",
"44.53719"
],
"吉林市": [
"126.55302",
"43.843577"
],
"昌邑区": [
"126.57424",
"43.88183"
],
"龙潭区": [
"126.56213",
"43.91054"
],
"船营区": [
"126.54096",
"43.83344"
],
"丰满区": [
"126.56237",
"43.82236"
],
"永吉县": [
"126.4963",
"43.67197"
],
"蛟河市": [
"127.34426",
"43.72696"
],
"桦甸市": [
"126.74624",
"42.97206"
],
"舒兰市": [
"126.9653",
"44.40582"
],
"磐石市": [
"126.0625",
"42.94628"
],
"四平市": [
"124.370785",
"43.170344"
],
"梨树县": [
"124.33563",
"43.30717"
],
"伊通满族自治县": [
"125.30596",
"43.34434"
],
"公主岭市": [
"124.82266",
"43.50453"
],
"双辽市": [
"123.50106",
"43.52099"
],
"辽源市": [
"125.145349",
"42.902692"
],
"龙山区": [
"125.13641",
"42.89714"
],
"西安区": [
"129.61616",
"44.57766"
],
"东丰县": [
"125.53244",
"42.6783"
],
"东辽县": [
"124.98596",
"42.92492"
],
"通化市": [
"125.936501",
"41.721177"
],
"东昌区": [
"125.9551",
"41.72849"
],
"二道江区": [
"126.04257",
"41.7741"
],
"通化县": [
"125.75936",
"41.67928"
],
"辉南县": [
"126.04684",
"42.68497"
],
"柳河县": [
"125.74475",
"42.28468"
],
"梅河口市": [
"125.71041",
"42.53828"
],
"集安市": [
"126.18829",
"41.12268"
],
"白山市": [
"126.427839",
"41.942505"
],
"浑江区": [
"126.422342",
"41.945656"
],
"江源区": [
"126.59079",
"42.05664"
],
"抚松县": [
"127.2803",
"42.34198"
],
"靖宇县": [
"126.81308",
"42.38863"
],
"长白朝鲜族自治县": [
"128.20047",
"41.41996"
],
"临江市": [
"126.91751",
"41.81142"
],
"松原市": [
"124.823608",
"45.118243"
],
"宁江区": [
"124.81689",
"45.17175"
],
"前郭尔罗斯蒙古族自治县": [
"124.82351",
"45.11726"
],
"长岭县": [
"123.96725",
"44.27581"
],
"乾安县": [
"124.02737",
"45.01068"
],
"扶余市": [
"126.042758",
"44.986199"
],
"白城市": [
"122.841114",
"45.619026"
],
"洮北区": [
"122.85104",
"45.62167"
],
"镇赉县": [
"123.19924",
"45.84779"
],
"通榆县": [
"123.08761",
"44.81388"
],
"洮南市": [
"122.78772",
"45.33502"
],
"大安市": [
"124.29519",
"45.50846"
],
"延边朝鲜族自治州": [
"129.513228",
"42.904823"
],
"延吉市": [
"129.51357",
"42.90682"
],
"图们市": [
"129.84381",
"42.96801"
],
"敦化市": [
"128.23242",
"43.37304"
],
"珲春市": [
"130.36572",
"42.86242"
],
"龙井市": [
"129.42584",
"42.76804"
],
"和龙市": [
"129.01077",
"42.5464"
],
"汪清县": [
"129.77121",
"43.31278"
],
"安图县": [
"128.90625",
"43.11533"
],
"黑龙江省": [
"126.642464",
"45.756967"
],
"哈尔滨市": [
"126.642464",
"45.756967"
],
"道里区": [
"126.61705",
"45.75586"
],
"南岗区": [
"126.66854",
"45.75996"
],
"道外区": [
"126.64938",
"45.79187"
],
"平房区": [
"126.63729",
"45.59777"
],
"松北区": [
"126.56276",
"45.80831"
],
"香坊区": [
"126.67968",
"45.72383"
],
"呼兰区": [
"126.58792",
"45.88895"
],
"阿城区": [
"126.97525",
"45.54144"
],
"双城区": [
"126.308784",
"45.377942"
],
"依兰县": [
"129.56817",
"46.3247"
],
"方正县": [
"128.82952",
"45.85162"
],
"宾县": [
"127.48675",
"45.75504"
],
"巴彦县": [
"127.40799",
"46.08148"
],
"木兰县": [
"128.0448",
"45.94944"
],
"通河县": [
"128.74603",
"45.99007"
],
"延寿县": [
"128.33419",
"45.4554"
],
"尚志市": [
"127.96191",
"45.21736"
],
"五常市": [
"127.16751",
"44.93184"
],
"齐齐哈尔市": [
"123.953486",
"47.348079"
],
"龙沙区": [
"123.95752",
"47.31776"
],
"建华区": [
"124.0133",
"47.36718"
],
"铁锋区": [
"123.97821",
"47.34075"
],
"昂昂溪区": [
"123.82229",
"47.15513"
],
"富拉尔基区": [
"123.62918",
"47.20884"
],
"碾子山区": [
"122.88183",
"47.51662"
],
"梅里斯达斡尔族区": [
"123.75274",
"47.30946"
],
"龙江县": [
"123.20532",
"47.33868"
],
"依安县": [
"125.30896",
"47.8931"
],
"泰来县": [
"123.42285",
"46.39386"
],
"甘南县": [
"123.50317",
"47.92437"
],
"富裕县": [
"124.47457",
"47.77431"
],
"克山县": [
"125.87396",
"48.03265"
],
"克东县": [
"126.24917",
"48.03828"
],
"拜泉县": [
"126.09167",
"47.60817"
],
"讷河市": [
"124.87713",
"48.48388"
],
"鸡西市": [
"130.975966",
"45.300046"
],
"鸡冠区": [
"130.98139",
"45.30396"
],
"恒山区": [
"130.90493",
"45.21071"
],
"滴道区": [
"130.84841",
"45.35109"
],
"梨树区": [
"130.69848",
"45.09037"
],
"城子河区": [
"131.01132",
"45.33689"
],
"麻山区": [
"130.47811",
"45.21209"
],
"鸡东县": [
"131.12423",
"45.26025"
],
"虎林市": [
"132.93679",
"45.76291"
],
"密山市": [
"131.84625",
"45.5297"
],
"鹤岗市": [
"130.277487",
"47.332085"
],
"向阳区": [
"130.36519",
"46.80778"
],
"工农区": [
"130.27468",
"47.31869"
],
"南山区": [
"113.93029",
"22.53291"
],
"兴安区": [
"130.23965",
"47.2526"
],
"东山区": [
"120.403984",
"23.326092"
],
"兴山区": [
"130.29271",
"47.35776"
],
"萝北县": [
"130.83346",
"47.57959"
],
"绥滨县": [
"131.86029",
"47.2903"
],
"双鸭山市": [
"131.157304",
"46.643442"
],
"尖山区": [
"131.15841",
"46.64635"
],
"岭东区": [
"131.16473",
"46.59043"
],
"四方台区": [
"131.33593",
"46.59499"
],
"宝山区": [
"121.4891",
"31.4045"
],
"集贤县": [
"131.14053",
"46.72678"
],
"友谊县": [
"131.80789",
"46.76739"
],
"宝清县": [
"132.19695",
"46.32716"
],
"饶河县": [
"134.01986",
"46.79899"
],
"大庆市": [
"125.11272",
"46.590734"
],
"萨尔图区": [
"125.08792",
"46.59359"
],
"龙凤区": [
"125.11657",
"46.53273"
],
"让胡路区": [
"124.87075",
"46.6522"
],
"红岗区": [
"124.89248",
"46.40128"
],
"大同区": [
"121.515514",
"25.065986"
],
"肇州县": [
"125.27059",
"45.70414"
],
"肇源县": [
"125.08456",
"45.52032"
],
"林甸县": [
"124.87564",
"47.18601"
],
"杜尔伯特蒙古族自治县": [
"124.44937",
"46.86507"
],
"伊春市": [
"128.899396",
"47.724775"
],
"伊春区": [
"128.90752",
"47.728"
],
"南岔区": [
"129.28362",
"47.13897"
],
"友好区": [
"128.84039",
"47.85371"
],
"西林区": [
"129.31201",
"47.48103"
],
"翠峦区": [
"128.66729",
"47.72503"
],
"新青区": [
"129.53653",
"48.29067"
],
"美溪区": [
"129.13708",
"47.63513"
],
"金山屯区": [
"129.43768",
"47.41349"
],
"五营区": [
"129.24545",
"48.10791"
],
"乌马河区": [
"128.79672",
"47.728"
],
"汤旺河区": [
"129.57226",
"48.45182"
],
"带岭区": [
"129.02352",
"47.02553"
],
"乌伊岭区": [
"129.43981",
"48.59602"
],
"红星区": [
"129.3887",
"48.23944"
],
"上甘岭区": [
"129.02447",
"47.97522"
],
"嘉荫县": [
"130.39825",
"48.8917"
],
"铁力市": [
"128.0317",
"46.98571"
],
"佳木斯市": [
"130.361634",
"46.809606"
],
"前进区": [
"130.37497",
"46.81401"
],
"东风区": [
"130.40366",
"46.82257"
],
"桦南县": [
"130.55361",
"46.23921"
],
"桦川县": [
"130.71893",
"47.02297"
],
"汤原县": [
"129.90966",
"46.72755"
],
"抚远县": [
"134.29595",
"48.36794"
],
"同江市": [
"132.51095",
"47.64211"
],
"富锦市": [
"132.03707",
"47.25132"
],
"七台河市": [
"131.015584",
"45.771266"
],
"新兴区": [
"120.309535",
"22.631147"
],
"桃山区": [
"131.01786",
"45.76782"
],
"茄子河区": [
"131.06807",
"45.78519"
],
"勃利县": [
"130.59179",
"45.755"
],
"牡丹江市": [
"129.618602",
"44.582962"
],
"东安区": [
"129.62665",
"44.58133"
],
"阳明区": [
"129.63547",
"44.59603"
],
"爱民区": [
"129.59077",
"44.59648"
],
"东宁县": [
"131.12793",
"44.0661"
],
"林口县": [
"130.28393",
"45.27809"
],
"绥芬河市": [
"131.15139",
"44.41249"
],
"海林市": [
"129.38156",
"44.59"
],
"宁安市": [
"129.48303",
"44.34016"
],
"穆棱市": [
"130.52465",
"44.919"
],
"黑河市": [
"127.499023",
"50.249585"
],
"爱辉区": [
"127.50074",
"50.25202"
],
"嫩江县": [
"125.22607",
"49.17844"
],
"逊克县": [
"128.47882",
"49.57983"
],
"孙吴县": [
"127.33599",
"49.42539"
],
"北安市": [
"126.48193",
"48.23872"
],
"五大连池市": [
"126.20294",
"48.51507"
],
"绥化市": [
"126.99293",
"46.637393"
],
"北林区": [
"126.98564",
"46.63735"
],
"望奎县": [
"126.48187",
"46.83079"
],
"兰西县": [
"126.28994",
"46.2525"
],
"青冈县": [
"126.11325",
"46.68534"
],
"庆安县": [
"127.50753",
"46.88016"
],
"明水县": [
"125.90594",
"47.17327"
],
"绥棱县": [
"127.11584",
"47.24267"
],
"安达市": [
"125.34375",
"46.4177"
],
"肇东市": [
"125.96243",
"46.05131"
],
"海伦市": [
"126.9682",
"47.46093"
],
"大兴安岭地区": [
"124.711526",
"52.335262"
],
"加格达奇区": [
"124.30954",
"51.98144"
],
"新林区": [
"124.397983",
"51.67341"
],
"松岭区": [
"124.189713",
"51.985453"
],
"呼中区": [
"123.60009",
"52.03346"
],
"呼玛县": [
"126.66174",
"51.73112"
],
"塔河县": [
"124.70999",
"52.33431"
],
"漠河县": [
"122.53759",
"52.97003"
],
"上海": [
"121.472644",
"31.231706"
],
"上海市": [
"121.472644",
"31.231706"
],
"黄浦区": [
"121.49295",
"31.22337"
],
"徐汇区": [
"121.43676",
"31.18831"
],
"长宁区": [
"121.42462",
"31.22036"
],
"静安区": [
"121.4444",
"31.22884"
],
"普陀区": [
"122.30278",
"29.94908"
],
"闸北区": [
"121.44636",
"31.28075"
],
"虹口区": [
"121.48162",
"31.27788"
],
"杨浦区": [
"121.526",
"31.2595"
],
"闵行区": [
"121.38162",
"31.11246"
],
"嘉定区": [
"121.2655",
"31.37473"
],
"浦东新区": [
"121.5447",
"31.22249"
],
"金山区": [
"121.636427",
"25.221883"
],
"松江区": [
"121.22879",
"31.03222"
],
"青浦区": [
"121.12417",
"31.14974"
],
"奉贤区": [
"121.47412",
"30.9179"
],
"崇明县": [
"121.39758",
"31.62278"
],
"江苏省": [
"118.767413",
"32.041544"
],
"南京市": [
"118.767413",
"32.041544"
],
"玄武区": [
"118.79772",
"32.04856"
],
"秦淮区": [
"118.79815",
"32.01112"
],
"建邺区": [
"118.76641",
"32.03096"
],
"鼓楼区": [
"114.35559",
"34.79517"
],
"浦口区": [
"118.62802",
"32.05881"
],
"栖霞区": [
"118.88064",
"32.11352"
],
"雨花台区": [
"118.7799",
"31.99202"
],
"江宁区": [
"118.8399",
"31.95263"
],
"六合区": [
"118.8413",
"32.34222"
],
"溧水区": [
"119.028732",
"31.653061"
],
"高淳区": [
"118.87589",
"31.327132"
],
"无锡市": [
"120.301663",
"31.574729"
],
"崇安区": [
"120.29975",
"31.58002"
],
"南长区": [
"120.30873",
"31.56359"
],
"北塘区": [
"120.29405",
"31.60592"
],
"锡山区": [
"120.35699",
"31.5886"
],
"惠山区": [
"120.29849",
"31.68088"
],
"滨湖区": [
"120.29461",
"31.52162"
],
"江阴市": [
"120.2853",
"31.91996"
],
"宜兴市": [
"119.82357",
"31.33978"
],
"徐州市": [
"117.184811",
"34.261792"
],
"云龙区": [
"117.23053",
"34.24895"
],
"贾汪区": [
"117.45346",
"34.44264"
],
"泉山区": [
"117.19378",
"34.24418"
],
"铜山区": [
"117.183894",
"34.19288"
],
"丰县": [
"116.59957",
"34.69972"
],
"沛县": [
"116.93743",
"34.72163"
],
"睢宁县": [
"117.94104",
"33.91269"
],
"新沂市": [
"118.35452",
"34.36942"
],
"邳州市": [
"117.95858",
"34.33329"
],
"常州市": [
"119.946973",
"31.772752"
],
"天宁区": [
"119.95132",
"31.75211"
],
"钟楼区": [
"119.90178",
"31.80221"
],
"戚墅堰区": [
"120.06106",
"31.71956"
],
"新北区": [
"119.97131",
"31.83046"
],
"武进区": [
"119.94244",
"31.70086"
],
"溧阳市": [
"119.4837",
"31.41538"
],
"金坛市": [
"119.57757",
"31.74043"
],
"苏州市": [
"120.619585",
"31.299379"
],
"虎丘区": [
"120.57345",
"31.2953"
],
"吴中区": [
"120.63211",
"31.26226"
],
"相城区": [
"120.64239",
"31.36889"
],
"姑苏区": [
"120.619585",
"31.299379"
],
"吴江区": [
"120.638317",
"31.159815"
],
"常熟市": [
"120.75225",
"31.65374"
],
"张家港市": [
"120.55538",
"31.87532"
],
"昆山市": [
"120.98074",
"31.38464"
],
"太仓市": [
"121.10891",
"31.4497"
],
"南通市": [
"120.864608",
"32.016212"
],
"崇川区": [
"120.8573",
"32.0098"
],
"港闸区": [
"120.81778",
"32.03163"
],
"海安县": [
"120.45852",
"32.54514"
],
"如东县": [
"121.18942",
"32.31439"
],
"启东市": [
"121.65985",
"31.81083"
],
"如皋市": [
"120.55969",
"32.37597"
],
"海门市": [
"121.16995",
"31.89422"
],
"连云港市": [
"119.178821",
"34.600018"
],
"连云区": [
"119.37304",
"34.75293"
],
"赣榆区": [
"119.128774",
"34.839154"
],
"东海县": [
"118.77145",
"34.54215"
],
"灌云县": [
"119.23925",
"34.28391"
],
"灌南县": [
"119.35632",
"34.09"
],
"淮安市": [
"119.021265",
"33.597506"
],
"淮安区": [
"119.021265",
"33.597506"
],
"淮阴区": [
"119.03485",
"33.63171"
],
"清浦区": [
"119.02648",
"33.55232"
],
"涟水县": [
"119.26083",
"33.78094"
],
"洪泽县": [
"118.87344",
"33.29429"
],
"盱眙县": [
"118.54495",
"33.01086"
],
"金湖县": [
"119.02307",
"33.02219"
],
"盐城市": [
"120.139998",
"33.377631"
],
"亭湖区": [
"120.16583",
"33.37825"
],
"盐都区": [
"120.15441",
"33.3373"
],
"响水县": [
"119.56985",
"34.20513"
],
"滨海县": [
"119.82058",
"33.98972"
],
"阜宁县": [
"119.80175",
"33.78228"
],
"射阳县": [
"120.26043",
"33.77636"
],
"建湖县": [
"119.79852",
"33.47241"
],
"东台市": [
"120.32376",
"32.85078"
],
"大丰市": [
"120.46594",
"33.19893"
],
"扬州市": [
"119.421003",
"32.393159"
],
"广陵区": [
"119.43186",
"32.39472"
],
"邗江区": [
"119.39816",
"32.3765"
],
"江都区": [
"119.567481",
"32.426564"
],
"宝应县": [
"119.31213",
"33.23549"
],
"仪征市": [
"119.18432",
"32.27197"
],
"高邮市": [
"119.45965",
"32.78135"
],
"镇江市": [
"119.452753",
"32.204402"
],
"京口区": [
"119.46947",
"32.19809"
],
"润州区": [
"119.41134",
"32.19523"
],
"丹徒区": [
"119.43383",
"32.13183"
],
"丹阳市": [
"119.57525",
"31.99121"
],
"扬中市": [
"119.79718",
"32.2363"
],
"句容市": [
"119.16482",
"31.95591"
],
"泰州市": [
"119.915176",
"32.484882"
],
"海陵区": [
"119.91942",
"32.49101"
],
"高港区": [
"119.88089",
"32.31833"
],
"姜堰区": [
"120.148208",
"32.508483"
],
"兴化市": [
"119.85238",
"32.90944"
],
"靖江市": [
"120.27291",
"32.01595"
],
"泰兴市": [
"120.05194",
"32.17187"
],
"宿迁市": [
"118.293328",
"33.945154"
],
"宿城区": [
"118.29141",
"33.94219"
],
"宿豫区": [
"118.32922",
"33.94673"
],
"沭阳县": [
"118.76873",
"34.11446"
],
"泗阳县": [
"118.7033",
"33.72096"
],
"泗洪县": [
"118.21716",
"33.45996"
],
"浙江省": [
"120.153576",
"30.287459"
],
"杭州市": [
"120.153576",
"30.287459"
],
"上城区": [
"120.16922",
"30.24255"
],
"下城区": [
"120.18096",
"30.28153"
],
"江干区": [
"120.20517",
"30.2572"
],
"拱墅区": [
"120.14209",
"30.31968"
],
"西湖区": [
"115.87728",
"28.65688"
],
"滨江区": [
"120.21194",
"30.20835"
],
"萧山区": [
"120.26452",
"30.18505"
],
"余杭区": [
"120.29986",
"30.41829"
],
"桐庐县": [
"119.68853",
"29.79779"
],
"淳安县": [
"119.04257",
"29.60988"
],
"建德市": [
"119.28158",
"29.47603"
],
"富阳区": [
"119.96041",
"30.04878"
],
"临安市": [
"119.72473",
"30.23447"
],
"宁波市": [
"121.549792",
"29.868388"
],
"海曙区": [
"121.55106",
"29.85977"
],
"江东区": [
"121.57028",
"29.86701"
],
"江北区": [
"106.57434",
"29.60658"
],
"北仑区": [
"121.84408",
"29.90069"
],
"镇海区": [
"121.71615",
"29.94893"
],
"鄞州区": [
"121.54754",
"29.81614"
],
"象山县": [
"121.86917",
"29.47758"
],
"宁海县": [
"121.43072",
"29.2889"
],
"余姚市": [
"121.15341",
"30.03867"
],
"慈溪市": [
"121.26641",
"30.16959"
],
"奉化市": [
"121.41003",
"29.65537"
],
"温州市": [
"120.672111",
"28.000575"
],
"鹿城区": [
"120.65505",
"28.01489"
],
"龙湾区": [
"120.83053",
"27.91284"
],
"瓯海区": [
"120.63751",
"28.00714"
],
"洞头县": [
"121.15606",
"27.83634"
],
"永嘉县": [
"120.69317",
"28.15456"
],
"平阳县": [
"120.56506",
"27.66245"
],
"苍南县": [
"120.42608",
"27.51739"
],
"文成县": [
"120.09063",
"27.78678"
],
"泰顺县": [
"119.7182",
"27.55694"
],
"瑞安市": [
"120.65466",
"27.78041"
],
"乐清市": [
"120.9617",
"28.12404"
],
"嘉兴市": [
"120.750865",
"30.762653"
],
"南湖区": [
"120.78524",
"30.74865"
],
"秀洲区": [
"120.70867",
"30.76454"
],
"嘉善县": [
"120.92559",
"30.82993"
],
"海盐县": [
"120.9457",
"30.52547"
],
"海宁市": [
"120.6813",
"30.5097"
],
"平湖市": [
"121.02166",
"30.69618"
],
"桐乡市": [
"120.56485",
"30.6302"
],
"湖州市": [
"120.102398",
"30.867198"
],
"吴兴区": [
"120.12548",
"30.85752"
],
"南浔区": [
"120.42038",
"30.86686"
],
"德清县": [
"119.97836",
"30.53369"
],
"长兴县": [
"119.90783",
"31.00606"
],
"安吉县": [
"119.68158",
"30.63798"
],
"绍兴市": [
"120.582112",
"29.997117"
],
"越城区": [
"120.5819",
"29.98895"
],
"柯桥区": [
"120.492736",
"30.08763"
],
"上虞区": [
"120.476075",
"30.078038"
],
"新昌县": [
"120.90435",
"29.49991"
],
"诸暨市": [
"120.23629",
"29.71358"
],
"嵊州市": [
"120.82174",
"29.58854"
],
"金华市": [
"119.649506",
"29.089524"
],
"婺城区": [
"119.57135",
"29.09521"
],
"金东区": [
"119.69302",
"29.0991"
],
"武义县": [
"119.8164",
"28.89331"
],
"浦江县": [
"119.89181",
"29.45353"
],
"磐安县": [
"120.45022",
"29.05733"
],
"兰溪市": [
"119.45965",
"29.20841"
],
"义乌市": [
"120.0744",
"29.30558"
],
"东阳市": [
"120.24185",
"29.28942"
],
"永康市": [
"120.04727",
"28.88844"
],
"衢州市": [
"118.87263",
"28.941708"
],
"柯城区": [
"118.87109",
"28.96858"
],
"衢江区": [
"118.9598",
"28.97977"
],
"常山县": [
"118.51025",
"28.90191"
],
"开化县": [
"118.41616",
"29.13785"
],
"龙游县": [
"119.17221",
"29.02823"
],
"江山市": [
"118.62674",
"28.7386"
],
"舟山市": [
"122.106863",
"30.016028"
],
"定海区": [
"122.10677",
"30.01985"
],
"岱山县": [
"122.20486",
"30.24385"
],
"嵊泗县": [
"122.45129",
"30.72678"
],
"台州市": [
"121.428599",
"28.661378"
],
"椒江区": [
"121.44287",
"28.67301"
],
"黄岩区": [
"121.25891",
"28.65077"
],
"路桥区": [
"121.37381",
"28.58016"
],
"玉环县": [
"121.23242",
"28.13637"
],
"三门县": [
"121.3937",
"29.1051"
],
"天台县": [
"121.00848",
"29.1429"
],
"仙居县": [
"120.72872",
"28.84672"
],
"温岭市": [
"121.38595",
"28.37176"
],
"临海市": [
"121.13885",
"28.85603"
],
"丽水市": [
"119.921786",
"28.451993"
],
"莲都区": [
"119.9127",
"28.44583"
],
"青田县": [
"120.29028",
"28.13897"
],
"缙云县": [
"120.09036",
"28.65944"
],
"遂昌县": [
"119.27606",
"28.59291"
],
"松阳县": [
"119.48199",
"28.4494"
],
"云和县": [
"119.57287",
"28.11643"
],
"庆元县": [
"119.06256",
"27.61842"
],
"景宁畲族自治县": [
"119.63839",
"27.97393"
],
"龙泉市": [
"119.14163",
"28.0743"
],
"舟山群岛新区": [
"122.317657",
"29.813242"
],
"金塘岛": [
"121.893373",
"30.040641"
],
"六横岛": [
"122.14265",
"29.662938"
],
"衢山岛": [
"122.358425",
"30.442642"
],
"舟山本岛西北部": [
"122.03064",
"30.140377"
],
"岱山岛西南部": [
"122.180123",
"30.277269"
],
"泗礁岛": [
"122.45803",
"30.725112"
],
"朱家尖岛": [
"122.390636",
"29.916303"
],
"洋山岛": [
"121.995891",
"30.094637"
],
"长涂岛": [
"122.284681",
"30.24888"
],
"虾峙岛": [
"122.244686",
"29.752941"
],
"安徽省": [
"117.283042",
"31.86119"
],
"合肥市": [
"117.283042",
"31.86119"
],
"瑶海区": [
"117.30947",
"31.85809"
],
"庐阳区": [
"117.26452",
"31.87874"
],
"蜀山区": [
"117.26104",
"31.85117"
],
"包河区": [
"117.30984",
"31.79502"
],
"长丰县": [
"117.16549",
"32.47959"
],
"肥东县": [
"117.47128",
"31.88525"
],
"肥西县": [
"117.16845",
"31.72143"
],
"庐江县": [
"117.289844",
"31.251488"
],
"巢湖市": [
"117.874155",
"31.600518"
],
"芜湖市": [
"118.376451",
"31.326319"
],
"镜湖区": [
"118.38525",
"31.34038"
],
"弋江区": [
"118.37265",
"31.31178"
],
"鸠江区": [
"118.39215",
"31.36928"
],
"三山区": [
"118.22509",
"31.20703"
],
"芜湖县": [
"118.57525",
"31.13476"
],
"繁昌县": [
"118.19982",
"31.08319"
],
"南陵县": [
"118.33688",
"30.91969"
],
"无为县": [
"117.911432",
"31.303075"
],
"蚌埠市": [
"117.36237",
"32.934037"
],
"龙子湖区": [
"117.39379",
"32.94301"
],
"蚌山区": [
"117.36767",
"32.94411"
],
"禹会区": [
"117.35315",
"32.93336"
],
"淮上区": [
"117.35983",
"32.96423"
],
"怀远县": [
"117.20507",
"32.97007"
],
"五河县": [
"117.89144",
"33.14457"
],
"固镇县": [
"117.31558",
"33.31803"
],
"淮南市": [
"117.025449",
"32.645947"
],
"大通区": [
"117.05255",
"32.63265"
],
"田家庵区": [
"117.01739",
"32.64697"
],
"谢家集区": [
"116.86377",
"32.59818"
],
"八公山区": [
"116.83694",
"32.62941"
],
"潘集区": [
"116.81622",
"32.78287"
],
"凤台县": [
"116.71569",
"32.70752"
],
"马鞍山市": [
"118.507906",
"31.689362"
],
"花山区": [
"118.51231",
"31.7001"
],
"雨山区": [
"118.49869",
"31.68219"
],
"博望区": [
"118.844387",
"31.561871"
],
"当涂县": [
"118.49786",
"31.57098"
],
"含山县": [
"118.105545",
"31.727758"
],
"和县": [
"118.351405",
"31.741794"
],
"淮北市": [
"116.794664",
"33.971707"
],
"杜集区": [
"116.82998",
"33.99363"
],
"相山区": [
"116.79464",
"33.95979"
],
"烈山区": [
"116.81448",
"33.89355"
],
"濉溪县": [
"116.76785",
"33.91455"
],
"铜陵市": [
"117.816576",
"30.929935"
],
"铜官山区": [
"117.81525",
"30.93423"
],
"狮子山区": [
"117.89178",
"30.92631"
],
"铜陵县": [
"117.79113",
"30.95365"
],
"安庆市": [
"117.053571",
"30.524816"
],
"迎江区": [
"117.0493",
"30.50421"
],
"大观区": [
"117.03426",
"30.51216"
],
"宜秀区": [
"117.06127",
"30.50783"
],
"怀宁县": [
"116.82968",
"30.73376"
],
"枞阳县": [
"117.22015",
"30.69956"
],
"潜山县": [
"116.57574",
"30.63037"
],
"太湖县": [
"116.3088",
"30.4541"
],
"宿松县": [
"116.12915",
"30.1536"
],
"望江县": [
"116.68814",
"30.12585"
],
"岳西县": [
"116.35995",
"30.84983"
],
"桐城市": [
"116.95071",
"31.05216"
],
"黄山市": [
"118.317325",
"29.709239"
],
"屯溪区": [
"118.33368",
"29.71138"
],
"黄山区": [
"118.1416",
"30.2729"
],
"徽州区": [
"118.33654",
"29.82784"
],
"歙县": [
"118.43676",
"29.86745"
],
"休宁县": [
"118.18136",
"29.78607"
],
"黟县": [
"117.94137",
"29.92588"
],
"祁门县": [
"117.71847",
"29.85723"
],
"滁州市": [
"118.316264",
"32.303627"
],
"琅琊区": [
"118.30538",
"32.29521"
],
"南谯区": [
"118.31222",
"32.31861"
],
"来安县": [
"118.43438",
"32.45176"
],
"全椒县": [
"118.27291",
"32.08524"
],
"定远县": [
"117.68035",
"32.52488"
],
"凤阳县": [
"117.56454",
"32.86507"
],
"天长市": [
"118.99868",
"32.69124"
],
"明光市": [
"117.99093",
"32.77819"
],
"阜阳市": [
"115.819729",
"32.896969"
],
"颍州区": [
"115.80694",
"32.88346"
],
"颍东区": [
"115.85659",
"32.91296"
],
"颍泉区": [
"115.80712",
"32.9249"
],
"临泉县": [
"115.26232",
"33.06758"
],
"太和县": [
"115.62191",
"33.16025"
],
"阜南县": [
"115.58563",
"32.63551"
],
"颍上县": [
"116.26458",
"32.62998"
],
"界首市": [
"115.37445",
"33.25714"
],
"宿州市": [
"116.984084",
"33.633891"
],
"埇桥区": [
"116.97731",
"33.64058"
],
"砀山县": [
"116.35363",
"34.42356"
],
"萧县": [
"116.94546",
"34.1879"
],
"灵璧县": [
"117.55813",
"33.54339"
],
"泗县": [
"117.91033",
"33.48295"
],
"六安市": [
"116.507676",
"31.752889"
],
"金安区": [
"116.50912",
"31.75573"
],
"裕安区": [
"116.47985",
"31.73787"
],
"寿县": [
"116.78466",
"32.57653"
],
"霍邱县": [
"116.27795",
"32.353"
],
"舒城县": [
"116.94491",
"31.46413"
],
"金寨县": [
"115.93463",
"31.7351"
],
"霍山县": [
"116.33291",
"31.3929"
],
"亳州市": [
"115.782939",
"33.869338"
],
"谯城区": [
"115.77941",
"33.87532"
],
"涡阳县": [
"116.21682",
"33.50911"
],
"蒙城县": [
"116.5646",
"33.26477"
],
"利辛县": [
"116.208",
"33.14198"
],
"池州市": [
"117.489157",
"30.656037"
],
"贵池区": [
"117.48722",
"30.65283"
],
"东至县": [
"117.02719",
"30.0969"
],
"石台县": [
"117.48666",
"30.21042"
],
"青阳县": [
"117.84744",
"30.63932"
],
"宣城市": [
"118.757995",
"30.945667"
],
"宣州区": [
"118.75462",
"30.94439"
],
"郎溪县": [
"119.17923",
"31.12599"
],
"广德县": [
"119.41769",
"30.89371"
],
"泾县": [
"118.41964",
"30.69498"
],
"绩溪县": [
"118.59765",
"30.07069"
],
"旌德县": [
"118.54299",
"30.28898"
],
"宁国市": [
"118.98349",
"30.6238"
],
"福建省": [
"119.306239",
"26.075302"
],
"福州市": [
"119.306239",
"26.075302"
],
"台江区": [
"119.30899",
"26.06204"
],
"仓山区": [
"119.31543",
"26.04335"
],
"马尾区": [
"119.4555",
"25.98942"
],
"晋安区": [
"119.32828",
"26.0818"
],
"闽侯县": [
"119.13388",
"26.15014"
],
"连江县": [
"119.539704",
"26.197364"
],
"罗源县": [
"119.5509",
"26.48752"
],
"闽清县": [
"118.8623",
"26.21901"
],
"永泰县": [
"118.936",
"25.86816"
],
"平潭县": [
"119.791197",
"25.503672"
],
"福清市": [
"119.38507",
"25.72086"
],
"长乐市": [
"119.52313",
"25.96276"
],
"厦门市": [
"118.11022",
"24.490474"
],
"思明区": [
"118.08233",
"24.44543"
],
"海沧区": [
"118.03289",
"24.48461"
],
"湖里区": [
"118.14621",
"24.51253"
],
"集美区": [
"118.09719",
"24.57584"
],
"同安区": [
"118.15197",
"24.72308"
],
"翔安区": [
"118.24783",
"24.61863"
],
"莆田市": [
"119.007558",
"25.431011"
],
"城厢区": [
"118.99462",
"25.41872"
],
"涵江区": [
"119.11621",
"25.45876"
],
"荔城区": [
"119.01339",
"25.43369"
],
"秀屿区": [
"119.10553",
"25.31831"
],
"仙游县": [
"118.69177",
"25.36214"
],
"三明市": [
"117.635001",
"26.265444"
],
"梅列区": [
"117.64585",
"26.27171"
],
"三元区": [
"117.60788",
"26.23372"
],
"明溪县": [
"117.20498",
"26.35294"
],
"清流县": [
"116.8146",
"26.17144"
],
"宁化县": [
"116.66101",
"26.25874"
],
"大田县": [
"117.8471",
"25.6926"
],
"尤溪县": [
"118.19049",
"26.17002"
],
"沙县": [
"117.79266",
"26.39615"
],
"将乐县": [
"117.47317",
"26.72837"
],
"泰宁县": [
"117.17578",
"26.9001"
],
"建宁县": [
"116.84603",
"26.83091"
],
"永安市": [
"117.36517",
"25.94136"
],
"泉州市": [
"118.589421",
"24.908853"
],
"鲤城区": [
"118.56591",
"24.88741"
],
"丰泽区": [
"118.61328",
"24.89119"
],
"洛江区": [
"118.67111",
"24.93984"
],
"泉港区": [
"118.91586",
"25.12005"
],
"惠安县": [
"118.79687",
"25.03059"
],
"安溪县": [
"118.18719",
"25.05627"
],
"永春县": [
"118.29437",
"25.32183"
],
"德化县": [
"118.24176",
"25.49224"
],
"金门县": [
"118.317089",
"24.432706"
],
"石狮市": [
"118.64779",
"24.73242"
],
"晋江市": [
"118.55194",
"24.78141"
],
"南安市": [
"118.38589",
"24.96055"
],
"漳州市": [
"117.661801",
"24.510897"
],
"芗城区": [
"117.65402",
"24.51081"
],
"龙文区": [
"117.70971",
"24.50323"
],
"云霄县": [
"117.34051",
"23.95534"
],
"漳浦县": [
"117.61367",
"24.11706"
],
"诏安县": [
"117.17501",
"23.71148"
],
"长泰县": [
"117.75924",
"24.62526"
],
"东山县": [
"117.42822",
"23.70109"
],
"南靖县": [
"117.35736",
"24.51448"
],
"平和县": [
"117.3124",
"24.36395"
],
"华安县": [
"117.54077",
"25.00563"
],
"龙海市": [
"117.81802",
"24.44655"
],
"南平市": [
"118.178459",
"26.635627"
],
"延平区": [
"118.18189",
"26.63745"
],
"建阳区": [
"118.12267",
"27.332067"
],
"顺昌县": [
"117.8103",
"26.79298"
],
"浦城县": [
"118.54007",
"27.91888"
],
"光泽县": [
"117.33346",
"27.54231"
],
"松溪县": [
"118.78533",
"27.52624"
],
"政和县": [
"118.85571",
"27.36769"
],
"邵武市": [
"117.4924",
"27.34033"
],
"武夷山市": [
"118.03665",
"27.75543"
],
"建瓯市": [
"118.29766",
"27.02301"
],
"龙岩市": [
"117.02978",
"25.091603"
],
"新罗区": [
"117.03693",
"25.09834"
],
"长汀县": [
"116.35888",
"25.82773"
],
"永定区": [
"110.47464",
"29.13387"
],
"上杭县": [
"116.42022",
"25.04943"
],
"武平县": [
"116.10229",
"25.09244"
],
"连城县": [
"116.75454",
"25.7103"
],
"漳平市": [
"117.41992",
"25.29109"
],
"宁德市": [
"119.527082",
"26.65924"
],
"蕉城区": [
"119.52643",
"26.66048"
],
"霞浦县": [
"119.99893",
"26.88578"
],
"古田县": [
"118.74688",
"26.57682"
],
"屏南县": [
"118.98861",
"26.91099"
],
"寿宁县": [
"119.5039",
"27.45996"
],
"周宁县": [
"119.33837",
"27.10664"
],
"柘荣县": [
"119.89971",
"27.23543"
],
"福安市": [
"119.6495",
"27.08673"
],
"福鼎市": [
"120.21664",
"27.3243"
],
"江西省": [
"115.892151",
"28.676493"
],
"南昌市": [
"115.892151",
"28.676493"
],
"东湖区": [
"115.8988",
"28.68505"
],
"青云谱区": [
"115.915",
"28.63199"
],
"湾里区": [
"115.73104",
"28.71529"
],
"青山湖区": [
"115.9617",
"28.68206"
],
"南昌县": [
"115.94393",
"28.54559"
],
"新建县": [
"115.81546",
"28.69248"
],
"安义县": [
"115.54879",
"28.84602"
],
"进贤县": [
"116.24087",
"28.37679"
],
"景德镇市": [
"117.214664",
"29.29256"
],
"昌江区": [
"117.18359",
"29.27321"
],
"珠山区": [
"117.20233",
"29.30127"
],
"浮梁县": [
"117.21517",
"29.35156"
],
"乐平市": [
"117.12887",
"28.96295"
],
"萍乡市": [
"113.852186",
"27.622946"
],
"安源区": [
"113.89135",
"27.61653"
],
"湘东区": [
"113.73294",
"27.64007"
],
"莲花县": [
"113.96142",
"27.12866"
],
"上栗县": [
"113.79403",
"27.87467"
],
"芦溪县": [
"114.02951",
"27.63063"
],
"九江市": [
"115.992811",
"29.712034"
],
"庐山区": [
"115.98904",
"29.67177"
],
"浔阳区": [
"115.98986",
"29.72786"
],
"九江县": [
"115.91128",
"29.60852"
],
"武宁县": [
"115.10061",
"29.2584"
],
"修水县": [
"114.54684",
"29.02539"
],
"永修县": [
"115.80911",
"29.02093"
],
"德安县": [
"115.75601",
"29.31341"
],
"星子县": [
"116.04492",
"29.44608"
],
"都昌县": [
"116.20401",
"29.27327"
],
"湖口县": [
"116.21853",
"29.73818"
],
"彭泽县": [
"116.55011",
"29.89589"
],
"瑞昌市": [
"115.66705",
"29.67183"
],
"共青城市": [
"115.801939",
"29.238785"
],
"新余市": [
"114.930835",
"27.810834"
],
"渝水区": [
"114.944",
"27.80098"
],
"分宜县": [
"114.69189",
"27.81475"
],
"鹰潭市": [
"117.033838",
"28.238638"
],
"月湖区": [
"117.03732",
"28.23913"
],
"余江县": [
"116.81851",
"28.21034"
],
"贵溪市": [
"117.24246",
"28.2926"
],
"赣州市": [
"114.940278",
"25.85097"
],
"章贡区": [
"114.94284",
"25.8624"
],
"南康区": [
"114.756933",
"25.661721"
],
"赣县": [
"115.01171",
"25.86149"
],
"信丰县": [
"114.92279",
"25.38612"
],
"大余县": [
"114.35757",
"25.39561"
],
"上犹县": [
"114.54138",
"25.79567"
],
"崇义县": [
"114.30835",
"25.68186"
],
"安远县": [
"115.39483",
"25.1371"
],
"龙南县": [
"114.78994",
"24.91086"
],
"定南县": [
"115.02713",
"24.78395"
],
"全南县": [
"114.5292",
"24.74324"
],
"宁都县": [
"116.01565",
"26.47227"
],
"于都县": [
"115.41415",
"25.95257"
],
"兴国县": [
"115.36309",
"26.33776"
],
"会昌县": [
"115.78555",
"25.60068"
],
"寻乌县": [
"115.64852",
"24.95513"
],
"石城县": [
"116.3442",
"26.32617"
],
"瑞金市": [
"116.02703",
"25.88557"
],
"吉安市": [
"114.986373",
"27.111699"
],
"吉州区": [
"114.97598",
"27.10669"
],
"青原区": [
"115.01747",
"27.10577"
],
"吉安县": [
"114.90695",
"27.04048"
],
"吉水县": [
"115.1343",
"27.21071"
],
"峡江县": [
"115.31723",
"27.576"
],
"新干县": [
"115.39306",
"27.74092"
],
"永丰县": [
"115.44238",
"27.31785"
],
"泰和县": [
"114.90789",
"26.79113"
],
"遂川县": [
"114.51629",
"26.32598"
],
"万安县": [
"114.78659",
"26.45931"
],
"安福县": [
"114.61956",
"27.39276"
],
"永新县": [
"114.24246",
"26.94488"
],
"井冈山市": [
"114.28949",
"26.74804"
],
"宜春市": [
"114.391136",
"27.8043"
],
"袁州区": [
"114.38246",
"27.79649"
],
"奉新县": [
"115.40036",
"28.6879"
],
"万载县": [
"114.4458",
"28.10656"
],
"上高县": [
"114.92459",
"28.23423"
],
"宜丰县": [
"114.7803",
"28.38555"
],
"靖安县": [
"115.36279",
"28.86167"
],
"铜鼓县": [
"114.37036",
"28.52311"
],
"丰城市": [
"115.77114",
"28.15918"
],
"樟树市": [
"115.5465",
"28.05332"
],
"高安市": [
"115.3753",
"28.4178"
],
"抚州市": [
"116.358351",
"27.98385"
],
"临川区": [
"116.35919",
"27.97721"
],
"南城县": [
"116.64419",
"27.55381"
],
"黎川县": [
"116.90771",
"27.28232"
],
"南丰县": [
"116.5256",
"27.21842"
],
"崇仁县": [
"116.06021",
"27.75962"
],
"乐安县": [
"115.83108",
"27.42812"
],
"宜黄县": [
"116.23626",
"27.55487"
],
"金溪县": [
"116.77392",
"27.90753"
],
"资溪县": [
"117.06939",
"27.70493"
],
"东乡县": [
"116.59039",
"28.23614"
],
"广昌县": [
"116.32547",
"26.8341"
],
"上饶市": [
"117.971185",
"28.44442"
],
"信州区": [
"117.96682",
"28.43121"
],
"上饶县": [
"117.90884",
"28.44856"
],
"广丰县": [
"118.19158",
"28.43766"
],
"玉山县": [
"118.24462",
"28.6818"
],
"铅山县": [
"117.70996",
"28.31549"
],
"横峰县": [
"117.5964",
"28.40716"
],
"弋阳县": [
"117.45929",
"28.37451"
],
"余干县": [
"116.69555",
"28.70206"
],
"鄱阳县": [
"116.69967",
"29.0118"
],
"万年县": [
"117.06884",
"28.69537"
],
"婺源县": [
"117.86105",
"29.24841"
],
"德兴市": [
"117.57919",
"28.94736"
],
"山东省": [
"117.000923",
"36.675807"
],
"济南市": [
"117.000923",
"36.675807"
],
"历下区": [
"117.0768",
"36.66661"
],
"市中区": [
"103.76159",
"29.55543"
],
"槐荫区": [
"116.90075",
"36.65136"
],
"天桥区": [
"116.98749",
"36.67801"
],
"历城区": [
"117.06509",
"36.67995"
],
"长清区": [
"116.75192",
"36.55352"
],
"平阴县": [
"116.45587",
"36.28955"
],
"济阳县": [
"117.17327",
"36.97845"
],
"商河县": [
"117.15722",
"37.31119"
],
"章丘市": [
"117.53677",
"36.71392"
],
"青岛市": [
"120.369557",
"36.094406"
],
"市南区": [
"120.38773",
"36.06671"
],
"市北区": [
"120.37469",
"36.08734"
],
"黄岛区": [
"120.19775",
"35.96065"
],
"崂山区": [
"120.46923",
"36.10717"
],
"李沧区": [
"120.43286",
"36.14502"
],
"城阳区": [
"120.39621",
"36.30735"
],
"胶州市": [
"120.0335",
"36.26442"
],
"即墨市": [
"120.44699",
"36.38907"
],
"平度市": [
"119.95996",
"36.78688"
],
"莱西市": [
"120.51773",
"36.88804"
],
"西海岸新区": [
"120.19775",
"35.96065"
],
"淄博市": [
"118.047648",
"36.814939"
],
"淄川区": [
"117.96655",
"36.64339"
],
"张店区": [
"118.01788",
"36.80676"
],
"博山区": [
"117.86166",
"36.49469"
],
"临淄区": [
"118.30966",
"36.8259"
],
"周村区": [
"117.86969",
"36.80322"
],
"桓台县": [
"118.09698",
"36.96036"
],
"高青县": [
"117.82708",
"37.17197"
],
"沂源县": [
"118.17105",
"36.18536"
],
"枣庄市": [
"117.557964",
"34.856424"
],
"薛城区": [
"117.26318",
"34.79498"
],
"峄城区": [
"117.59057",
"34.77225"
],
"台儿庄区": [
"117.73452",
"34.56363"
],
"山亭区": [
"117.4663",
"35.09541"
],
"滕州市": [
"117.165",
"35.10534"
],
"东营市": [
"118.4963",
"37.461266"
],
"东营区": [
"118.5816",
"37.44875"
],
"河口区": [
"118.5249",
"37.88541"
],
"垦利县": [
"118.54815",
"37.58825"
],
"利津县": [
"118.25637",
"37.49157"
],
"广饶县": [
"118.40704",
"37.05381"
],
"烟台市": [
"121.391382",
"37.539297"
],
"芝罘区": [
"121.40023",
"37.54064"
],
"福山区": [
"121.26812",
"37.49841"
],
"牟平区": [
"121.60067",
"37.38846"
],
"莱山区": [
"121.44512",
"37.51165"
],
"长岛县": [
"120.738",
"37.91754"
],
"龙口市": [
"120.50634",
"37.64064"
],
"莱阳市": [
"120.71066",
"36.98012"
],
"莱州市": [
"119.94137",
"37.17806"
],
"蓬莱市": [
"120.75988",
"37.81119"
],
"招远市": [
"120.40481",
"37.36269"
],
"栖霞市": [
"120.85025",
"37.33571"
],
"海阳市": [
"121.15976",
"36.77622"
],
"潍坊市": [
"119.107078",
"36.70925"
],
"潍城区": [
"119.10582",
"36.7139"
],
"寒亭区": [
"119.21832",
"36.77504"
],
"坊子区": [
"119.16476",
"36.65218"
],
"奎文区": [
"119.12532",
"36.70723"
],
"临朐县": [
"118.544",
"36.51216"
],
"昌乐县": [
"118.83017",
"36.7078"
],
"青州市": [
"118.47915",
"36.68505"
],
"诸城市": [
"119.40988",
"35.99662"
],
"寿光市": [
"118.74047",
"36.88128"
],
"安丘市": [
"119.2189",
"36.47847"
],
"高密市": [
"119.75701",
"36.38397"
],
"昌邑市": [
"119.39767",
"36.86008"
],
"济宁市": [
"116.587245",
"35.415393"
],
"任城区": [
"116.59504",
"35.40659"
],
"兖州区": [
"116.826546",
"35.552305"
],
"微山县": [
"117.12875",
"34.80712"
],
"鱼台县": [
"116.64761",
"34.99674"
],
"金乡县": [
"116.31146",
"35.065"
],
"嘉祥县": [
"116.34249",
"35.40836"
],
"汶上县": [
"116.48742",
"35.73295"
],
"泗水县": [
"117.27948",
"35.66113"
],
"梁山县": [
"116.09683",
"35.80322"
],
"曲阜市": [
"116.98645",
"35.58091"
],
"邹城市": [
"116.97335",
"35.40531"
],
"泰安市": [
"117.129063",
"36.194968"
],
"泰山区": [
"121.430811",
"25.058864"
],
"岱岳区": [
"117.04174",
"36.1875"
],
"宁阳县": [
"116.80542",
"35.7599"
],
"东平县": [
"116.47113",
"35.93792"
],
"新泰市": [
"117.76959",
"35.90887"
],
"肥城市": [
"116.76815",
"36.18247"
],
"威海市": [
"122.116394",
"37.509691"
],
"环翠区": [
"122.12344",
"37.50199"
],
"文登区": [
"122.057139",
"37.196211"
],
"荣成市": [
"122.48773",
"37.1652"
],
"乳山市": [
"121.53814",
"36.91918"
],
"日照市": [
"119.461208",
"35.428588"
],
"东港区": [
"119.46237",
"35.42541"
],
"岚山区": [
"119.31884",
"35.12203"
],
"五莲县": [
"119.207",
"35.75004"
],
"莒县": [
"118.83789",
"35.58054"
],
"莱芜市": [
"117.677736",
"36.214397"
],
"莱城区": [
"117.65986",
"36.2032"
],
"钢城区": [
"117.8049",
"36.06319"
],
"临沂市": [
"118.326443",
"35.065282"
],
"兰山区": [
"118.34817",
"35.06872"
],
"罗庄区": [
"118.28466",
"34.99627"
],
"沂南县": [
"118.47061",
"35.55131"
],
"郯城县": [
"118.36712",
"34.61354"
],
"沂水县": [
"118.63009",
"35.78731"
],
"兰陵县": [
"117.856592",
"34.738315"
],
"费县": [
"117.97836",
"35.26562"
],
"平邑县": [
"117.63867",
"35.50573"
],
"莒南县": [
"118.83227",
"35.17539"
],
"蒙阴县": [
"117.94592",
"35.70996"
],
"临沭县": [
"118.65267",
"34.92091"
],
"德州市": [
"116.307428",
"37.453968"
], | ],
"陵城区": [
"116.57601",
"37.33571"
],
"宁津县": [
"116.79702",
"37.65301"
],
"庆云县": [
"117.38635",
"37.77616"
],
"临邑县": [
"116.86547",
"37.19053"
],
"齐河县": [
"116.75515",
"36.79532"
],
"平原县": [
"116.43432",
"37.16632"
],
"夏津县": [
"116.0017",
"36.94852"
],
"武城县": [
"116.07009",
"37.21403"
],
"乐陵市": [
"117.23141",
"37.73164"
],
"禹城市": [
"116.64309",
"36.93444"
],
"聊城市": [
"115.980367",
"36.456013"
],
"东昌府区": [
"115.97383",
"36.44458"
],
"阳谷县": [
"115.79126",
"36.11444"
],
"莘县": [
"115.6697",
"36.23423"
],
"茌平县": [
"116.25491",
"36.57969"
],
"东阿县": [
"116.25012",
"36.33209"
],
"冠县": [
"115.44195",
"36.48429"
],
"高唐县": [
"116.23172",
"36.86535"
],
"临清市": [
"115.70629",
"36.83945"
],
"滨州市": [
"118.016974",
"37.383542"
],
"滨城区": [
"118.02026",
"37.38524"
],
"沾化区": [
"118.13214",
"37.69832"
],
"惠民县": [
"117.51113",
"37.49013"
],
"阳信县": [
"117.58139",
"37.64198"
],
"无棣县": [
"117.61395",
"37.74009"
],
"博兴县": [
"118.1336",
"37.14316"
],
"邹平县": [
"117.74307",
"36.86295"
],
"北海新区": [
"118.016974",
"37.383542"
],
"菏泽市": [
"115.469381",
"35.246531"
],
"牡丹区": [
"115.41662",
"35.25091"
],
"曹县": [
"115.54226",
"34.82659"
],
"单县": [
"116.08703",
"34.79514"
],
"成武县": [
"115.8897",
"34.95332"
],
"巨野县": [
"116.09497",
"35.39788"
],
"郓城县": [
"115.94439",
"35.60044"
],
"鄄城县": [
"115.50997",
"35.56412"
],
"定陶县": [
"115.57287",
"35.07118"
],
"东明县": [
"115.09079",
"35.28906"
],
"河南省": [
"113.665412",
"34.757975"
],
"郑州市": [
"113.665412",
"34.757975"
],
"中原区": [
"113.61333",
"34.74827"
],
"二七区": [
"113.63931",
"34.72336"
],
"管城回族区": [
"113.67734",
"34.75383"
],
"金水区": [
"113.66057",
"34.80028"
],
"上街区": [
"113.30897",
"34.80276"
],
"惠济区": [
"113.61688",
"34.86735"
],
"中牟县": [
"113.97619",
"34.71899"
],
"巩义市": [
"113.022",
"34.74794"
],
"荥阳市": [
"113.38345",
"34.78759"
],
"新密市": [
"113.3869",
"34.53704"
],
"新郑市": [
"113.73645",
"34.3955"
],
"登封市": [
"113.05023",
"34.45345"
],
"开封市": [
"114.341447",
"34.797049"
],
"龙亭区": [
"114.35484",
"34.79995"
],
"顺河回族区": [
"114.36123",
"34.79586"
],
"禹王台区": [
"114.34787",
"34.77693"
],
"祥符区": [
"114.43859",
"34.75874"
],
"杞县": [
"114.7828",
"34.55033"
],
"通许县": [
"114.46716",
"34.47522"
],
"尉氏县": [
"114.19284",
"34.41223"
],
"兰考县": [
"114.81961",
"34.8235"
],
"洛阳市": [
"112.434468",
"34.663041"
],
"老城区": [
"112.46902",
"34.68364"
],
"西工区": [
"112.4371",
"34.67"
],
"瀍河回族区": [
"112.50018",
"34.67985"
],
"涧西区": [
"112.39588",
"34.65823"
],
"吉利区": [
"112.58905",
"34.90088"
],
"洛龙区": [
"112.46412",
"34.61866"
],
"孟津县": [
"112.44351",
"34.826"
],
"新安县": [
"112.13238",
"34.72814"
],
"栾川县": [
"111.61779",
"33.78576"
],
"嵩县": [
"112.08526",
"34.13466"
],
"汝阳县": [
"112.47314",
"34.15387"
],
"宜阳县": [
"112.17907",
"34.51523"
],
"洛宁县": [
"111.65087",
"34.38913"
],
"伊川县": [
"112.42947",
"34.42205"
],
"偃师市": [
"112.7922",
"34.7281"
],
"平顶山市": [
"113.307718",
"33.735241"
],
"卫东区": [
"113.33511",
"33.73472"
],
"石龙区": [
"112.89879",
"33.89878"
],
"湛河区": [
"113.29252",
"33.7362"
],
"宝丰县": [
"113.05493",
"33.86916"
],
"叶县": [
"113.35104",
"33.62225"
],
"鲁山县": [
"112.9057",
"33.73879"
],
"郏县": [
"113.21588",
"33.97072"
],
"舞钢市": [
"113.52417",
"33.2938"
],
"汝州市": [
"112.84301",
"34.16135"
],
"安阳市": [
"114.352482",
"36.103442"
],
"文峰区": [
"114.35708",
"36.09046"
],
"北关区": [
"114.35735",
"36.11872"
],
"殷都区": [
"114.3034",
"36.1099"
],
"龙安区": [
"114.34814",
"36.11904"
],
"安阳县": [
"114.36605",
"36.06695"
],
"汤阴县": [
"114.35839",
"35.92152"
],
"滑县": [
"114.52066",
"35.5807"
],
"内黄县": [
"114.90673",
"35.95269"
],
"林州市": [
"113.81558",
"36.07804"
],
"鹤壁市": [
"114.295444",
"35.748236"
],
"鹤山区": [
"114.16336",
"35.95458"
],
"山城区": [
"114.18443",
"35.89773"
],
"淇滨区": [
"114.29867",
"35.74127"
],
"浚县": [
"114.54879",
"35.67085"
],
"淇县": [
"114.1976",
"35.60782"
],
"新乡市": [
"113.883991",
"35.302616"
],
"红旗区": [
"113.87523",
"35.30367"
],
"卫滨区": [
"113.86578",
"35.30211"
],
"凤泉区": [
"113.91507",
"35.38399"
],
"牧野区": [
"113.9086",
"35.3149"
],
"新乡县": [
"113.80511",
"35.19075"
],
"获嘉县": [
"113.66159",
"35.26521"
],
"原阳县": [
"113.93994",
"35.06565"
],
"延津县": [
"114.20266",
"35.14327"
],
"封丘县": [
"114.41915",
"35.04166"
],
"长垣县": [
"114.66882",
"35.20046"
],
"卫辉市": [
"114.06454",
"35.39843"
],
"辉县市": [
"113.8067",
"35.46307"
],
"焦作市": [
"113.238266",
"35.23904"
],
"解放区": [
"113.22933",
"35.24023"
],
"中站区": [
"113.18315",
"35.23665"
],
"马村区": [
"113.3187",
"35.26908"
],
"山阳区": [
"113.25464",
"35.21436"
],
"修武县": [
"113.44775",
"35.22357"
],
"博爱县": [
"113.06698",
"35.16943"
],
"武陟县": [
"113.39718",
"35.09505"
],
"温县": [
"113.08065",
"34.94022"
],
"沁阳市": [
"112.94494",
"35.08935"
],
"孟州市": [
"112.79138",
"34.9071"
],
"濮阳市": [
"115.041299",
"35.768234"
],
"华龙区": [
"115.07446",
"35.77736"
],
"清丰县": [
"115.10415",
"35.88507"
],
"南乐县": [
"115.20639",
"36.07686"
],
"范县": [
"115.50405",
"35.85178"
],
"台前县": [
"115.87158",
"35.96923"
],
"濮阳县": [
"115.03057",
"35.70745"
],
"许昌市": [
"113.826063",
"34.022956"
],
"魏都区": [
"113.8227",
"34.02544"
],
"许昌县": [
"113.84707",
"34.00406"
],
"鄢陵县": [
"114.18795",
"34.10317"
],
"襄城县": [
"113.48196",
"33.84928"
],
"禹州市": [
"113.48803",
"34.14054"
],
"长葛市": [
"113.77328",
"34.21846"
],
"漯河市": [
"114.026405",
"33.575855"
],
"源汇区": [
"114.00647",
"33.55627"
],
"郾城区": [
"114.00694",
"33.58723"
],
"召陵区": [
"114.09399",
"33.58601"
],
"舞阳县": [
"113.59848",
"33.43243"
],
"临颍县": [
"113.93661",
"33.81123"
],
"三门峡市": [
"111.194099",
"34.777338"
],
"湖滨区": [
"111.20006",
"34.77872"
],
"渑池县": [
"111.76184",
"34.76725"
],
"陕县": [
"111.10333",
"34.72052"
],
"卢氏县": [
"111.04782",
"34.05436"
],
"义马市": [
"111.87445",
"34.74721"
],
"灵宝市": [
"110.8945",
"34.51682"
],
"南阳市": [
"112.540918",
"32.999082"
],
"宛城区": [
"112.53955",
"33.00378"
],
"卧龙区": [
"112.53479",
"32.98615"
],
"南召县": [
"112.43194",
"33.49098"
],
"方城县": [
"113.01269",
"33.25453"
],
"西峡县": [
"111.48187",
"33.29772"
],
"镇平县": [
"112.2398",
"33.03629"
],
"内乡县": [
"111.84957",
"33.04671"
],
"淅川县": [
"111.48663",
"33.13708"
],
"社旗县": [
"112.94656",
"33.05503"
],
"唐河县": [
"112.83609",
"32.69453"
],
"新野县": [
"112.36151",
"32.51698"
],
"桐柏县": [
"113.42886",
"32.37917"
],
"邓州市": [
"112.0896",
"32.68577"
],
"商丘市": [
"115.650497",
"34.437054"
],
"梁园区": [
"115.64487",
"34.44341"
],
"睢阳区": [
"115.65338",
"34.38804"
],
"民权县": [
"115.14621",
"34.64931"
],
"睢县": [
"115.07168",
"34.44539"
],
"宁陵县": [
"115.30511",
"34.45463"
],
"柘城县": [
"115.30538",
"34.0911"
],
"虞城县": [
"115.86337",
"34.40189"
],
"夏邑县": [
"116.13348",
"34.23242"
],
"永城市": [
"116.44943",
"33.92911"
],
"信阳市": [
"114.075031",
"32.123274"
],
"浉河区": [
"114.05871",
"32.1168"
],
"平桥区": [
"114.12435",
"32.10095"
],
"罗山县": [
"114.5314",
"32.20277"
],
"光山县": [
"114.91873",
"32.00992"
],
"新县": [
"114.87924",
"31.64386"
],
"商城县": [
"115.40856",
"31.79986"
],
"固始县": [
"115.68298",
"32.18011"
],
"潢川县": [
"115.04696",
"32.13763"
],
"淮滨县": [
"115.4205",
"32.46614"
],
"息县": [
"114.7402",
"32.34279"
],
"周口市": [
"114.649653",
"33.620357"
],
"川汇区": [
"114.64202",
"33.6256"
],
"扶沟县": [
"114.39477",
"34.05999"
],
"西华县": [
"114.52279",
"33.78548"
],
"商水县": [
"114.60604",
"33.53912"
],
"沈丘县": [
"115.09851",
"33.40936"
],
"郸城县": [
"115.17715",
"33.64485"
],
"淮阳县": [
"114.88848",
"33.73211"
],
"太康县": [
"114.83773",
"34.06376"
],
"鹿邑县": [
"115.48553",
"33.85931"
],
"项城市": [
"114.87558",
"33.4672"
],
"驻马店市": [
"114.024736",
"32.980169"
],
"驿城区": [
"113.99377",
"32.97316"
],
"西平县": [
"114.02322",
"33.3845"
],
"上蔡县": [
"114.26825",
"33.26825"
],
"平舆县": [
"114.63552",
"32.95727"
],
"正阳县": [
"114.38952",
"32.6039"
],
"确山县": [
"114.02917",
"32.80281"
],
"泌阳县": [
"113.32681",
"32.71781"
],
"汝南县": [
"114.36138",
"33.00461"
],
"遂平县": [
"114.01297",
"33.14571"
],
"新蔡县": [
"114.98199",
"32.7502"
],
"直辖县级": [
"91.132212",
"29.660361"
],
"济源市": [
"112.590047",
"35.090378"
],
"湖北省": [
"114.298572",
"30.584355"
],
"武汉市": [
"114.298572",
"30.584355"
],
"江岸区": [
"114.30943",
"30.59982"
],
"江汉区": [
"114.27093",
"30.60146"
],
"硚口区": [
"114.26422",
"30.56945"
],
"汉阳区": [
"114.27478",
"30.54915"
],
"武昌区": [
"114.31589",
"30.55389"
],
"洪山区": [
"114.34375",
"30.49989"
],
"东西湖区": [
"114.13708",
"30.61989"
],
"汉南区": [
"114.08462",
"30.30879"
],
"蔡甸区": [
"114.02929",
"30.58197"
],
"江夏区": [
"114.31301",
"30.34653"
],
"黄陂区": [
"114.37512",
"30.88151"
],
"新洲区": [
"114.80136",
"30.84145"
],
"黄石市": [
"115.077048",
"30.220074"
],
"黄石港区": [
"115.06604",
"30.22279"
],
"西塞山区": [
"115.11016",
"30.20487"
],
"下陆区": [
"114.96112",
"30.17368"
],
"铁山区": [
"114.90109",
"30.20678"
],
"阳新县": [
"115.21527",
"29.83038"
],
"大冶市": [
"114.97174",
"30.09438"
],
"十堰市": [
"110.785239",
"32.647017"
],
"茅箭区": [
"110.81341",
"32.59153"
],
"张湾区": [
"110.77067",
"32.65195"
],
"郧阳区": [
"110.81854",
"32.83593"
],
"郧西县": [
"110.42556",
"32.99349"
],
"竹山县": [
"110.23071",
"32.22536"
],
"竹溪县": [
"109.71798",
"32.31901"
],
"房县": [
"110.74386",
"32.05794"
],
"丹江口市": [
"111.51525",
"32.54085"
],
"宜昌市": [
"111.290843",
"30.702636"
],
"西陵区": [
"111.28573",
"30.71077"
],
"伍家岗区": [
"111.3609",
"30.64434"
],
"点军区": [
"111.26828",
"30.6934"
],
"猇亭区": [
"111.44079",
"30.52663"
],
"夷陵区": [
"111.3262",
"30.76881"
],
"远安县": [
"111.6416",
"31.05989"
],
"兴山县": [
"110.74951",
"31.34686"
],
"秭归县": [
"110.98156",
"30.82702"
],
"长阳土家族自治县": [
"111.20105",
"30.47052"
],
"五峰土家族自治县": [
"110.6748",
"30.19856"
],
"宜都市": [
"111.45025",
"30.37807"
],
"当阳市": [
"111.78912",
"30.8208"
],
"枝江市": [
"111.76855",
"30.42612"
],
"襄阳市": [
"112.144146",
"32.042426"
],
"襄城区": [
"112.13372",
"32.01017"
],
"樊城区": [
"112.13546",
"32.04482"
],
"襄州区": [
"112.150327",
"32.015088"
],
"南漳县": [
"111.84603",
"31.77653"
],
"谷城县": [
"111.65267",
"32.26377"
],
"保康县": [
"111.26138",
"31.87874"
],
"老河口市": [
"111.67117",
"32.38476"
],
"枣阳市": [
"112.77444",
"32.13142"
],
"宜城市": [
"112.25772",
"31.71972"
],
"鄂州市": [
"114.890593",
"30.396536"
],
"梁子湖区": [
"114.68463",
"30.10003"
],
"华容区": [
"114.73568",
"30.53328"
],
"鄂城区": [
"114.89158",
"30.40024"
],
"荆门市": [
"112.204251",
"31.03542"
],
"东宝区": [
"112.20147",
"31.05192"
],
"掇刀区": [
"112.208",
"30.97316"
],
"京山县": [
"113.11074",
"31.0224"
],
"沙洋县": [
"112.58853",
"30.70916"
],
"钟祥市": [
"112.58932",
"31.1678"
],
"孝感市": [
"113.926655",
"30.926423"
],
"孝南区": [
"113.91111",
"30.9168"
],
"孝昌县": [
"113.99795",
"31.25799"
],
"大悟县": [
"114.12564",
"31.56176"
],
"云梦县": [
"113.75289",
"31.02093"
],
"应城市": [
"113.57287",
"30.92834"
],
"安陆市": [
"113.68557",
"31.25693"
],
"汉川市": [
"113.83898",
"30.66117"
],
"荆州市": [
"112.23813",
"30.326857"
],
"沙市区": [
"112.25543",
"30.31107"
],
"荆州区": [
"112.19006",
"30.35264"
],
"公安县": [
"112.23242",
"30.05902"
],
"监利县": [
"112.89462",
"29.81494"
],
"江陵县": [
"112.42468",
"30.04174"
],
"石首市": [
"112.42636",
"29.72127"
],
"洪湖市": [
"113.47598",
"29.827"
],
"松滋市": [
"111.76739",
"30.16965"
],
"黄冈市": [
"114.879365",
"30.447711"
],
"黄州区": [
"114.88008",
"30.43436"
],
"团风县": [
"114.87228",
"30.64359"
],
"红安县": [
"114.6224",
"31.28668"
],
"罗田县": [
"115.39971",
"30.78255"
],
"英山县": [
"115.68142",
"30.73516"
],
"浠水县": [
"115.26913",
"30.45265"
],
"蕲春县": [
"115.43615",
"30.22613"
],
"黄梅县": [
"115.94427",
"30.07033"
],
"麻城市": [
"115.00988",
"31.17228"
],
"武穴市": [
"115.55975",
"29.84446"
],
"咸宁市": [
"114.328963",
"29.832798"
],
"咸安区": [
"114.29872",
"29.8529"
],
"嘉鱼县": [
"113.93927",
"29.97054"
],
"通城县": [
"113.81582",
"29.24568"
],
"崇阳县": [
"114.03982",
"29.55564"
],
"通山县": [
"114.48239",
"29.6063"
],
"赤壁市": [
"113.90039",
"29.72454"
],
"随州市": [
"113.37377",
"31.717497"
],
"曾都区": [
"113.37128",
"31.71614"
],
"随县": [
"113.82663",
"31.6179"
],
"广水市": [
"113.82663",
"31.6179"
],
"恩施土家族苗族自治州": [
"109.48699",
"30.283114"
],
"恩施市": [
"109.47942",
"30.29502"
],
"利川市": [
"108.93591",
"30.29117"
],
"建始县": [
"109.72207",
"30.60209"
],
"巴东县": [
"110.34066",
"31.04233"
],
"宣恩县": [
"109.49179",
"29.98714"
],
"咸丰县": [
"109.152",
"29.67983"
],
"来凤县": [
"109.40716",
"29.49373"
],
"鹤峰县": [
"110.03091",
"29.89072"
],
"仙桃市": [
"113.453974",
"30.364953"
],
"潜江市": [
"112.896866",
"30.421215"
],
"天门市": [
"113.165862",
"30.653061"
],
"神农架林区": [
"110.671525",
"31.744449"
],
"湖南省": [
"112.982279",
"28.19409"
],
"长沙市": [
"112.982279",
"28.19409"
],
"芙蓉区": [
"113.03176",
"28.1844"
],
"天心区": [
"112.98991",
"28.1127"
],
"岳麓区": [
"112.93133",
"28.2351"
],
"开福区": [
"112.98623",
"28.25585"
],
"雨花区": [
"113.03567",
"28.13541"
],
"望城区": [
"112.819549",
"28.347458"
],
"长沙县": [
"113.08071",
"28.24595"
],
"宁乡县": [
"112.55749",
"28.25358"
],
"浏阳市": [
"113.64312",
"28.16375"
],
"株洲市": [
"113.151737",
"27.835806"
],
"荷塘区": [
"113.17315",
"27.85569"
],
"芦淞区": [
"113.15562",
"27.78525"
],
"石峰区": [
"113.11776",
"27.87552"
],
"天元区": [
"113.12335",
"27.83103"
],
"株洲县": [
"113.14428",
"27.69826"
],
"攸县": [
"113.34365",
"27.00352"
],
"茶陵县": [
"113.54364",
"26.7915"
],
"炎陵县": [
"113.77163",
"26.48818"
],
"醴陵市": [
"113.49704",
"27.64615"
],
"湘潭市": [
"112.925083",
"27.846725"
],
"雨湖区": [
"112.90399",
"27.86859"
],
"岳塘区": [
"112.9606",
"27.85784"
],
"湘潭县": [
"112.9508",
"27.77893"
],
"湘乡市": [
"112.53512",
"27.73543"
],
"韶山市": [
"112.52655",
"27.91503"
],
"衡阳市": [
"112.607693",
"26.900358"
],
"珠晖区": [
"112.62054",
"26.89361"
],
"雁峰区": [
"112.61654",
"26.88866"
],
"石鼓区": [
"112.61069",
"26.90232"
],
"蒸湘区": [
"112.6033",
"26.89651"
],
"南岳区": [
"112.7384",
"27.23262"
],
"衡阳县": [
"112.37088",
"26.9706"
],
"衡南县": [
"112.67788",
"26.73828"
],
"衡山县": [
"112.86776",
"27.23134"
],
"衡东县": [
"112.94833",
"27.08093"
],
"祁东县": [
"112.09039",
"26.79964"
],
"耒阳市": [
"112.85998",
"26.42132"
],
"常宁市": [
"112.4009",
"26.40692"
],
"邵阳市": [
"111.46923",
"27.237842"
],
"双清区": [
"111.49715",
"27.23291"
],
"大祥区": [
"111.45412",
"27.23332"
],
"北塔区": [
"111.45219",
"27.24648"
],
"邵东县": [
"111.74441",
"27.2584"
],
"新邵县": [
"111.46066",
"27.32169"
],
"邵阳县": [
"111.27459",
"26.99143"
],
"隆回县": [
"111.03216",
"27.10937"
],
"洞口县": [
"110.57388",
"27.05462"
],
"绥宁县": [
"110.15576",
"26.58636"
],
"新宁县": [
"110.85131",
"26.42936"
],
"城步苗族自治县": [
"110.3222",
"26.39048"
],
"武冈市": [
"110.63281",
"26.72817"
],
"岳阳市": [
"113.132855",
"29.37029"
],
"岳阳楼区": [
"113.12942",
"29.3719"
],
"云溪区": [
"113.27713",
"29.47357"
],
"君山区": [
"113.00439",
"29.45941"
],
"岳阳县": [
"113.11987",
"29.14314"
],
"华容县": [
"112.54089",
"29.53019"
],
"湘阴县": [
"112.90911",
"28.68922"
],
"平江县": [
"113.58105",
"28.70664"
],
"汨罗市": [
"113.06707",
"28.80631"
],
"临湘市": [
"113.4501",
"29.47701"
],
"常德市": [
"111.691347",
"29.040225"
],
"武陵区": [
"111.69791",
"29.02876"
],
"鼎城区": [
"111.68078",
"29.01859"
],
"安乡县": [
"112.16732",
"29.41326"
],
"汉寿县": [
"111.96691",
"28.90299"
],
"澧县": [
"111.75866",
"29.63317"
],
"临澧县": [
"111.65161",
"29.44163"
],
"桃源县": [
"111.48892",
"28.90474"
],
"石门县": [
"111.37966",
"29.58424"
],
"津市市": [
"111.87756",
"29.60563"
],
"张家界市": [
"110.479921",
"29.127401"
],
"武陵源区": [
"110.55026",
"29.34574"
],
"慈利县": [
"111.13946",
"29.42989"
],
"桑植县": [
"110.16308",
"29.39815"
],
"益阳市": [
"112.355042",
"28.570066"
],
"资阳区": [
"112.32447",
"28.59095"
],
"赫山区": [
"112.37265",
"28.57425"
],
"南县": [
"112.3963",
"29.36159"
],
"桃江县": [
"112.1557",
"28.51814"
],
"安化县": [
"111.21298",
"28.37424"
],
"沅江市": [
"112.35427",
"28.84403"
],
"郴州市": [
"113.032067",
"25.793589"
],
"北湖区": [
"113.01103",
"25.78405"
],
"苏仙区": [
"113.04226",
"25.80045"
],
"桂阳县": [
"112.73364",
"25.75406"
],
"宜章县": [
"112.95147",
"25.39931"
],
"永兴县": [
"113.11242",
"26.12646"
],
"嘉禾县": [
"112.36935",
"25.58795"
],
"临武县": [
"112.56369",
"25.27602"
],
"汝城县": [
"113.68582",
"25.55204"
],
"桂东县": [
"113.9468",
"26.07987"
],
"安仁县": [
"113.26944",
"26.70931"
],
"资兴市": [
"113.23724",
"25.97668"
],
"永州市": [
"111.608019",
"26.434516"
],
"零陵区": [
"111.62103",
"26.22109"
],
"冷水滩区": [
"111.59214",
"26.46107"
],
"祁阳县": [
"111.84011",
"26.58009"
],
"东安县": [
"111.3164",
"26.39202"
],
"双牌县": [
"111.65927",
"25.95988"
],
"道县": [
"111.60195",
"25.52766"
],
"江永县": [
"111.34082",
"25.27233"
],
"宁远县": [
"111.94625",
"25.56913"
],
"蓝山县": [
"112.19363",
"25.36794"
],
"新田县": [
"112.22103",
"25.9095"
],
"江华瑶族自治县": [
"111.58847",
"25.1845"
],
"怀化市": [
"109.97824",
"27.550082"
],
"鹤城区": [
"109.96509",
"27.54942"
],
"中方县": [
"109.94497",
"27.43988"
],
"沅陵县": [
"110.39633",
"28.45548"
],
"辰溪县": [
"110.18942",
"28.00406"
],
"溆浦县": [
"110.59384",
"27.90836"
],
"会同县": [
"109.73568",
"26.88716"
],
"麻阳苗族自治县": [
"109.80194",
"27.866"
],
"新晃侗族自治县": [
"109.17166",
"27.35937"
],
"芷江侗族自治县": [
"109.6849",
"27.44297"
],
"靖州苗族侗族自治县": [
"109.69821",
"26.57651"
],
"通道侗族自治县": [
"109.78515",
"26.1571"
],
"洪江市": [
"109.83651",
"27.20922"
],
"娄底市": [
"112.008497",
"27.728136"
],
"娄星区": [
"112.00193",
"27.72992"
],
"双峰县": [
"112.19921",
"27.45418"
],
"新化县": [
"111.32739",
"27.7266"
],
"冷水江市": [
"111.43554",
"27.68147"
],
"涟源市": [
"111.67233",
"27.68831"
],
"湘西土家族苗族自治州": [
"109.739735",
"28.314296"
],
"吉首市": [
"109.69799",
"28.26247"
],
"泸溪县": [
"110.21682",
"28.2205"
],
"凤凰县": [
"109.60156",
"27.94822"
],
"花垣县": [
"109.48217",
"28.5721"
],
"保靖县": [
"109.66049",
"28.69997"
],
"古丈县": [
"109.94812",
"28.61944"
],
"永顺县": [
"109.85266",
"29.00103"
],
"龙山县": [
"109.4432",
"29.45693"
],
"广东省": [
"113.280637",
"23.125178"
],
"广州市": [
"113.280637",
"23.125178"
],
"荔湾区": [
"113.2442",
"23.12592"
],
"越秀区": [
"113.26683",
"23.12897"
],
"海珠区": [
"113.26197",
"23.10379"
],
"天河区": [
"113.36112",
"23.12467"
],
"白云区": [
"106.63088",
"26.68284"
],
"黄埔区": [
"113.45895",
"23.10642"
],
"番禺区": [
"113.38397",
"22.93599"
],
"花都区": [
"113.22033",
"23.40358"
],
"南沙区": [
"113.60845",
"22.77144"
],
"从化区": [
"113.587386",
"23.545283"
],
"增城区": [
"113.829579",
"23.290497"
],
"韶关市": [
"113.591544",
"24.801322"
],
"武江区": [
"113.58767",
"24.79264"
],
"浈江区": [
"113.61109",
"24.80438"
],
"曲江区": [
"113.60165",
"24.67915"
],
"始兴县": [
"114.06799",
"24.94759"
],
"仁化县": [
"113.74737",
"25.08742"
],
"翁源县": [
"114.13385",
"24.3495"
],
"乳源瑶族自治县": [
"113.27734",
"24.77803"
],
"新丰县": [
"114.20788",
"24.05924"
],
"乐昌市": [
"113.35653",
"25.12799"
],
"南雄市": [
"114.30966",
"25.11706"
],
"深圳市": [
"114.085947",
"22.547"
],
"罗湖区": [
"114.13116",
"22.54836"
],
"福田区": [
"114.05571",
"22.52245"
],
"宝安区": [
"113.88311",
"22.55371"
],
"龙岗区": [
"114.24771",
"22.71986"
],
"盐田区": [
"114.23733",
"22.5578"
],
"光明新区": [
"113.896026",
"22.777292"
],
"坪山新区": [
"114.34637",
"22.690529"
],
"大鹏新区": [
"114.479901",
"22.587862"
],
"龙华新区": [
"114.036585",
"22.68695"
],
"珠海市": [
"113.552724",
"22.255899"
],
"香洲区": [
"113.5435",
"22.26654"
],
"斗门区": [
"113.29644",
"22.20898"
],
"金湾区": [
"113.36361",
"22.14691"
],
"汕头市": [
"116.708463",
"23.37102"
],
"龙湖区": [
"116.71641",
"23.37166"
],
"金平区": [
"116.70364",
"23.36637"
],
"濠江区": [
"116.72659",
"23.28588"
],
"潮阳区": [
"116.6015",
"23.26485"
],
"潮南区": [
"116.43188",
"23.25"
],
"澄海区": [
"116.75589",
"23.46728"
],
"南澳县": [
"117.01889",
"23.4223"
],
"佛山市": [
"113.122717",
"23.028762"
],
"禅城区": [
"113.1228",
"23.00842"
],
"南海区": [
"113.14299",
"23.02877"
],
"顺德区": [
"113.29394",
"22.80452"
],
"三水区": [
"112.89703",
"23.15564"
],
"高明区": [
"112.89254",
"22.90022"
],
"江门市": [
"113.094942",
"22.590431"
],
"蓬江区": [
"113.07849",
"22.59515"
],
"江海区": [
"113.11099",
"22.56024"
],
"新会区": [
"113.03225",
"22.45876"
],
"台山市": [
"112.79382",
"22.2515"
],
"开平市": [
"112.69842",
"22.37622"
],
"鹤山市": [
"112.96429",
"22.76523"
],
"恩平市": [
"112.30496",
"22.18288"
],
"湛江市": [
"110.405529",
"21.195338"
],
"赤坎区": [
"110.36592",
"21.26606"
],
"霞山区": [
"110.39822",
"21.19181"
],
"坡头区": [
"110.45533",
"21.24472"
],
"麻章区": [
"110.3342",
"21.26333"
],
"遂溪县": [
"110.25003",
"21.37721"
],
"徐闻县": [
"110.17379",
"20.32812"
],
"廉江市": [
"110.28442",
"21.60917"
],
"雷州市": [
"110.10092",
"20.91428"
],
"吴川市": [
"110.77703",
"21.44584"
],
"茂名市": [
"110.919229",
"21.659751"
],
"茂南区": [
"110.9187",
"21.64103"
],
"电白区": [
"111.007264",
"21.507219"
],
"高州市": [
"110.85519",
"21.92057"
],
"化州市": [
"110.63949",
"21.66394"
],
"信宜市": [
"110.94647",
"22.35351"
],
"肇庆市": [
"112.472529",
"23.051546"
],
"端州区": [
"112.48495",
"23.0519"
],
"鼎湖区": [
"112.56643",
"23.15846"
],
"广宁县": [
"112.44064",
"23.6346"
],
"怀集县": [
"112.18396",
"23.90918"
],
"封开县": [
"111.50332",
"23.43571"
],
"德庆县": [
"111.78555",
"23.14371"
],
"高要市": [
"112.45834",
"23.02577"
],
"四会市": [
"112.73416",
"23.32686"
],
"惠州市": [
"114.412599",
"23.079404"
],
"惠城区": [
"114.3828",
"23.08377"
],
"惠阳区": [
"114.45639",
"22.78845"
],
"博罗县": [
"114.28964",
"23.17307"
],
"惠东县": [
"114.72009",
"22.98484"
],
"龙门县": [
"114.25479",
"23.72758"
],
"梅州市": [
"116.117582",
"24.299112"
],
"梅江区": [
"116.11663",
"24.31062"
],
"梅县区": [
"116.097753",
"24.286739"
],
"大埔县": [
"116.69662",
"24.35325"
],
"丰顺县": [
"116.18219",
"23.74094"
],
"五华县": [
"115.77893",
"23.92417"
],
"平远县": [
"115.89556",
"24.57116"
],
"蕉岭县": [
"116.17089",
"24.65732"
],
"兴宁市": [
"115.73141",
"24.14001"
],
"汕尾市": [
"115.364238",
"22.774485"
],
"海丰县": [
"115.32336",
"22.96653"
],
"陆河县": [
"115.65597",
"23.30365"
],
"陆丰市": [
"115.64813",
"22.94335"
],
"河源市": [
"114.697802",
"23.746266"
],
"源城区": [
"114.70242",
"23.7341"
],
"紫金县": [
"115.18365",
"23.63867"
],
"龙川县": [
"115.26025",
"24.10142"
],
"连平县": [
"114.49026",
"24.37156"
],
"和平县": [
"114.93841",
"24.44319"
],
"东源县": [
"114.74633",
"23.78835"
],
"阳江市": [
"111.975107",
"21.859222"
],
"江城区": [
"111.95488",
"21.86193"
],
"阳东区": [
"112.01467",
"21.87398"
],
"阳西县": [
"111.61785",
"21.75234"
],
"阳春市": [
"111.78854",
"22.17232"
],
"清远市": [
"113.036779",
"23.704188"
],
"清城区": [
"113.06265",
"23.69784"
],
"清新区": [
"113.015203",
"23.736949"
],
"佛冈县": [
"113.53286",
"23.87231"
],
"阳山县": [
"112.64129",
"24.46516"
],
"连山壮族瑶族自治县": [
"112.0802",
"24.56807"
],
"连南瑶族自治县": [
"112.28842",
"24.71726"
],
"英德市": [
"113.415",
"24.18571"
],
"连州市": [
"112.38153",
"24.77913"
],
"东莞市": [
"113.760234",
"23.048884"
],
"莞城区": [
"113.751043",
"23.053412"
],
"南城区": [
"113.752125",
"23.02018"
],
"万江区": [
"113.739053",
"23.043842"
],
"石碣镇": [
"113.80217",
"23.09899"
],
"石龙镇": [
"113.876381",
"23.107444"
],
"茶山镇": [
"113.883526",
"23.062375"
],
"石排镇": [
"113.919859",
"23.0863"
],
"企石镇": [
"114.013233",
"23.066044"
],
"横沥镇": [
"113.957436",
"23.025732"
],
"桥头镇": [
"114.01385",
"22.939727"
],
"谢岗镇": [
"114.141396",
"22.959664"
],
"东坑镇": [
"113.939835",
"22.992804"
],
"常平镇": [
"114.029627",
"23.016116"
],
"寮步镇": [
"113.884745",
"22.991738"
],
"大朗镇": [
"113.9271",
"22.965748"
],
"麻涌镇": [
"113.546177",
"23.045315"
],
"中堂镇": [
"113.654422",
"23.090164"
],
"高埗镇": [
"113.735917",
"23.068415"
],
"樟木头镇": [
"114.066298",
"22.956682"
],
"大岭山镇": [
"113.782955",
"22.885366"
],
"望牛墩镇": [
"113.658847",
"23.055018"
],
"黄江镇": [
"113.992635",
"22.877536"
],
"洪梅镇": [
"113.613081",
"22.992675"
],
"清溪镇": [
"114.155796",
"22.844456"
],
"沙田镇": [
"113.760234",
"23.048884"
],
"道滘镇": [
"113.760234",
"23.048884"
],
"塘厦镇": [
"114.10765",
"22.822862"
],
"虎门镇": [
"113.71118",
"22.82615"
],
"厚街镇": [
"113.67301",
"22.940815"
],
"凤岗镇": [
"114.141194",
"22.744598"
],
"长安镇": [
"113.803939",
"22.816644"
],
"中山市": [
"113.382391",
"22.521113"
],
"石岐区": [
"113.378835",
"22.52522"
],
"南区": [
"114.174134",
"22.24676"
],
"五桂山区": [
"113.41079",
"22.51968"
],
"火炬开发区": [
"113.480523",
"22.566082"
],
"黄圃镇": [
"113.342359",
"22.715116"
],
"南头镇": [
"113.296358",
"22.713907"
],
"东凤镇": [
"113.26114",
"22.68775"
],
"阜沙镇": [
"113.353024",
"22.666364"
],
"小榄镇": [
"113.244235",
"22.666951"
],
"东升镇": [
"113.296298",
"22.614003"
],
"古镇镇": [
"113.179745",
"22.611019"
],
"横栏镇": [
"113.265845",
"22.523202"
],
"三角镇": [
"113.423624",
"22.677033"
],
"民众镇": [
"113.486025",
"22.623468"
],
"南朗镇": [
"113.533939",
"22.492378"
],
"港口镇": [
"113.382391",
"22.521113"
],
"大涌镇": [
"113.291708",
"22.467712"
],
"沙溪镇": [
"113.328369",
"22.526325"
],
"三乡镇": [
"113.4334",
"22.352494"
],
"板芙镇": [
"113.320346",
"22.415674"
],
"神湾镇": [
"113.359387",
"22.312476"
],
"坦洲镇": [
"113.485677",
"22.261269"
],
"潮州市": [
"116.632301",
"23.661701"
],
"湘桥区": [
"116.62805",
"23.67451"
],
"潮安区": [
"116.592895",
"23.643656"
],
"饶平县": [
"117.00692",
"23.66994"
],
"揭阳市": [
"116.355733",
"23.543778"
],
"榕城区": [
"116.3671",
"23.52508"
],
"揭东区": [
"116.412947",
"23.569887"
],
"揭西县": [
"115.83883",
"23.42714"
],
"惠来县": [
"116.29599",
"23.03289"
],
"普宁市": [
"116.16564",
"23.29732"
],
"云浮市": [
"112.044439",
"22.929801"
],
"云城区": [
"112.03908",
"22.92996"
],
"云安区": [
"112.00936",
"23.07779"
],
"新兴县": [
"112.23019",
"22.69734"
],
"郁南县": [
"111.53387",
"23.23307"
],
"罗定市": [
"111.56979",
"22.76967"
],
"广西壮族自治区": [
"108.320004",
"22.82402"
],
"南宁市": [
"108.320004",
"22.82402"
],
"兴宁区": [
"108.36694",
"22.85355"
],
"青秀区": [
"108.49545",
"22.78511"
],
"江南区": [
"108.27325",
"22.78127"
],
"西乡塘区": [
"108.31347",
"22.83386"
],
"良庆区": [
"108.41284",
"22.74914"
],
"邕宁区": [
"108.48684",
"22.75628"
],
"武鸣县": [
"108.27719",
"23.15643"
],
"隆安县": [
"107.69192",
"23.17336"
],
"马山县": [
"108.17697",
"23.70931"
],
"上林县": [
"108.60522",
"23.432"
],
"宾阳县": [
"108.81185",
"23.2196"
],
"横县": [
"109.26608",
"22.68448"
],
"埌东新区": [
"108.419094",
"22.812976"
],
"柳州市": [
"109.411703",
"24.314617"
],
"城中区": [
"101.78394",
"36.62279"
],
"鱼峰区": [
"109.4533",
"24.31868"
],
"柳南区": [
"109.38548",
"24.33599"
],
"柳北区": [
"109.40202",
"24.36267"
],
"柳江县": [
"109.33273",
"24.25596"
],
"柳城县": [
"109.23877",
"24.64951"
],
"鹿寨县": [
"109.75177",
"24.47306"
],
"融安县": [
"109.39761",
"25.22465"
],
"融水苗族自治县": [
"109.25634",
"25.06628"
],
"三江侗族自治县": [
"109.60446",
"25.78428"
],
"柳东新区": [
"109.437053",
"24.329204"
],
"桂林市": [
"110.299121",
"25.274215"
],
"秀峰区": [
"110.28915",
"25.28249"
],
"叠彩区": [
"110.30195",
"25.31381"
],
"象山区": [
"110.28108",
"25.26168"
],
"七星区": [
"110.31793",
"25.2525"
],
"雁山区": [
"110.30911",
"25.06038"
],
"临桂区": [
"110.205487",
"25.246257"
],
"阳朔县": [
"110.49475",
"24.77579"
],
"灵川县": [
"110.32949",
"25.41292"
],
"全州县": [
"111.07211",
"25.92799"
],
"兴安县": [
"110.67144",
"25.61167"
],
"永福县": [
"109.98333",
"24.98004"
],
"灌阳县": [
"111.15954",
"25.48803"
],
"龙胜各族自治县": [
"110.01226",
"25.79614"
],
"资源县": [
"110.65255",
"26.04237"
],
"平乐县": [
"110.64175",
"24.63242"
],
"荔浦县": [
"110.3971",
"24.49589"
],
"恭城瑶族自治县": [
"110.83035",
"24.83286"
],
"梧州市": [
"111.316229",
"23.472309"
],
"万秀区": [
"111.32052",
"23.47298"
],
"长洲区": [
"111.27494",
"23.48573"
],
"龙圩区": [
"111.316229",
"23.472309"
],
"苍梧县": [
"111.24533",
"23.42049"
],
"藤县": [
"110.91418",
"23.37605"
],
"蒙山县": [
"110.52221",
"24.20168"
],
"岑溪市": [
"110.99594",
"22.9191"
],
"北海市": [
"109.119254",
"21.473343"
],
"海城区": [
"109.11744",
"21.47501"
],
"银海区": [
"109.13029",
"21.4783"
],
"铁山港区": [
"109.45578",
"21.59661"
],
"合浦县": [
"109.20068",
"21.66601"
],
"防城港市": [
"108.345478",
"21.614631"
],
"港口区": [
"108.38022",
"21.64342"
],
"防城区": [
"108.35726",
"21.76464"
],
"上思县": [
"107.9823",
"22.14957"
],
"东兴市": [
"107.97204",
"21.54713"
],
"钦州市": [
"108.624175",
"21.967127"
],
"钦南区": [
"108.61775",
"21.95137"
],
"钦北区": [
"108.63037",
"21.95127"
],
"灵山县": [
"109.29153",
"22.4165"
],
"浦北县": [
"109.55572",
"22.26888"
],
"贵港市": [
"109.602146",
"23.0936"
],
"港北区": [
"109.57224",
"23.11153"
],
"港南区": [
"109.60617",
"23.07226"
],
"覃塘区": [
"109.44293",
"23.12677"
],
"平南县": [
"110.39062",
"23.54201"
],
"桂平市": [
"110.08105",
"23.39339"
],
"玉林市": [
"110.154393",
"22.63136"
],
"玉州区": [
"110.15114",
"22.6281"
],
"福绵区": [
"110.064816",
"22.583057"
],
"玉东新区": [
"110.154393",
"22.63136"
],
"容县": [
"110.55593",
"22.85701"
],
"陆川县": [
"110.26413",
"22.32454"
],
"博白县": [
"109.97744",
"22.27286"
],
"兴业县": [
"109.87612",
"22.74237"
],
"北流市": [
"110.35302",
"22.70817"
],
"百色市": [
"106.616285",
"23.897742"
],
"右江区": [
"106.61764",
"23.9009"
],
"田阳县": [
"106.91558",
"23.73535"
],
"田东县": [
"107.12432",
"23.60003"
],
"平果县": [
"107.59045",
"23.32969"
],
"德保县": [
"106.61917",
"23.32515"
],
"靖西县": [
"106.41766",
"23.13425"
],
"那坡县": [
"105.84191",
"23.40649"
],
"凌云县": [
"106.56155",
"24.34747"
],
"乐业县": [
"106.56124",
"24.78295"
],
"田林县": [
"106.22882",
"24.29207"
],
"西林县": [
"105.09722",
"24.48966"
],
"隆林各族自治县": [
"105.34295",
"24.77036"
],
"贺州市": [
"111.552056",
"24.414141"
],
"八步区": [
"111.55225",
"24.41179"
],
"昭平县": [
"110.81082",
"24.1701"
],
"钟山县": [
"111.30459",
"24.52482"
],
"富川瑶族自治县": [
"111.27767",
"24.81431"
],
"平桂管理区": [
"111.485651",
"24.458041"
],
"河池市": [
"108.062105",
"24.695899"
],
"金城江区": [
"108.03727",
"24.6897"
],
"南丹县": [
"107.54562",
"24.9776"
],
"天峨县": [
"107.17205",
"24.99593"
],
"凤山县": [
"107.04892",
"24.54215"
],
"东兰县": [
"107.37527",
"24.51053"
],
"罗城仫佬族自治县": [
"108.90777",
"24.77923"
],
"环江毛南族自治县": [
"108.26055",
"24.82916"
],
"巴马瑶族自治县": [
"107.25308",
"24.14135"
],
"都安瑶族自治县": [
"108.10116",
"23.93245"
],
"大化瑶族自治县": [
"107.9985",
"23.74487"
],
"宜州市": [
"108.65304",
"24.49391"
],
"来宾市": [
"109.229772",
"23.733766"
],
"兴宾区": [
"109.23471",
"23.72731"
],
"忻城县": [
"108.66357",
"24.06862"
],
"象州县": [
"109.6994",
"23.97355"
],
"武宣县": [
"109.66284",
"23.59474"
],
"金秀瑶族自治县": [
"110.19079",
"24.12929"
],
"合山市": [
"108.88586",
"23.80619"
],
"崇左市": [
"107.353926",
"22.404108"
],
"江州区": [
"107.34747",
"22.41135"
],
"扶绥县": [
"107.90405",
"22.63413"
],
"宁明县": [
"107.07299",
"22.13655"
],
"龙州县": [
"106.85415",
"22.33937"
],
"大新县": [
"107.19821",
"22.83412"
],
"天等县": [
"107.13998",
"23.077"
],
"凭祥市": [
"106.75534",
"22.10573"
],
"海南省": [
"110.33119",
"20.031971"
],
"海口市": [
"110.33119",
"20.031971"
],
"秀英区": [
"110.29345",
"20.00752"
],
"龙华区": [
"110.30194",
"20.02866"
],
"琼山区": [
"110.35418",
"20.00321"
],
"美兰区": [
"110.36908",
"20.02864"
],
"三亚市": [
"109.508268",
"18.247872"
],
"海棠区": [
"109.508268",
"18.247872"
],
"吉阳区": [
"109.508268",
"18.247872"
],
"天涯区": [
"109.508268",
"18.247872"
],
"崖州区": [
"109.508268",
"18.247872"
],
"三沙市": [
"112.34882",
"16.831039"
],
"西沙群岛": [
"112.025528",
"16.331342"
],
"南沙群岛": [
"116.749998",
"11.471888"
],
"中沙群岛": [
"117.740071",
"15.112856"
],
"五指山市": [
"109.516662",
"18.776921"
],
"琼海市": [
"110.466785",
"19.246011"
],
"儋州市": [
"109.576782",
"19.517486"
],
"文昌市": [
"110.753975",
"19.612986"
],
"万宁市": [
"110.388793",
"18.796216"
],
"东方市": [
"108.653789",
"19.10198"
],
"定安县": [
"110.323959",
"19.699211"
],
"屯昌县": [
"110.102773",
"19.362916"
],
"澄迈县": [
"110.007147",
"19.737095"
],
"临高县": [
"109.687697",
"19.908293"
],
"白沙黎族自治县": [
"109.452606",
"19.224584"
],
"昌江黎族自治县": [
"109.053351",
"19.260968"
],
"乐东黎族自治县": [
"109.175444",
"18.74758"
],
"陵水黎族自治县": [
"110.037218",
"18.505006"
],
"保亭黎族苗族自治县": [
"109.70245",
"18.636371"
],
"琼中黎族苗族自治县": [
"109.839996",
"19.03557"
],
"重庆": [
"106.504962",
"29.533155"
],
"重庆市": [
"106.504962",
"29.533155"
],
"万州区": [
"108.40869",
"30.80788"
],
"涪陵区": [
"107.39007",
"29.70292"
],
"渝中区": [
"106.56901",
"29.55279"
],
"大渡口区": [
"106.48262",
"29.48447"
],
"沙坪坝区": [
"106.45752",
"29.54113"
],
"九龙坡区": [
"106.51107",
"29.50197"
],
"南岸区": [
"106.56347",
"29.52311"
],
"北碚区": [
"106.39614",
"29.80574"
],
"綦江区": [
"106.926779",
"28.960656"
],
"大足区": [
"105.768121",
"29.484025"
],
"渝北区": [
"106.6307",
"29.7182"
],
"巴南区": [
"106.52365",
"29.38311"
],
"黔江区": [
"108.7709",
"29.5332"
],
"长寿区": [
"107.08166",
"29.85359"
],
"江津区": [
"106.25912",
"29.29008"
],
"合川区": [
"106.27633",
"29.97227"
],
"永川区": [
"105.927",
"29.35593"
],
"南川区": [
"107.09936",
"29.15751"
],
"璧山区": [
"106.231126",
"29.593581"
],
"铜梁区": [
"106.054948",
"29.839944"
],
"潼南县": [
"105.84005",
"30.1912"
],
"荣昌县": [
"105.59442",
"29.40488"
],
"梁平县": [
"107.79998",
"30.67545"
],
"城口县": [
"108.66513",
"31.94801"
],
"丰都县": [
"107.73098",
"29.86348"
],
"垫江县": [
"107.35446",
"30.33359"
],
"武隆县": [
"107.7601",
"29.32548"
],
"忠县": [
"108.03689",
"30.28898"
],
"开县": [
"108.39306",
"31.16095"
],
"云阳县": [
"108.69726",
"30.93062"
],
"奉节县": [
"109.46478",
"31.01825"
],
"巫山县": [
"109.87814",
"31.07458"
],
"巫溪县": [
"109.63128",
"31.39756"
],
"石柱土家族自治县": [
"108.11389",
"30.00054"
],
"秀山土家族苗族自治县": [
"108.98861",
"28.45062"
],
"酉阳土家族苗族自治县": [
"108.77212",
"28.8446"
],
"彭水苗族土家族自治县": [
"108.16638",
"29.29516"
],
"两江新区": [
"106.463344",
"29.729153"
],
"北部新区": [
"106.488841",
"29.667062"
],
"保税港区": [
"106.638184",
"29.716311"
],
"工业园区": [
"106.626434",
"29.55554"
],
"四川省": [
"104.065735",
"30.659462"
],
"成都市": [
"104.065735",
"30.659462"
],
"锦江区": [
"104.08347",
"30.65614"
],
"青羊区": [
"104.06151",
"30.67387"
],
"金牛区": [
"104.05114",
"30.69126"
],
"武侯区": [
"104.04303",
"30.64235"
],
"成华区": [
"104.10193",
"30.65993"
],
"龙泉驿区": [
"104.27462",
"30.55658"
],
"青白江区": [
"104.251",
"30.87841"
],
"新都区": [
"104.15921",
"30.82314"
],
"温江区": [
"103.84881",
"30.68444"
],
"金堂县": [
"104.41195",
"30.86195"
],
"双流县": [
"103.92373",
"30.57444"
],
"郫县": [
"103.88717",
"30.81054"
],
"大邑县": [
"103.52075",
"30.58738"
],
"蒲江县": [
"103.50616",
"30.19667"
],
"新津县": [
"103.8114",
"30.40983"
],
"都江堰市": [
"103.61941",
"30.99825"
],
"彭州市": [
"103.958",
"30.99011"
],
"邛崃市": [
"103.46283",
"30.41489"
],
"崇州市": [
"103.67285",
"30.63014"
],
"自贡市": [
"104.773447",
"29.352765"
],
"自流井区": [
"104.77719",
"29.33745"
],
"贡井区": [
"104.71536",
"29.34576"
],
"大安区": [
"120.58652",
"24.34607"
],
"沿滩区": [
"104.88012",
"29.26611"
],
"荣县": [
"104.4176",
"29.44445"
],
"富顺县": [
"104.97491",
"29.18123"
],
"攀枝花市": [
"101.716007",
"26.580446"
],
"东区": [
"114.255993",
"22.262755"
],
"西区": [
"120.437493",
"23.473029"
],
"仁和区": [
"101.73812",
"26.49841"
],
"米易县": [
"102.11132",
"26.88718"
],
"盐边县": [
"101.85446",
"26.68847"
],
"泸州市": [
"105.443348",
"28.889138"
],
"江阳区": [
"105.45336",
"28.88934"
],
"纳溪区": [
"105.37255",
"28.77343"
],
"龙马潭区": [
"105.43774",
"28.91308"
],
"泸县": [
"105.38192",
"29.15041"
],
"合江县": [
"105.8352",
"28.81005"
],
"叙永县": [
"105.44473",
"28.15586"
],
"古蔺县": [
"105.81347",
"28.03867"
],
"德阳市": [
"104.398651",
"31.127991"
],
"旌阳区": [
"104.39367",
"31.13906"
],
"中江县": [
"104.67861",
"31.03297"
],
"罗江县": [
"104.51025",
"31.31665"
],
"广汉市": [
"104.28234",
"30.97686"
],
"什邡市": [
"104.16754",
"31.1264"
],
"绵竹市": [
"104.22076",
"31.33772"
],
"绵阳市": [
"104.741722",
"31.46402"
],
"涪城区": [
"104.75719",
"31.45522"
],
"游仙区": [
"104.77092",
"31.46574"
],
"三台县": [
"105.09079",
"31.09179"
],
"盐亭县": [
"105.3898",
"31.22176"
],
"安县": [
"104.56738",
"31.53487"
],
"梓潼县": [
"105.16183",
"31.6359"
],
"北川羌族自治县": [
"104.46408",
"31.83286"
],
"平武县": [
"104.52862",
"32.40791"
],
"江油市": [
"104.74539",
"31.77775"
],
"广元市": [
"105.829757",
"32.433668"
],
"利州区": [
"105.826194",
"32.432276"
],
"昭化区": [
"105.640491",
"32.386518"
],
"朝天区": [
"105.89273",
"32.64398"
],
"旺苍县": [
"106.29022",
"32.22845"
],
"青川县": [
"105.2391",
"32.58563"
],
"剑阁县": [
"105.5252",
"32.28845"
],
"苍溪县": [
"105.936",
"31.73209"
],
"遂宁市": [
"105.571331",
"30.513311"
],
"船山区": [
"105.5809",
"30.49991"
],
"安居区": [
"105.46402",
"30.35778"
],
"蓬溪县": [
"105.70752",
"30.75775"
],
"射洪县": [
"105.38922",
"30.87203"
],
"大英县": [
"105.24346",
"30.59434"
],
"内江市": [
"105.066138",
"29.58708"
],
"东兴区": [
"105.07554",
"29.59278"
],
"威远县": [
"104.66955",
"29.52823"
],
"资中县": [
"104.85205",
"29.76409"
],
"隆昌县": [
"105.28738",
"29.33937"
],
"乐山市": [
"103.761263",
"29.582024"
],
"沙湾区": [
"103.54873",
"29.41194"
],
"五通桥区": [
"103.82345",
"29.40344"
],
"金口河区": [
"103.07858",
"29.24578"
],
"犍为县": [
"103.94989",
"29.20973"
],
"井研县": [
"104.07019",
"29.65228"
],
"夹江县": [
"103.57199",
"29.73868"
],
"沐川县": [
"103.90353",
"28.95646"
],
"峨边彝族自治县": [
"103.26339",
"29.23004"
],
"马边彝族自治县": [
"103.54617",
"28.83593"
],
"峨眉山市": [
"103.4844",
"29.60117"
],
"南充市": [
"106.082974",
"30.795281"
],
"顺庆区": [
"106.09216",
"30.79642"
],
"高坪区": [
"106.11894",
"30.78162"
],
"嘉陵区": [
"106.07141",
"30.75848"
],
"南部县": [
"106.06738",
"31.35451"
],
"营山县": [
"106.56637",
"31.07747"
],
"蓬安县": [
"106.41262",
"31.02964"
],
"仪陇县": [
"106.29974",
"31.27628"
],
"西充县": [
"105.89996",
"30.9969"
],
"阆中市": [
"106.00494",
"31.55832"
],
"眉山市": [
"103.831788",
"30.048318"
],
"东坡区": [
"103.832",
"30.04219"
],
"彭山区": [
"103.87268",
"30.19283"
],
"仁寿县": [
"104.13412",
"29.99599"
],
"洪雅县": [
"103.37313",
"29.90661"
],
"丹棱县": [
"103.51339",
"30.01562"
],
"青神县": [
"103.84771",
"29.83235"
],
"宜宾市": [
"104.630825",
"28.760189"
],
"翠屏区": [
"104.61978",
"28.76566"
],
"南溪区": [
"104.981133",
"28.839806"
],
"宜宾县": [
"104.53314",
"28.68996"
],
"江安县": [
"105.06683",
"28.72385"
],
"长宁县": [
"104.9252",
"28.57777"
],
"高县": [
"104.51754",
"28.43619"
],
"珙县": [
"104.71398",
"28.44512"
],
"筠连县": [
"104.51217",
"28.16495"
],
"兴文县": [
"105.23675",
"28.3044"
],
"屏山县": [
"104.16293",
"28.64369"
],
"广安市": [
"106.633369",
"30.456398"
],
"广安区": [
"106.64163",
"30.47389"
],
"前锋区": [
"106.893537",
"30.494572"
],
"岳池县": [
"106.44079",
"30.53918"
],
"武胜县": [
"106.29592",
"30.34932"
],
"邻水县": [
"106.92968",
"30.33449"
],
"华蓥市": [
"106.78466",
"30.39007"
],
"达州市": [
"107.502262",
"31.209484"
],
"通川区": [
"107.50456",
"31.21469"
],
"达川区": [
"107.502262",
"31.209484"
],
"宣汉县": [
"107.72775",
"31.35516"
],
"开江县": [
"107.86889",
"31.0841"
],
"大竹县": [
"107.20855",
"30.74147"
],
"渠县": [
"106.97381",
"30.8376"
],
"万源市": [
"108.03598",
"32.08091"
],
"雅安市": [
"103.001033",
"29.987722"
],
"雨城区": [
"103.03305",
"30.00531"
],
"名山区": [
"103.112214",
"30.084718"
],
"荥经县": [
"102.84652",
"29.79402"
],
"汉源县": [
"102.6784",
"29.35168"
],
"石棉县": [
"102.35943",
"29.22796"
],
"天全县": [
"102.75906",
"30.06754"
],
"芦山县": [
"102.92791",
"30.14369"
],
"宝兴县": [
"102.81555",
"30.36836"
],
"巴中市": [
"106.753669",
"31.858809"
],
"巴州区": [
"106.76889",
"31.85125"
],
"恩阳区": [
"106.753669",
"31.858809"
],
"通江县": [
"107.24398",
"31.91294"
],
"南江县": [
"106.84164",
"32.35335"
],
"平昌县": [
"107.10424",
"31.5594"
],
"资阳市": [
"104.641917",
"30.122211"
],
"雁江区": [
"104.65216",
"30.11525"
],
"安岳县": [
"105.3363",
"30.09786"
],
"乐至县": [
"105.03207",
"30.27227"
],
"简阳市": [
"104.54864",
"30.3904"
],
"阿坝藏族羌族自治州": [
"102.221374",
"31.899792"
],
"汶川县": [
"103.59079",
"31.47326"
],
"理县": [
"103.17175",
"31.43603"
],
"茂县": [
"103.85372",
"31.682"
],
"松潘县": [
"103.59924",
"32.63871"
],
"九寨沟县": [
"104.23672",
"33.26318"
],
"金川县": [
"102.06555",
"31.47623"
],
"小金县": [
"102.36499",
"30.99934"
],
"黑水县": [
"102.99176",
"32.06184"
],
"马尔康县": [
"102.20625",
"31.90584"
],
"壤塘县": [
"100.9783",
"32.26578"
],
"阿坝县": [
"101.70632",
"32.90301"
],
"若尔盖县": [
"102.9598",
"33.57432"
],
"红原县": [
"102.54525",
"32.78989"
],
"甘孜藏族自治州": [
"101.963815",
"30.050663"
],
"康定县": [
"101.96487",
"30.05532"
],
"泸定县": [
"102.23507",
"29.91475"
],
"丹巴县": [
"101.88424",
"30.87656"
],
"九龙县": [
"101.50848",
"29.00091"
],
"雅江县": [
"101.01492",
"30.03281"
],
"道孚县": [
"101.12554",
"30.98046"
],
"炉霍县": [
"100.67681",
"31.3917"
],
"甘孜县": [
"99.99307",
"31.62672"
],
"新龙县": [
"100.3125",
"30.94067"
],
"德格县": [
"98.58078",
"31.80615"
],
"白玉县": [
"98.82568",
"31.20902"
],
"石渠县": [
"98.10156",
"32.97884"
],
"色达县": [
"100.33224",
"32.26839"
],
"理塘县": [
"100.27005",
"29.99674"
],
"巴塘县": [
"99.10409",
"30.00423"
],
"乡城县": [
"99.79943",
"28.93554"
],
"稻城县": [
"100.29809",
"29.0379"
],
"得荣县": [
"99.28628",
"28.71297"
],
"凉山彝族自治州": [
"102.258746",
"27.886762"
],
"西昌市": [
"102.26413",
"27.89524"
],
"木里藏族自治县": [
"101.2796",
"27.92875"
],
"盐源县": [
"101.5097",
"27.42177"
],
"德昌县": [
"102.18017",
"27.40482"
],
"会理县": [
"102.24539",
"26.65627"
],
"会东县": [
"102.57815",
"26.63429"
],
"宁南县": [
"102.76116",
"27.06567"
],
"普格县": [
"102.54055",
"27.37485"
],
"布拖县": [
"102.81234",
"27.7079"
],
"金阳县": [
"103.24774",
"27.69698"
],
"昭觉县": [
"102.84661",
"28.01155"
],
"喜德县": [
"102.41336",
"28.30739"
],
"冕宁县": [
"102.17108",
"28.55161"
],
"越西县": [
"102.5079",
"28.64133"
],
"甘洛县": [
"102.77154",
"28.96624"
],
"美姑县": [
"103.13116",
"28.32596"
],
"雷波县": [
"103.57287",
"28.26407"
],
"贵州省": [
"106.713478",
"26.578343"
],
"贵阳市": [
"106.713478",
"26.578343"
],
"南明区": [
"106.7145",
"26.56819"
],
"云岩区": [
"106.72485",
"26.60484"
],
"花溪区": [
"106.67688",
"26.43343"
],
"乌当区": [
"106.7521",
"26.6302"
],
"观山湖区": [
"106.625442",
"26.618209"
],
"开阳县": [
"106.9692",
"27.05533"
],
"息烽县": [
"106.738",
"27.09346"
],
"修文县": [
"106.59487",
"26.83783"
],
"清镇市": [
"106.46862",
"26.55261"
],
"六盘水市": [
"104.846743",
"26.584643"
],
"钟山区": [
"104.87848",
"26.57699"
],
"六枝特区": [
"105.48062",
"26.20117"
],
"水城县": [
"104.95764",
"26.54785"
],
"盘县": [
"104.47061",
"25.7136"
],
"遵义市": [
"106.937265",
"27.706626"
],
"红花岗区": [
"106.89404",
"27.64471"
],
"汇川区": [
"106.9393",
"27.70625"
],
"遵义县": [
"106.83331",
"27.53772"
],
"桐梓县": [
"106.82568",
"28.13806"
],
"绥阳县": [
"107.19064",
"27.94702"
],
"正安县": [
"107.44357",
"28.5512"
],
"道真仡佬族苗族自治县": [
"107.61152",
"28.864"
],
"务川仡佬族苗族自治县": [
"107.88935",
"28.52227"
],
"凤冈县": [
"107.71682",
"27.95461"
],
"湄潭县": [
"107.48779",
"27.76676"
],
"余庆县": [
"107.88821",
"27.22532"
],
"习水县": [
"106.21267",
"28.31976"
],
"赤水市": [
"105.69845",
"28.58921"
],
"仁怀市": [
"106.40152",
"27.79231"
],
"安顺市": [
"105.932188",
"26.245544"
],
"西秀区": [
"105.96585",
"26.24491"
],
"平坝区": [
"106.25683",
"26.40543"
],
"普定县": [
"105.74285",
"26.30141"
],
"镇宁布依族苗族自治县": [
"105.76513",
"26.05533"
],
"关岭布依族苗族自治县": [
"105.61883",
"25.94248"
],
"紫云苗族布依族自治县": [
"106.08364",
"25.75258"
],
"毕节市": [
"105.28501",
"27.301693"
],
"七星关区": [
"104.9497",
"27.153556"
],
"大方县": [
"105.609254",
"27.143521"
],
"黔西县": [
"106.038299",
"27.024923"
],
"金沙县": [
"106.222103",
"27.459693"
],
"织金县": [
"105.768997",
"26.668497"
],
"纳雍县": [
"105.375322",
"26.769875"
],
"威宁彝族回族苗族自治县": [
"104.286523",
"26.859099"
],
"赫章县": [
"104.726438",
"27.119243"
],
"铜仁市": [
"109.191555",
"27.718346"
],
"碧江区": [
"109.191555",
"27.718346"
],
"万山区": [
"109.21199",
"27.51903"
],
"江口县": [
"108.848427",
"27.691904"
],
"玉屏侗族自治县": [
"108.917882",
"27.238024"
],
"石阡县": [
"108.229854",
"27.519386"
],
"思南县": [
"108.255827",
"27.941331"
],
"印江土家族苗族自治县": [
"108.405517",
"27.997976"
],
"德江县": [
"108.117317",
"28.26094"
],
"沿河土家族自治县": [
"108.495746",
"28.560487"
],
"松桃苗族自治县": [
"109.202627",
"28.165419"
],
"黔西南布依族苗族自治州": [
"104.897971",
"25.08812"
],
"兴义市 ": [
"104.89548",
"25.09205"
],
"兴仁县": [
"105.18652",
"25.43282"
],
"普安县": [
"104.95529",
"25.78603"
],
"晴隆县": [
"105.2192",
"25.83522"
],
"贞丰县": [
"105.65454",
"25.38464"
],
"望谟县": [
"106.09957",
"25.17822"
],
"册亨县": [
"105.8124",
"24.98376"
],
"安龙县": [
"105.44268",
"25.09818"
],
"黔东南苗族侗族自治州": [
"107.977488",
"26.583352"
],
"凯里市": [
"107.98132",
"26.56689"
],
"黄平县": [
"107.90179",
"26.89573"
],
"施秉县": [
"108.12597",
"27.03495"
],
"三穗县": [
"108.67132",
"26.94765"
],
"镇远县": [
"108.42721",
"27.04933"
],
"岑巩县": [
"108.81884",
"27.17539"
],
"天柱县": [
"109.20718",
"26.90781"
],
"锦屏县": [
"109.19982",
"26.67635"
],
"剑河县": [
"108.5913",
"26.6525"
],
"台江县": [
"108.31814",
"26.66916"
],
"黎平县": [
"109.13607",
"26.23114"
],
"榕江县": [
"108.52072",
"25.92421"
],
"从江县": [
"108.90527",
"25.75415"
],
"雷山县": [
"108.07745",
"26.38385"
],
"麻江县": [
"107.59155",
"26.49235"
],
"丹寨县": [
"107.79718",
"26.19816"
],
"黔南布依族苗族自治州": [
"107.517156",
"26.258219"
],
"都匀市": [
"107.51872",
"26.2594"
],
"福泉市": [
"107.51715",
"26.67989"
],
"荔波县": [
"107.88592",
"25.4139"
],
"贵定县": [
"107.23654",
"26.57812"
],
"瓮安县": [
"107.4757",
"27.06813"
],
"独山县": [
"107.54101",
"25.8245"
],
"平塘县": [
"107.32428",
"25.83294"
],
"罗甸县": [
"106.75186",
"25.42586"
],
"长顺县": [
"106.45217",
"26.02299"
],
"龙里县": [
"106.97662",
"26.45076"
],
"惠水县": [
"106.65911",
"26.13389"
],
"三都水族自治县": [
"107.87464",
"25.98562"
],
"云南省": [
"102.712251",
"25.040609"
],
"昆明市": [
"102.712251",
"25.040609"
],
"五华区": [
"102.70786",
"25.03521"
],
"盘龙区": [
"102.71994",
"25.04053"
],
"官渡区": [
"102.74362",
"25.01497"
],
"西山区": [
"102.66464",
"25.03796"
],
"东川区": [
"103.18832",
"26.083"
],
"呈贡区": [
"102.801382",
"24.889275"
],
"晋宁县": [
"102.59393",
"24.6665"
],
"富民县": [
"102.4985",
"25.22119"
],
"宜良县": [
"103.14117",
"24.91705"
],
"石林彝族自治县": [
"103.27148",
"24.75897"
],
"嵩明县": [
"103.03729",
"25.33986"
],
"禄劝彝族苗族自治县": [
"102.4671",
"25.55387"
],
"寻甸回族彝族自治县 ": [
"103.2557",
"25.55961"
],
"安宁市": [
"102.46972",
"24.91652"
],
"曲靖市": [
"103.797851",
"25.501557"
],
"麒麟区": [
"103.80504",
"25.49515"
],
"马龙县": [
"103.57873",
"25.42521"
],
"陆良县": [
"103.6665",
"25.02335"
],
"师宗县": [
"103.99084",
"24.82822"
],
"罗平县": [
"104.30859",
"24.88444"
],
"富源县": [
"104.25387",
"25.66587"
],
"会泽县": [
"103.30017",
"26.41076"
],
"沾益县": [
"103.82135",
"25.60715"
],
"宣威市": [
"104.10409",
"26.2173"
],
"玉溪市": [
"102.543907",
"24.350461"
],
"红塔区": [
"102.5449",
"24.35411"
],
"江川县": [
"102.75412",
"24.28863"
],
"澄江县": [
"102.90817",
"24.67376"
],
"通海县": [
"102.76641",
"24.11362"
],
"华宁县": [
"102.92831",
"24.1926"
],
"易门县": [
"102.16354",
"24.67122"
],
"峨山彝族自治县": [
"102.40576",
"24.16904"
],
"新平彝族傣族自治县": [
"101.98895",
"24.06886"
],
"元江哈尼族彝族傣族自治县": [
"101.99812",
"23.59655"
],
"保山市": [
"99.167133",
"25.111802"
],
"隆阳区": [
"99.16334",
"25.11163"
],
"施甸县": [
"99.18768",
"24.72418"
],
"腾冲县": [
"98.49414",
"25.02539"
],
"龙陵县": [
"98.69024",
"24.58746"
],
"昌宁县": [
"99.6036",
"24.82763"
],
"昭通市": [
"103.717216",
"27.336999"
],
"昭阳区": [
"103.70654",
"27.31998"
],
"鲁甸县": [
"103.54721",
"27.19238"
],
"巧家县": [
"102.92416",
"26.91237"
],
"盐津县": [
"104.23461",
"28.10856"
],
"大关县": [
"103.89254",
"27.7488"
],
"永善县": [
"103.63504",
"28.2279"
],
"绥江县": [
"103.94937",
"28.59661"
],
"镇雄县": [
"104.87258",
"27.43981"
],
"彝良县": [
"104.04983",
"27.62809"
],
"威信县": [
"105.04754",
"27.84065"
],
"水富县": [
"104.4158",
"28.62986"
],
"丽江市": [
"100.233026",
"26.872108"
],
"古城区": [
"100.2257",
"26.87697"
],
"玉龙纳西族自治县": [
"100.2369",
"26.82149"
],
"永胜县": [
"100.74667",
"26.68591"
],
"华坪县": [
"101.26562",
"26.62967"
],
"宁蒗彝族自治县": [
"100.8507",
"27.28179"
],
"普洱市": [
"100.972344",
"22.777321"
],
"思茅区": [
"100.97716",
"22.78691"
],
"宁洱哈尼族彝族自治县": [
"101.04653",
"23.06341"
],
"墨江哈尼族自治县": [
"101.69171",
"23.43214"
],
"景东彝族自治县": [
"100.83599",
"24.44791"
],
"景谷傣族彝族自治县": [
"100.70251",
"23.49705"
],
"镇沅彝族哈尼族拉祜族自治县": [
"101.10675",
"24.00557"
],
"江城哈尼族彝族自治县": [
"101.85788",
"22.58424"
],
"孟连傣族拉祜族佤族自治县": [
"99.58424",
"22.32922"
],
"澜沧拉祜族自治县": [
"99.93591",
"22.55474"
],
"西盟佤族自治县": [
"99.59869",
"22.64774"
],
"临沧市": [
"100.08697",
"23.886567"
],
"临翔区": [
"100.08242",
"23.89497"
],
"凤庆县": [
"99.92837",
"24.58034"
],
"云县": [
"100.12808",
"24.44675"
],
"永德县": [
"99.25326",
"24.0276"
],
"镇康县": [
"98.8255",
"23.76241"
],
"双江拉祜族佤族布朗族傣族自治县": [
"99.82769",
"23.47349"
],
"耿马傣族佤族自治县": [
"99.39785",
"23.53776"
],
"沧源佤族自治县": [
"99.24545",
"23.14821"
],
"楚雄彝族自治州": [
"101.546046",
"25.041988"
],
"楚雄市": [
"101.54615",
"25.0329"
],
"双柏县": [
"101.64205",
"24.68882"
],
"牟定县": [
"101.54",
"25.31551"
],
"南华县": [
"101.27313",
"25.19293"
],
"姚安县": [
"101.24279",
"25.50467"
],
"大姚县": [
"101.32397",
"25.72218"
],
"永仁县": [
"101.6716",
"26.05794"
],
"元谋县": [
"101.87728",
"25.70438"
],
"武定县": [
"102.4038",
"25.5295"
],
"禄丰县": [
"102.07797",
"25.14815"
],
"红河哈尼族彝族自治州": [
"103.384182",
"23.366775"
],
"个旧市": [
"103.15966",
"23.35894"
],
"开远市": [
"103.26986",
"23.71012"
],
"蒙自市": [
"103.385005",
"23.366843"
],
"弥勒市": [
"103.436988",
"24.40837"
],
"屏边苗族自治县": [
"103.68554",
"22.98425"
],
"建水县": [
"102.82656",
"23.63472"
],
"石屏县": [
"102.49408",
"23.71441"
],
"泸西县": [
"103.76373",
"24.52854"
],
"元阳县": [
"102.83261",
"23.22281"
],
"红河县": [
"102.42059",
"23.36767"
],
"金平苗族瑶族傣族自治县": [
"103.22651",
"22.77959"
],
"绿春县": [
"102.39672",
"22.99371"
],
"河口瑶族自治县": [
"103.93936",
"22.52929"
],
"文山壮族苗族自治州": [
"104.24401",
"23.36951"
],
"文山市": [
"104.244277",
"23.369216"
],
"砚山县": [
"104.33306",
"23.60723"
],
"西畴县": [
"104.67419",
"23.43941"
],
"麻栗坡县": [
"104.70132",
"23.12028"
],
"马关县": [
"104.39514",
"23.01293"
],
"丘北县": [
"104.19256",
"24.03957"
],
"广南县": [
"105.05511",
"24.0464"
],
"富宁县": [
"105.63085",
"23.62536"
],
"西双版纳傣族自治州": [
"100.797941",
"22.001724"
],
"景洪市": [
"100.79977",
"22.01071"
],
"勐海县": [
"100.44931",
"21.96175"
],
"勐腊县": [
"101.56488",
"21.48162"
],
"大理白族自治州": [
"100.240037",
"25.592765"
],
"大理市": [
"100.22998",
"25.59157"
],
"漾濞彝族自治县": [
"99.95474",
"25.6652"
],
"祥云县": [
"100.55761",
"25.47342"
],
"宾川县": [
"100.57666",
"25.83144"
],
"弥渡县": [
"100.49075",
"25.34179"
],
"南涧彝族自治县": [
"100.50964",
"25.04349"
],
"巍山彝族回族自治县": [
"100.30612",
"25.23197"
],
"永平县": [
"99.54095",
"25.46451"
],
"云龙县": [
"99.37255",
"25.88505"
],
"洱源县": [
"99.94903",
"26.10829"
],
"剑川县": [
"99.90545",
"26.53688"
],
"鹤庆县": [
"100.17697",
"26.55798"
],
"德宏傣族景颇族自治州": [
"98.578363",
"24.436694"
],
"瑞丽市": [
"97.85183",
"24.01277"
],
"芒市": [
"98.588641",
"24.433735"
],
"梁河县": [
"98.29705",
"24.80658"
],
"盈江县": [
"97.93179",
"24.70579"
],
"陇川县": [
"97.79199",
"24.18302"
],
"怒江傈僳族自治州": [
"98.854304",
"25.850949"
],
"泸水县": [
"98.85534",
"25.83772"
],
"福贡县": [
"98.86969",
"26.90366"
],
"贡山独龙族怒族自治县": [
"98.66583",
"27.74088"
],
"兰坪白族普米族自治县": [
"99.41891",
"26.45251"
],
"迪庆藏族自治州": [
"99.706463",
"27.826853"
],
"香格里拉市": [
"99.70601",
"27.82308"
],
"德钦县": [
"98.91082",
"28.4863"
],
"维西傈僳族自治县": [
"99.28402",
"27.1793"
],
"西藏自治区": [
"91.132212",
"29.660361"
],
"拉萨市": [
"91.132212",
"29.660361"
],
"城关区": [
"103.8252",
"36.05725"
],
"林周县": [
"91.2586",
"29.89445"
],
"当雄县": [
"91.10076",
"30.48309"
],
"尼木县": [
"90.16378",
"29.43353"
],
"曲水县": [
"90.73187",
"29.35636"
],
"堆龙德庆县": [
"91.00033",
"29.65002"
],
"达孜县": [
"91.35757",
"29.6722"
],
"墨竹工卡县": [
"91.72814",
"29.83614"
],
"日喀则市": [
"88.884874",
"29.263792"
],
"桑珠孜区": [
"88.880367",
"29.269565"
],
"南木林县": [
"89.09686",
"29.68206"
],
"江孜县": [
"89.60263",
"28.91744"
],
"定日县": [
"87.12176",
"28.66129"
],
"萨迦县": [
"88.02191",
"28.90299"
],
"拉孜县": [
"87.63412",
"29.085"
],
"昂仁县": [
"87.23858",
"29.29496"
],
"谢通门县": [
"88.26242",
"29.43337"
],
"白朗县": [
"89.26205",
"29.10553"
],
"仁布县": [
"89.84228",
"29.2301"
],
"康马县": [
"89.68527",
"28.5567"
],
"定结县": [
"87.77255",
"28.36403"
],
"仲巴县": [
"84.02951",
"29.76595"
],
"亚东县": [
"88.90802",
"27.4839"
],
"吉隆县": [
"85.29846",
"28.85382"
],
"聂拉木县": [
"85.97998",
"28.15645"
],
"萨嘎县": [
"85.23413",
"29.32936"
],
"岗巴县": [
"88.52069",
"28.27504"
],
"昌都市": [
"97.178452",
"31.136875"
],
"卡若区": [
"97.18043",
"31.1385"
],
"江达县": [
"98.21865",
"31.50343"
],
"贡觉县": [
"98.27163",
"30.85941"
],
"类乌齐县": [
"96.60015",
"31.21207"
],
"丁青县": [
"95.59362",
"31.41621"
],
"察雅县": [
"97.56521",
"30.65336"
],
"八宿县": [
"96.9176",
"30.05346"
],
"左贡县": [
"97.84429",
"29.67108"
],
"芒康县": [
"98.59378",
"29.67946"
],
"洛隆县": [
"95.82644",
"30.74049"
],
"边坝县": [
"94.70687",
"30.93434"
],
"山南地区": [
"91.766529",
"29.236023"
],
"乃东县": [
"91.76153",
"29.2249"
],
"扎囊县": [
"91.33288",
"29.2399"
],
"贡嘎县": [
"90.98867",
"29.29408"
],
"桑日县": [
"92.02005",
"29.26643"
],
"琼结县": [
"91.68093",
"29.02632"
],
"曲松县": [
"92.20263",
"29.06412"
],
"措美县": [
"91.43237",
"28.43794"
],
"洛扎县": [
"90.86035",
"28.3872"
],
"加查县": [
"92.57702",
"29.13973"
],
"隆子县": [
"92.46148",
"28.40797"
],
"错那县": [
"91.95752",
"27.99224"
],
"浪卡子县": [
"90.40002",
"28.96948"
],
"那曲地区": [
"92.060214",
"31.476004"
],
"那曲县": [
"92.0535",
"31.46964"
],
"嘉黎县": [
"93.24987",
"30.64233"
],
"比如县": [
"93.68685",
"31.4779"
],
"聂荣县": [
"92.29574",
"32.11193"
],
"安多县": [
"91.6795",
"32.26125"
],
"申扎县": [
"88.70776",
"30.92995"
],
"索县": [
"93.78295",
"31.88427"
],
"班戈县": [
"90.01907",
"31.36149"
],
"巴青县": [
"94.05316",
"31.91833"
],
"尼玛县": [
"87.25256",
"31.79654"
],
"双湖县": [
"88.837776",
"33.189032"
],
"阿里地区": [
"80.105498",
"32.503187"
],
"普兰县": [
"81.177",
"30.30002"
],
"札达县": [
"79.80255",
"31.48345"
],
"噶尔县": [
"80.09579",
"32.50024"
],
"日土县": [
"79.7131",
"33.38741"
],
"革吉县": [
"81.151",
"32.3964"
],
"改则县": [
"84.06295",
"32.30446"
],
"措勤县": [
"85.16616",
"31.02095"
],
"林芝地区": [
"94.362348",
"29.654693"
],
"林芝县": [
"94.48391",
"29.57562"
],
"工布江达县": [
"93.2452",
"29.88576"
],
"米林县": [
"94.21316",
"29.21535"
],
"墨脱县": [
"95.3316",
"29.32698"
],
"波密县": [
"95.77096",
"29.85907"
],
"察隅县": [
"97.46679",
"28.6618"
],
"朗县": [
"93.0754",
"29.04549"
],
"陕西省": [
"108.948024",
"34.263161"
],
"西安市": [
"108.948024",
"34.263161"
],
"碑林区": [
"108.93426",
"34.2304"
],
"莲湖区": [
"108.9401",
"34.26709"
],
"灞桥区": [
"109.06451",
"34.27264"
],
"未央区": [
"108.94683",
"34.29296"
],
"雁塔区": [
"108.94866",
"34.22245"
],
"阎良区": [
"109.22616",
"34.66221"
],
"临潼区": [
"109.21417",
"34.36665"
],
"蓝田县": [
"109.32339",
"34.15128"
],
"周至县": [
"108.22207",
"34.16337"
],
"户县": [
"108.60513",
"34.10856"
],
"高陵区": [
"109.08816",
"34.53483"
],
"铜川市": [
"108.963122",
"34.90892"
],
"王益区": [
"109.07564",
"35.06896"
],
"印台区": [
"109.10208",
"35.1169"
],
"耀州区": [
"108.98556",
"34.91308"
],
"宜君县": [
"109.11813",
"35.40108"
],
"宝鸡市": [
"107.14487",
"34.369315"
],
"渭滨区": [
"107.14991",
"34.37116"
],
"金台区": [
"107.14691",
"34.37612"
],
"陈仓区": [
"107.38742",
"34.35451"
],
"凤翔县": [
"107.39645",
"34.52321"
],
"岐山县": [
"107.62173",
"34.44378"
],
"扶风县": [
"107.90017",
"34.37524"
],
"眉县": [
"107.75079",
"34.27569"
],
"陇县": [
"106.85946",
"34.89404"
],
"千阳县": [
"107.13043",
"34.64219"
],
"麟游县": [
"107.79623",
"34.67844"
],
"凤县": [
"106.52356",
"33.91172"
],
"太白县": [
"107.31646",
"34.06207"
],
"咸阳市": [
"108.705117",
"34.333439"
],
"秦都区": [
"108.71493",
"34.33804"
],
"杨陵区": [
"108.083481",
"34.270434"
],
"渭城区": [
"108.72227",
"34.33198"
],
"三原县": [
"108.93194",
"34.61556"
],
"泾阳县": [
"108.84259",
"34.52705"
],
"乾县": [
"108.24231",
"34.52946"
],
"礼泉县": [
"108.4263",
"34.48455"
],
"永寿县": [
"108.14474",
"34.69081"
],
"彬县": [
"108.08468",
"35.0342"
],
"长武县": [
"107.7951",
"35.2067"
],
"旬邑县": [
"108.3341",
"35.11338"
],
"淳化县": [
"108.58026",
"34.79886"
],
"武功县": [
"108.20434",
"34.26003"
],
"兴平市": [
"108.49057",
"34.29785"
],
"渭南市": [
"109.502882",
"34.499381"
],
"临渭区": [
"109.49296",
"34.49822"
],
"华县": [
"109.77185",
"34.51255"
],
"潼关县": [
"110.24362",
"34.54284"
],
"大荔县": [
"109.94216",
"34.79565"
],
"合阳县": [
"110.14862",
"35.23805"
],
"澄城县": [
"109.93444",
"35.18396"
],
"蒲城县": [
"109.5903",
"34.9568"
],
"白水县": [
"109.59286",
"35.17863"
],
"富平县": [
"109.1802",
"34.75109"
],
"韩城市": [
"110.44238",
"35.47926"
],
"华阴市": [
"110.08752",
"34.56608"
],
"延安市": [
"109.49081",
"36.596537"
],
"宝塔区": [
"109.49336",
"36.59154"
],
"延长县": [
"110.01083",
"36.57904"
],
"延川县": [
"110.19415",
"36.87817"
],
"子长县": [
"109.67532",
"37.14253"
],
"安塞县": [
"109.32708",
"36.86507"
],
"志丹县": [
"108.76815",
"36.82177"
],
"吴起县": [
"108.17611",
"36.92785"
],
"甘泉县": [
"109.35012",
"36.27754"
],
"富县": [
"109.37927",
"35.98803"
],
"洛川县": [
"109.43286",
"35.76076"
],
"宜川县": [
"110.17196",
"36.04732"
],
"黄龙县": [
"109.84259",
"35.58349"
],
"黄陵县": [
"109.26333",
"35.58357"
],
"汉中市": [
"107.028621",
"33.077668"
],
"汉台区": [
"107.03187",
"33.06774"
],
"南郑县": [
"106.94024",
"33.00299"
],
"城固县": [
"107.33367",
"33.15661"
],
"洋县": [
"107.54672",
"33.22102"
],
"西乡县": [
"107.76867",
"32.98411"
],
"勉县": [
"106.67584",
"33.15273"
],
"宁强县": [
"106.25958",
"32.82881"
],
"略阳县": [
"106.15399",
"33.33009"
],
"镇巴县": [
"107.89648",
"32.53487"
],
"留坝县": [
"106.92233",
"33.61606"
],
"佛坪县": [
"107.98974",
"33.52496"
],
"榆林市": [
"109.741193",
"38.290162"
],
"榆阳区": [
"109.73473",
"38.27843"
],
"神木县": [
"110.4989",
"38.84234"
],
"府谷县": [
"111.06723",
"39.02805"
],
"横山县": [
"109.29568",
"37.958"
],
"靖边县": [
"108.79412",
"37.59938"
],
"定边县": [
"107.59793",
"37.59037"
],
"绥德县": [
"110.26126",
"37.49778"
],
"米脂县": [
"110.18417",
"37.75529"
],
"佳县": [
"110.49362",
"38.02248"
],
"吴堡县": [
"110.74533",
"37.45709"
],
"清涧县": [
"110.12173",
"37.08854"
],
"子洲县": [
"110.03488",
"37.61238"
],
"安康市": [
"109.029273",
"32.6903"
],
"汉滨区": [
"109.02683",
"32.69517"
],
"汉阴县": [
"108.51098",
"32.89129"
],
"石泉县": [
"108.24755",
"33.03971"
],
"宁陕县": [
"108.31515",
"33.31726"
],
"紫阳县": [
"108.5368",
"32.52115"
],
"岚皋县": [
"108.90289",
"32.30794"
],
"平利县": [
"109.35775",
"32.39111"
],
"镇坪县": [
"109.52456",
"31.8833"
],
"旬阳县": [
"109.3619",
"32.83207"
],
"白河县": [
"110.11315",
"32.80955"
],
"商洛市": [
"109.939776",
"33.868319"
],
"商州区": [
"109.94126",
"33.8627"
],
"洛南县": [
"110.14645",
"34.08994"
],
"丹凤县": [
"110.33486",
"33.69468"
],
"商南县": [
"110.88375",
"33.52581"
],
"山阳县": [
"109.88784",
"33.52931"
],
"镇安县": [
"109.15374",
"33.42366"
],
"柞水县": [
"109.11105",
"33.6831"
],
"西咸新区": [
"108.810654",
"34.307144"
],
"空港新城": [
"108.760529",
"34.440894"
],
"沣东新城": [
"108.82988",
"34.267431"
],
"秦汉新城": [
"108.83812",
"34.386513"
],
"沣西新城": [
"108.71215",
"34.190453"
],
"泾河新城": [
"109.049603",
"34.460587"
],
"甘肃省": [
"103.823557",
"36.058039"
],
"兰州市": [
"103.823557",
"36.058039"
],
"七里河区": [
"103.78564",
"36.06585"
],
"西固区": [
"103.62811",
"36.08858"
],
"安宁区": [
"103.7189",
"36.10384"
],
"红古区": [
"102.85955",
"36.34537"
],
"永登县": [
"103.26055",
"36.73522"
],
"皋兰县": [
"103.94506",
"36.33215"
],
"榆中县": [
"104.1145",
"35.84415"
],
"嘉峪关市": [
"98.277304",
"39.786529"
],
"雄关区": [
"98.277398",
"39.77925"
],
"长城区": [
"98.273523",
"39.787431"
],
"镜铁区": [
"98.277304",
"39.786529"
],
"金昌市": [
"102.187888",
"38.514238"
],
"金川区": [
"102.19376",
"38.52101"
],
"永昌县": [
"101.97222",
"38.24711"
],
"白银市": [
"104.173606",
"36.54568"
],
"白银区": [
"104.17355",
"36.54411"
],
"平川区": [
"104.82498",
"36.7277"
],
"靖远县": [
"104.68325",
"36.56602"
],
"会宁县": [
"105.05297",
"35.69626"
],
"景泰县": [
"104.06295",
"37.18359"
],
"天水市": [
"105.724998",
"34.578529"
],
"秦州区": [
"105.72421",
"34.58089"
],
"麦积区": [
"105.89013",
"34.57069"
],
"清水县": [
"106.13671",
"34.75032"
],
"秦安县": [
"105.66955",
"34.85894"
],
"甘谷县": [
"105.33291",
"34.73665"
],
"武山县": [
"104.88382",
"34.72123"
],
"张家川回族自治县": [
"106.21582",
"34.99582"
],
"武威市": [
"102.634697",
"37.929996"
],
"凉州区": [
"102.64203",
"37.92832"
],
"民勤县": [
"103.09011",
"38.62487"
],
"古浪县": [
"102.89154",
"37.46508"
],
"天祝藏族自治县": [
"103.1361",
"36.97715"
],
"张掖市": [
"100.455472",
"38.932897"
],
"甘州区": [
"100.4527",
"38.92947"
],
"肃南裕固族自治县": [
"99.61407",
"38.83776"
],
"民乐县": [
"100.81091",
"38.43479"
],
"临泽县": [
"100.16445",
"39.15252"
],
"高台县": [
"99.81918",
"39.37829"
],
"山丹县": [
"101.09359",
"38.78468"
],
"平凉市": [
"106.684691",
"35.54279"
],
"崆峒区": [
"106.67483",
"35.54262"
],
"泾川县": [
"107.36581",
"35.33223"
],
"灵台县": [
"107.6174",
"35.06768"
],
"崇信县": [
"107.03738",
"35.30344"
],
"华亭县": [
"106.65463",
"35.2183"
],
"庄浪县": [
"106.03662",
"35.20235"
],
"静宁县": [
"105.72723",
"35.51991"
],
"酒泉市": [
"98.510795",
"39.744023"
],
"肃州区": [
"98.50775",
"39.74506"
],
"金塔县": [
"98.90002",
"39.97733"
],
"瓜州县": [
"95.78271",
"40.51548"
],
"肃北蒙古族自治县": [
"94.87649",
"39.51214"
],
"阿克塞哈萨克族自治县": [
"94.34097",
"39.63435"
],
"玉门市": [
"97.04538",
"40.29172"
],
"敦煌市": [
"94.66159",
"40.14211"
],
"庆阳市": [
"107.638372",
"35.734218"
],
"西峰区": [
"107.65107",
"35.73065"
],
"庆城县": [
"107.88272",
"36.01507"
],
"环县": [
"107.30835",
"36.56846"
],
"华池县": [
"107.9891",
"36.46108"
],
"合水县": [
"108.02032",
"35.81911"
],
"正宁县": [
"108.36007",
"35.49174"
],
"宁县": [
"107.92517",
"35.50164"
],
"镇原县": [
"107.199",
"35.67712"
],
"定西市": [
"104.626294",
"35.579578"
],
"安定区": [
"120.237083",
"23.121498"
],
"通渭县": [
"105.24224",
"35.21101"
],
"陇西县": [
"104.63446",
"35.00238"
],
"渭源县": [
"104.21435",
"35.13649"
],
"临洮县": [
"103.86196",
"35.3751"
],
"漳县": [
"104.46704",
"34.84977"
],
"岷县": [
"104.03772",
"34.43444"
],
"陇南市": [
"104.929379",
"33.388598"
],
"武都区": [
"104.92652",
"33.39239"
],
"成县": [
"105.72586",
"33.73925"
],
"文县": [
"104.68362",
"32.94337"
],
"宕昌县": [
"104.39349",
"34.04732"
],
"康县": [
"105.60711",
"33.32912"
],
"西和县": [
"105.30099",
"34.01432"
],
"礼县": [
"105.17785",
"34.18935"
],
"徽县": [
"106.08529",
"33.76898"
],
"两当县": [
"106.30484",
"33.9096"
],
"临夏回族自治州": [
"103.212006",
"35.599446"
],
"临夏市": [
"103.21",
"35.59916"
],
"临夏县": [
"102.9938",
"35.49519"
],
"康乐县": [
"103.71093",
"35.37219"
],
"永靖县": [
"103.32043",
"35.93835"
],
"广河县": [
"103.56933",
"35.48097"
],
"和政县": [
"103.34936",
"35.42592"
],
"东乡族自治县": [
"103.39477",
"35.66471"
],
"积石山保安族东乡族撒拉族自治县": [
"102.87374",
"35.7182"
],
"甘南藏族自治州": [
"102.911008",
"34.986354"
],
"合作市": [
"102.91082",
"35.00016"
],
"临潭县": [
"103.35287",
"34.69515"
],
"卓尼县": [
"103.50811",
"34.58919"
],
"舟曲县": [
"104.37155",
"33.78468"
],
"迭部县": [
"103.22274",
"34.05623"
],
"玛曲县": [
"102.0754",
"33.997"
],
"碌曲县": [
"102.49176",
"34.58872"
],
"夏河县": [
"102.52215",
"35.20487"
],
"青海省": [
"101.778916",
"36.623178"
],
"西宁市": [
"101.778916",
"36.623178"
],
"城东区": [
"101.80373",
"36.59969"
],
"城西区": [
"101.76588",
"36.62828"
],
"城北区": [
"101.7662",
"36.65014"
],
"大通回族土族自治县": [
"101.70236",
"36.93489"
],
"湟中县": [
"101.57159",
"36.50083"
],
"湟源县": [
"101.25643",
"36.68243"
],
"海东市": [
"102.10327",
"36.502916"
],
"乐都区": [
"102.402431",
"36.480291"
],
"平安县": [
"102.104295",
"36.502714"
],
"民和回族土族自治县": [
"102.804209",
"36.329451"
],
"互助土族自治县": [
"101.956734",
"36.83994"
],
"化隆回族自治县": [
"102.262329",
"36.098322"
],
"循化撒拉族自治县": [
"102.486534",
"35.847247"
],
"海北藏族自治州": [
"100.901059",
"36.959435"
],
"门源回族自治县": [
"101.62228",
"37.37611"
],
"祁连县": [
"100.24618",
"38.17901"
],
"海晏县": [
"100.9927",
"36.89902"
],
"刚察县": [
"100.14675",
"37.32161"
],
"黄南藏族自治州": [
"102.019988",
"35.517744"
],
"同仁县": [
"102.0184",
"35.51603"
],
"尖扎县": [
"102.03411",
"35.93947"
],
"泽库县": [
"101.46444",
"35.03519"
],
"河南蒙古族自治县": [
"101.60864",
"34.73476"
],
"海南藏族自治州": [
"100.619542",
"36.280353"
],
"共和县": [
"100.62003",
"36.2841"
],
"同德县": [
"100.57159",
"35.25488"
],
"贵德县": [
"101.432",
"36.044"
],
"兴海县": [
"99.98846",
"35.59031"
],
"贵南县": [
"100.74716",
"35.58667"
],
"果洛藏族自治州": [
"100.242143",
"34.4736"
],
"玛沁县": [
"100.23901",
"34.47746"
],
"班玛县": [
"100.73745",
"32.93253"
],
"甘德县": [
"99.90246",
"33.96838"
],
"达日县": [
"99.65179",
"33.75193"
],
"久治县": [
"101.48342",
"33.42989"
],
"玛多县": [
"98.20996",
"34.91567"
],
"玉树藏族自治州": [
"97.008522",
"33.004049"
],
"玉树市": [
"97.008762",
"33.00393"
],
"杂多县": [
"95.29864",
"32.89318"
],
"称多县": [
"97.10788",
"33.36899"
],
"治多县": [
"95.61572",
"33.8528"
],
"囊谦县": [
"96.47753",
"32.20359"
],
"曲麻莱县": [
"95.79757",
"34.12609"
],
"海西蒙古族藏族自治州": [
"97.370785",
"37.374663"
],
"格尔木市": [
"94.90329",
"36.40236"
],
"德令哈市": [
"97.36084",
"37.36946"
],
"乌兰县": [
"98.48196",
"36.93471"
],
"都兰县": [
"98.09228",
"36.30135"
],
"天峻县": [
"99.02453",
"37.30326"
],
"宁夏回族自治区": [
"106.278179",
"38.46637"
],
"银川市": [
"106.278179",
"38.46637"
],
"兴庆区": [
"106.28872",
"38.47392"
],
"西夏区": [
"106.15023",
"38.49137"
],
"金凤区": [
"106.24261",
"38.47294"
],
"永宁县": [
"106.2517",
"38.27559"
],
"贺兰县": [
"106.34982",
"38.5544"
],
"灵武市": [
"106.33999",
"38.10266"
],
"石嘴山市": [
"106.376173",
"39.01333"
],
"大武口区": [
"106.37717",
"39.01226"
],
"惠农区": [
"106.71145",
"39.13193"
],
"平罗县": [
"106.54538",
"38.90429"
],
"吴忠市": [
"106.199409",
"37.986165"
],
"利通区": [
"106.20311",
"37.98512"
],
"红寺堡区": [
"106.19822",
"37.99747"
],
"盐池县": [
"107.40707",
"37.7833"
],
"同心县": [
"105.91418",
"36.98116"
],
"青铜峡市": [
"106.07489",
"38.02004"
],
"固原市": [
"106.285241",
"36.004561"
],
"原州区": [
"106.28778",
"36.00374"
],
"西吉县": [
"105.73107",
"35.96616"
],
"隆德县": [
"106.12426",
"35.61718"
],
"泾源县": [
"106.33902",
"35.49072"
],
"彭阳县": [
"106.64462",
"35.85076"
],
"中卫市": [
"105.189568",
"37.514951"
],
"沙坡头区": [
"105.18962",
"37.51044"
],
"中宁县": [
"105.68515",
"37.49149"
],
"海原县": [
"105.64712",
"36.56498"
],
"新疆维吾尔自治区": [
"87.617733",
"43.792818"
],
"乌鲁木齐市": [
"87.617733",
"43.792818"
],
"天山区": [
"87.63167",
"43.79439"
],
"沙依巴克区": [
"87.59788",
"43.80118"
],
"水磨沟区": [
"87.64249",
"43.83247"
],
"头屯河区": [
"87.29138",
"43.85487"
],
"达坂城区": [
"88.30697",
"43.35797"
],
"米东区": [
"87.68583",
"43.94739"
],
"乌鲁木齐县": [
"87.40939",
"43.47125"
],
"克拉玛依市": [
"84.873946",
"45.595886"
],
"独山子区": [
"84.88671",
"44.32867"
],
"克拉玛依区": [
"84.86225",
"45.59089"
],
"白碱滩区": [
"85.13244",
"45.68768"
],
"乌尔禾区": [
"85.69143",
"46.09006"
],
"吐鲁番地区": [
"89.184078",
"42.947613"
],
"吐鲁番市": [
"89.18579",
"42.93505"
],
"鄯善县": [
"90.21402",
"42.8635"
],
"托克逊县": [
"88.65823",
"42.79231"
],
"哈密地区": [
"93.51316",
"42.833248"
],
"哈密市": [
"93.51452",
"42.82699"
],
"巴里坤哈萨克自治县": [
"93.01236",
"43.59993"
],
"伊吾县": [
"94.69403",
"43.2537"
],
"昌吉回族自治州": [
"87.304012",
"44.014577"
],
"昌吉市": [
"87.30249",
"44.01267"
],
"阜康市": [
"87.98529",
"44.1584"
],
"呼图壁县": [
"86.89892",
"44.18977"
],
"玛纳斯县": [
"86.2145",
"44.30438"
],
"奇台县": [
"89.5932",
"44.02221"
],
"吉木萨尔县": [
"89.18078",
"44.00048"
],
"木垒哈萨克自治县": [
"90.28897",
"43.83508"
],
"博尔塔拉蒙古自治州": [
"82.074778",
"44.903258"
],
"博乐市": [
"82.0713",
"44.90052"
],
"阿拉山口市": [
"82.567721",
"45.170616"
],
"精河县": [
"82.89419",
"44.60774"
],
"温泉县": [
"81.03134",
"44.97373"
],
"巴音郭楞蒙古自治州": [
"86.150969",
"41.768552"
],
"库尔勒市": [
"86.15528",
"41.76602"
],
"轮台县": [
"84.26101",
"41.77642"
],
"尉犁县": [
"86.25903",
"41.33632"
],
"若羌县": [
"88.16812",
"39.0179"
],
"且末县": [
"85.52975",
"38.14534"
],
"焉耆回族自治县": [
"86.5744",
"42.059"
],
"和静县": [
"86.39611",
"42.31838"
],
"和硕县": [
"86.86392",
"42.26814"
],
"博湖县": [
"86.63333",
"41.98014"
],
"阿克苏地区": [
"80.265068",
"41.170712"
],
"阿克苏市": [
"80.26338",
"41.16754"
],
"温宿县": [
"80.24173",
"41.27679"
],
"库车县": [
"82.96209",
"41.71793"
],
"沙雅县": [
"82.78131",
"41.22497"
],
"新和县": [
"82.61053",
"41.54964"
],
"拜城县": [
"81.87564",
"41.79801"
],
"乌什县": [
"79.22937",
"41.21569"
],
"阿瓦提县": [
"80.38336",
"40.63926"
],
"柯坪县": [
"79.04751",
"40.50585"
],
"克孜勒苏柯尔克孜自治州": [
"76.172825",
"39.713431"
],
"阿图什市": [
"76.16827",
"39.71615"
],
"阿克陶县": [
"75.94692",
"39.14892"
],
"阿合奇县": [
"78.44848",
"40.93947"
],
"乌恰县": [
"75.25839",
"39.71984"
],
"喀什地区": [
"75.989138",
"39.467664"
],
"喀什市": [
"75.99379",
"39.46768"
],
"疏附县": [
"75.86029",
"39.37534"
],
"疏勒县": [
"76.05398",
"39.40625"
],
"英吉沙县": [
"76.17565",
"38.92968"
],
"泽普县": [
"77.27145",
"38.18935"
],
"莎车县": [
"77.24316",
"38.41601"
],
"叶城县": [
"77.41659",
"37.88324"
],
"麦盖提县": [
"77.64224",
"38.89662"
],
"岳普湖县": [
"76.77233",
"39.23561"
],
"伽师县": [
"76.72372",
"39.48801"
],
"巴楚县": [
"78.54888",
"39.7855"
],
"塔什库尔干塔吉克自治县": [
"75.23196",
"37.77893"
],
"和田地区": [
"79.92533",
"37.110687"
],
"和田市": [
"79.91353",
"37.11214"
],
"和田县": [
"79.82874",
"37.08922"
],
"墨玉县": [
"79.74035",
"37.27248"
],
"皮山县": [
"78.28125",
"37.62007"
],
"洛浦县": [
"80.18536",
"37.07364"
],
"策勒县": [
"80.80999",
"36.99843"
],
"于田县": [
"81.66717",
"36.854"
],
"民丰县": [
"82.68444",
"37.06577"
],
"伊犁哈萨克自治州": [
"81.317946",
"43.92186"
],
"伊宁市": [
"81.32932",
"43.91294"
],
"奎屯市": [
"84.90228",
"44.425"
],
"霍尔果斯市": [
"80.418189",
"44.205778"
],
"伊宁县": [
"81.52764",
"43.97863"
],
"察布查尔锡伯自治县": [
"81.14956",
"43.84023"
],
"霍城县": [
"80.87826",
"44.0533"
],
"巩留县": [
"82.22851",
"43.48429"
],
"新源县": [
"83.26095",
"43.4284"
],
"昭苏县": [
"81.1307",
"43.15828"
],
"特克斯县": [
"81.84005",
"43.21938"
],
"尼勒克县": [
"82.51184",
"43.79901"
],
"塔城地区": [
"82.985732",
"46.746301"
],
"塔城市": [
"82.97892",
"46.74852"
],
"乌苏市": [
"84.68258",
"44.43729"
],
"额敏县": [
"83.62872",
"46.5284"
],
"沙湾县": [
"85.61932",
"44.33144"
],
"托里县": [
"83.60592",
"45.93623"
],
"裕民县": [
"82.99002",
"46.20377"
],
"和布克赛尔蒙古自治县": [
"85.72662",
"46.79362"
],
"阿勒泰地区": [
"88.13963",
"47.848393"
],
"阿勒泰市": [
"88.13913",
"47.8317"
],
"布尔津县": [
"86.85751",
"47.70062"
],
"富蕴县": [
"89.52679",
"46.99444"
],
"福海县": [
"87.49508",
"47.11065"
],
"哈巴河县": [
"86.42092",
"48.06046"
],
"青河县": [
"90.38305",
"46.67382"
],
"吉木乃县": [
"85.87814",
"47.43359"
],
"石河子市": [
"86.041075",
"44.305886"
],
"阿拉尔市": [
"81.285884",
"40.541914"
],
"图木舒克市": [
"79.077978",
"39.867316"
],
"五家渠市": [
"87.526884",
"44.167401"
],
"北屯市": [
"87.808456",
"47.362308"
],
"铁门关市": [
"86.194687",
"41.811007"
],
"双河市": [
"91.132212",
"29.660361"
],
"台湾": [
"121.509062",
"25.044332"
],
"台北市": [
"121.565170",
"25.037798"
],
"信义区": [
"121.751381",
"25.129407"
],
"中正区": [
"121.518267",
"25.032361"
],
"万华区": [
"121.499332",
"25.031933"
],
"文山区": [
"121.570458",
"24.989786"
],
"南港区": [
"121.606858",
"25.054656"
],
"内湖区": [
"121.588998",
"25.069664"
],
"士林区": [
"121.519874",
"25.092822"
],
"北投区": [
"121.501379",
"25.132419"
],
"高雄市": [
"120.311922",
"22.620856"
],
"盐埕区": [
"120.286795",
"22.624666"
],
"鼓山区": [
"120.281154",
"22.636797"
],
"左营区": [
"120.294958",
"22.690124"
],
"楠梓区": [
"120.326314",
"22.728401"
],
"三民区": [
"120.299622",
"22.647695"
],
"前金区": [
"120.294159",
"22.627421"
],
"苓雅区": [
"120.312347",
"22.621770"
],
"前镇区": [
"120.318583",
"22.586425"
],
"旗津区": [
"120.284429",
"22.590565"
],
"小港区": [
"120.337970",
"22.565354"
],
"凤山区": [
"120.356892",
"22.626945"
],
"林园区": [
"120.395977",
"22.501490"
],
"大寮区": [
"120.395422",
"22.605386"
],
"大树区": [
"120.433095",
"22.693394"
],
"大社区": [
"120.346635",
"22.729966"
],
"仁武区": [
"120.347779",
"22.701901"
],
"鸟松区": [
"120.364402",
"22.659340"
],
"冈山区": [
"120.295820",
"22.796762"
],
"桥头区": [
"120.305741",
"22.757501"
],
"燕巢区": [
"120.361956",
"22.793370"
],
"田寮区": [
"120.359636",
"22.869307"
],
"阿莲区": [
"120.327036",
"22.883703"
],
"路竹区": [
"120.261828",
"22.856851"
],
"湖内区": [
"120.211530",
"22.908188"
],
"茄萣区": [
"120.182815",
"22.906556"
],
"永安区": [
"120.225308",
"22.818580"
],
"弥陀区": [
"120.247344",
"22.782879"
],
"梓官区": [
"120.267322",
"22.760475"
],
"旗山区": [
"120.483550",
"22.888491"
],
"美浓区": [
"120.541530",
"22.897880"
],
"六龟区": [
"120.633418",
"22.997914"
],
"甲仙区": [
"120.591185",
"23.084688"
],
"杉林区": [
"120.538980",
"22.970712"
],
"内门区": [
"120.462351",
"22.943437"
],
"茂林区": [
"120.663217",
"22.886248"
],
"桃源区": [
"120.760049",
"23.159137"
],
"那玛夏区": [
"120.692799",
"23.216964"
],
"基隆市": [
"121.746248",
"25.130741"
],
"七堵区": [
"121.713032",
"25.095739"
],
"暖暖区": [
"121.736102",
"25.099777"
],
"仁爱区": [
"121.740940",
"25.127526"
],
"安乐区": [
"121.723203",
"25.120910"
],
"台中市": [
"120.679040",
"24.138620"
],
"中区": [
"120.679510",
"24.143830"
],
"北区": [
"114.148959",
"22.494086"
],
"西屯区": [
"120.639820",
"24.181340"
],
"南屯区": [
"120.643080",
"24.138270"
],
"北屯区": [
"120.686250",
"24.182220"
],
"丰原区": [
"120.718460",
"24.242190"
],
"东势区": [
"120.827770",
"24.258610"
],
"大甲区": [
"120.622390",
"24.348920"
],
"清水区": [
"120.559780",
"24.268650"
],
"沙鹿区": [
"120.565700",
"24.233480"
],
"梧栖区": [
"120.531520",
"24.254960"
],
"后里区": [
"120.710710",
"24.304910"
],
"神冈区": [
"120.661550",
"24.257770"
],
"潭子区": [
"120.705160",
"24.209530"
],
"大雅区": [
"120.647780",
"24.229230"
],
"新社区": [
"120.809500",
"24.234140"
],
"石冈区": [
"120.780410",
"24.274980"
],
"外埔区": [
"120.654370",
"24.332010"
],
"乌日区": [
"120.623810",
"24.104500"
],
"大肚区": [
"120.540960",
"24.153660"
],
"龙井区": [
"120.545940",
"24.192710"
],
"雾峰区": [
"120.700200",
"24.061520"
],
"大里区": [
"120.677860",
"24.099390"
],
"台南市": [
"120.279363",
"23.172478"
],
"安南区": [
"120.184617",
"23.047230"
],
"安平区": [
"120.166810",
"23.000763"
],
"中西区": [
"114.154374",
"22.281981"
],
"新营区": [
"120.316698",
"23.310274"
],
"盐水区": [
"120.266398",
"23.319828"
],
"白河区": [
"120.415810",
"23.351221"
],
"柳营区": [
"120.311286",
"23.278133"
],
"后壁区": [
"120.362726",
"23.366721"
],
"麻豆区": [
"120.248179",
"23.181680"
],
"下营区": [
"120.264484",
"23.235413"
],
"六甲区": [
"120.347600",
"23.231931"
],
"官田区": [
"120.314374",
"23.194652"
],
"大内区": [
"120.348853",
"23.119460"
],
"佳里区": [
"120.177211",
"23.165121"
],
"学甲区": [
"120.180255",
"23.232348"
],
"西港区": [
"120.203618",
"23.123057"
],
"七股区": [
"120.140003",
"23.140545"
],
"将军区": [
"120.156871",
"23.199543"
],
"北门区": [
"120.125821",
"23.267148"
],
"新化区": [
"120.310807",
"23.038533"
],
"善化区": [
"120.296895",
"23.132261"
],
"山上区": [
"120.352908",
"23.103223"
],
"玉井区": [
"120.460110",
"23.123859"
],
"楠西区": [
"120.485396",
"23.173454"
],
"南化区": [
"120.477116",
"23.042614"
],
"左镇区": [
"120.407309",
"23.057955"
],
"仁德区": [
"120.251520",
"22.972212"
],
"归仁区": [
"120.293791",
"22.967081"
],
"关庙区": [
"120.327689",
"22.962949"
],
"龙崎区": [
"120.360824",
"22.965681"
],
"永康区": [
"120.257069",
"23.026061"
],
"新竹市": [
"120.968798",
"24.806738"
],
"香山区": [
"120.956679",
"24.768933"
],
"嘉义市": [
"120.452538",
"23.481568"
],
"新北市": [
"121.465746",
"25.012366"
],
"板桥区": [
"121.459084",
"25.009578"
],
"三重区": [
"121.488102",
"25.061486"
],
"中和区": [
"121.498980",
"24.999397"
],
"永和区": [
"121.513660",
"25.007802"
],
"新庄区": [
"121.450413",
"25.035947"
],
"新店区": [
"121.541750",
"24.967558"
],
"树林区": [
"121.420533",
"24.990706"
],
"莺歌区": [
"121.354573",
"24.955413"
],
"三峡区": [
"121.368905",
"24.934339"
],
"淡水区": [
"121.440915",
"25.169452"
],
"汐止区": [
"121.629470",
"25.062999"
],
"瑞芳区": [
"121.810061",
"25.108895"
],
"土城区": [
"121.443348",
"24.972201"
],
"芦洲区": [
"121.473700",
"25.084923"
],
"五股区": [
"121.438156",
"25.082743"
],
"林口区": [
"121.391602",
"25.077531"
],
"深坑区": [
"121.615670",
"25.002329"
],
"石碇区": [
"121.658567",
"24.991679"
],
"坪林区": [
"121.711185",
"24.937388"
],
"三芝区": [
"121.500866",
"25.258047"
],
"石门区": [
"121.568491",
"25.290412"
],
"八里区": [
"121.398227",
"25.146680"
],
"平溪区": [
"121.738255",
"25.025725"
],
"双溪区": [
"121.865676",
"25.033409"
],
"贡寮区": [
"121.908185",
"25.022388"
],
"万里区": [
"121.688687",
"25.181234"
],
"乌来区": [
"121.550531",
"24.865296"
],
"宜兰县": [
"121.500000",
"24.600000"
],
"宜兰市": [
"121.753476",
"24.751682"
],
"罗东镇": [
"121.766919",
"24.677033"
],
"苏澳镇": [
"121.842656",
"24.594622"
],
"头城镇": [
"121.823307",
"24.859217"
],
"礁溪乡": [
"121.766680",
"24.822345"
],
"壮围乡": [
"121.781619",
"24.744949"
],
"员山乡": [
"121.721733",
"24.741771"
],
"冬山乡": [
"121.792280",
"24.634514"
],
"五结乡": [
"121.798297",
"24.684640"
],
"三星乡": [
"121.003418",
"23.775291"
],
"大同乡": [
"121.605557",
"24.675997"
],
"南澳乡": [
"121.799810",
"24.465393"
],
"桃园县": [
"121.083000",
"25.000000"
],
"桃园市": [
"121.301337",
"24.993777"
],
"中坜市": [
"121.224926",
"24.965353"
],
"平镇市": [
"121.218359",
"24.945752"
],
"八德市": [
"121.284655",
"24.928651"
],
"杨梅市": [
"121.145873",
"24.907575"
],
"芦竹市": [
"121.292064",
"25.045392"
],
"大溪镇": [
"121.286962",
"24.880584"
],
"大园乡": [
"121.196292",
"25.064471"
],
"龟山乡": [
"121.337767",
"24.992517"
],
"龙潭乡": [
"121.216392",
"24.863851"
],
"新屋乡": [
"121.105801",
"24.972203"
],
"观音乡": [
"121.077519",
"25.033303"
],
"复兴乡": [
"121.352613",
"24.820908"
],
"新竹县": [
"121.160000",
"24.600000"
],
"竹北市": [
"121.004317",
"24.839652"
],
"竹东镇": [
"121.086418",
"24.733601"
],
"新埔镇": [
"121.072804",
"24.824820"
],
"关西镇": [
"121.177301",
"24.788842"
],
"湖口乡": [
"121.043691",
"24.903943"
],
"新丰乡": [
"120.983006",
"24.899600"
],
"芎林乡": [
"121.076924",
"24.774436"
],
"横山乡": [
"121.116244",
"24.720807"
],
"北埔乡": [
"121.053156",
"24.697126"
],
"宝山乡": [
"120.985752",
"24.760975"
],
"峨眉乡": [
"121.015291",
"24.686127"
],
"尖石乡": [
"121.197802",
"24.704360"
],
"五峰乡": [
"121.003418",
"23.775291"
],
"苗栗县": [
"120.750000",
"24.500000"
],
"苗栗市": [
"120.818869",
"24.561472"
],
"苑里镇": [
"120.648907",
"24.441750"
],
"通霄镇": [
"120.676700",
"24.489087"
],
"竹南镇": [
"120.872641",
"24.685513"
],
"头份镇": [
"120.895188",
"24.687993"
],
"后龙镇": [
"120.786480",
"24.612617"
],
"卓兰镇": [
"120.823441",
"24.309509"
],
"大湖乡": [
"120.863641",
"24.422547"
],
"公馆乡": [
"120.822983",
"24.499108"
],
"铜锣乡": [
"121.003418",
"23.775291"
],
"南庄乡": [
"120.994957",
"24.596835"
],
"头屋乡": [
"120.846616",
"24.574249"
],
"三义乡": [
"120.742340",
"24.350270"
],
"西湖乡": [
"121.003418",
"23.775291"
],
"造桥乡": [
"120.862399",
"24.637537"
],
"三湾乡": [
"120.951484",
"24.651051"
],
"狮潭乡": [
"120.918024",
"24.540004"
],
"泰安乡": [
"120.904441",
"24.442600"
],
"彰化县": [
"120.416000",
"24.000000"
],
"彰化市": [
"120.542294",
"24.080911"
],
"鹿港镇": [
"120.435392",
"24.056937"
],
"和美镇": [
"120.500265",
"24.110904"
],
"线西乡": [
"120.465921",
"24.128653"
],
"伸港乡": [
"120.484224",
"24.146081"
],
"福兴乡": [
"120.443682",
"24.047883"
],
"秀水乡": [
"120.502658",
"24.035267"
],
"花坛乡": [
"120.538403",
"24.029399"
],
"芬园乡": [
"120.629024",
"24.013658"
],
"员林镇": [
"120.574625",
"23.958999"
],
"溪湖镇": [
"120.479144",
"23.962315"
],
"田中镇": [
"120.580629",
"23.861718"
],
"大村乡": [
"120.540713",
"23.993726"
],
"埔盐乡": [
"120.464044",
"24.000346"
],
"埔心乡": [
"120.543568",
"23.953019"
],
"永靖乡": [
"120.547775",
"23.924703"
],
"社头乡": [
"120.582681",
"23.896686"
],
"二水乡": [
"120.618788",
"23.806995"
],
"北斗镇": [
"120.520449",
"23.870911"
],
"二林镇": [
"120.374468",
"23.899751"
],
"田尾乡": [
"120.524717",
"23.890735"
],
"埤头乡": [
"120.462599",
"23.891324"
],
"芳苑乡": [
"120.320329",
"23.924222"
],
"大城乡": [
"120.320934",
"23.852382"
],
"竹塘乡": [
"120.427499",
"23.860112"
],
"溪州乡": [
"120.498706",
"23.851229"
],
"南投县": [
"120.830000",
"23.830000"
],
"南投市": [
"120.683706",
"23.909956"
],
"埔里镇": [
"120.964648",
"23.964789"
],
"草屯镇": [
"120.680343",
"23.973947"
],
"竹山镇": [
"120.672007",
"23.757655"
],
"集集镇": [
"120.783673",
"23.829013"
],
"名间乡": [
"120.702797",
"23.838427"
],
"鹿谷乡": [
"120.752796",
"23.744471"
],
"中寮乡": [
"120.766654",
"23.878935"
],
"鱼池乡": [
"120.936060",
"23.896356"
],
"国姓乡": [
"120.858541",
"24.042298"
],
"水里乡": [
"120.855912",
"23.812086"
],
"信义乡": [
"120.855257",
"23.699922"
],
"仁爱乡": [
"121.133543",
"24.024429"
],
"云林县": [
"120.250000",
"23.750000"
],
"斗六市": [
"120.527360",
"23.697651"
],
"斗南镇": [
"120.479075",
"23.679731"
],
"虎尾镇": [
"120.445339",
"23.708182"
],
"西螺镇": [
"120.466010",
"23.797984"
],
"土库镇": [
"120.392572",
"23.677822"
],
"北港镇": [
"120.302393",
"23.575525"
],
"古坑乡": [
"120.562043",
"23.642568"
],
"大埤乡": [
"120.430516",
"23.645908"
],
"莿桐乡": [
"120.502374",
"23.760784"
],
"林内乡": [
"120.611365",
"23.758712"
],
"二仑乡": [
"120.415077",
"23.771273"
],
"仑背乡": [
"120.353895",
"23.758840"
],
"麦寮乡": [
"120.252043",
"23.753841"
],
"东势乡": [
"120.252672",
"23.674679"
],
"褒忠乡": [
"120.310488",
"23.694245"
],
"台西乡": [
"120.196141",
"23.702819"
],
"元长乡": [
"120.315124",
"23.649458"
],
"四湖乡": [
"120.225741",
"23.637740"
],
"口湖乡": [
"120.185370",
"23.582406"
],
"水林乡": [
"120.245948",
"23.572634"
],
"嘉义县": [
"120.300000",
"23.500000"
],
"太保市": [
"120.332876",
"23.459647"
],
"朴子市": [
"120.247014",
"23.464961"
],
"布袋镇": [
"120.166936",
"23.377979"
],
"大林镇": [
"120.471336",
"23.603815"
],
"民雄乡": [
"120.428577",
"23.551456"
],
"溪口乡": [
"120.393822",
"23.602223"
],
"新港乡": [
"120.347647",
"23.551806"
],
"六脚乡": [
"120.291083",
"23.493942"
],
"东石乡": [
"120.153822",
"23.459235"
],
"义竹乡": [
"120.243423",
"23.336277"
],
"鹿草乡": [
"120.308370",
"23.410784"
],
"水上乡": [
"120.397936",
"23.428104"
],
"中埔乡": [
"120.522948",
"23.425148"
],
"竹崎乡": [
"120.551466",
"23.523184"
],
"梅山乡": [
"120.557192",
"23.584915"
],
"番路乡": [
"120.555043",
"23.465222"
],
"大埔乡": [
"120.593795",
"23.296715"
],
"阿里山乡": [
"120.732520",
"23.467950"
],
"屏东县": [
"120.487928",
"22.682802"
],
"屏东市": [
"120.488465",
"22.669723"
],
"潮州镇": [
"120.542854",
"22.550536"
],
"东港镇": [
"120.454489",
"22.466626"
],
"恒春镇": [
"120.745451",
"22.002373"
],
"万丹乡": [
"120.484533",
"22.589839"
],
"长治乡": [
"120.527614",
"22.677062"
],
"麟洛乡": [
"120.527283",
"22.650604"
],
"九如乡": [
"120.490142",
"22.739778"
],
"里港乡": [
"120.494490",
"22.779220"
],
"盐埔乡": [
"120.572849",
"22.754783"
],
"高树乡": [
"120.600214",
"22.826789"
],
"万峦乡": [
"120.566477",
"22.571965"
],
"内埔乡": [
"120.566865",
"22.611967"
],
"竹田乡": [
"120.544038",
"22.584678"
],
"新埤乡": [
"120.549546",
"22.469976"
],
"枋寮乡": [
"120.593438",
"22.365560"
],
"新园乡": [
"120.461739",
"22.543952"
],
"崁顶乡": [
"120.514571",
"22.514795"
],
"林边乡": [
"120.515091",
"22.434015"
],
"南州乡": [
"120.509808",
"22.490192"
],
"佳冬乡": [
"120.551544",
"22.417653"
],
"琉球乡": [
"120.369020",
"22.342366"
],
"车城乡": [
"120.710979",
"22.072077"
],
"满州乡": [
"120.838843",
"22.020853"
],
"枋山乡": [
"120.656356",
"22.260338"
],
"三地门乡": [
"120.654486",
"22.713877"
],
"雾台乡": [
"120.732318",
"22.744877"
],
"玛家乡": [
"120.644130",
"22.706718"
],
"泰武乡": [
"120.632856",
"22.591819"
],
"来义乡": [
"120.633601",
"22.525866"
],
"春日乡": [
"120.628793",
"22.370672"
],
"狮子乡": [
"120.704617",
"22.201917"
],
"牡丹乡": [
"120.770108",
"22.125687"
],
"台东县": [
"120.916000",
"23.000000"
],
"台东市": [
"121.145654",
"22.756045"
],
"成功镇": [
"121.379571",
"23.100223"
],
"关山镇": [
"121.163134",
"23.047450"
],
"卑南乡": [
"121.083503",
"22.786039"
],
"鹿野乡": [
"121.135982",
"22.913951"
],
"池上乡": [
"121.215139",
"23.122393"
],
"东河乡": [
"121.300334",
"22.969934"
],
"长滨乡": [
"121.451522",
"23.315041"
],
"太麻里乡": [
"121.007394",
"22.615383"
],
"大武乡": [
"120.889938",
"22.339919"
],
"绿岛乡": [
"121.492596",
"22.661676"
],
"海端乡": [
"121.172008",
"23.101074"
],
"延平乡": [
"121.084499",
"22.902358"
],
"金峰乡": [
"120.971292",
"22.595511"
],
"达仁乡": [
"120.884131",
"22.294869"
],
"兰屿乡": [
"121.532473",
"22.056736"
],
"花莲县": [
"121.300000",
"23.830000"
],
"花莲市": [
"121.606810",
"23.982074"
],
"凤林镇": [
"121.451687",
"23.744648"
],
"玉里镇": [
"121.316445",
"23.336509"
],
"新城乡": [
"121.640512",
"24.128133"
],
"吉安乡": [
"121.568005",
"23.961635"
],
"寿丰乡": [
"121.508955",
"23.870680"
],
"光复乡": [
"121.423496",
"23.669084"
],
"丰滨乡": [
"121.518639",
"23.597080"
],
"瑞穗乡": [
"121.375992",
"23.496817"
],
"富里乡": [
"121.250124",
"23.179984"
],
"秀林乡": [
"121.620381",
"24.116642"
],
"万荣乡": [
"121.407493",
"23.715346"
],
"卓溪乡": [
"121.303422",
"23.346369"
],
"澎湖县": [
"119.566417",
"23.569733"
],
"马公市": [
"119.566499",
"23.565845"
],
"湖西乡": [
"119.659666",
"23.583358"
],
"白沙乡": [
"119.597919",
"23.666060"
],
"西屿乡": [
"119.506974",
"23.600836"
],
"望安乡": [
"119.500538",
"23.357531"
],
"七美乡": [
"119.423929",
"23.206018"
],
"金城镇": [
"118.316667",
"24.416667"
],
"金湖镇": [
"118.419743",
"24.438633"
],
"金沙镇": [
"118.427993",
"24.481109"
],
"金宁乡": [
"118.334506",
"24.45672"
],
"烈屿乡": [
"118.247255",
"24.433102"
],
"乌丘乡": [
"118.319578",
"24.435038"
],
"南竿乡": [
"119.944267",
"26.144035"
],
"北竿乡": [
"120.000572",
"26.221983"
],
"莒光乡": [
"119.940405",
"25.976256"
],
"东引乡": [
"120.493955",
"26.366164"
],
"香港特别行政区": [
"114.173355",
"22.320048"
],
"香港岛": [
"114.177314",
"22.266416"
],
"湾仔区": [
"114.182915",
"22.276389"
],
"九龙": [
"114.17495",
"22.327115"
],
"油尖旺区": [
"114.173332",
"22.311704"
],
"深水埗区": [
"114.16721",
"22.328171"
],
"九龙城区": [
"114.195053",
"22.32673"
],
"黄大仙区": [
"114.19924",
"22.336313"
],
"观塘区": [
"114.231268",
"22.30943"
],
"新界": [
"114.202408",
"22.341766"
],
"荃湾区": [
"114.122952",
"22.370973"
],
"屯门区": [
"113.977416",
"22.391047"
],
"元朗区": [
"114.039796",
"22.443342"
],
"大埔区": [
"114.171743",
"22.445653"
],
"西贡区": [
"114.27854",
"22.37944"
],
"沙田区": [
"114.191941",
"22.379294"
],
"葵青区": [
"114.13932",
"22.363877"
],
"离岛区": [
"113.945842",
"22.281508"
],
"澳门特别行政区": [
"113.54909",
"22.198951"
],
"澳门半岛": [
"113.549134",
"22.198751"
],
"花地玛堂区": [
"113.552284",
"22.208067"
],
"圣安多尼堂区": [
"113.564301",
"22.12381"
],
"大堂区": [
"113.552971",
"22.188359"
],
"望德堂区": [
"113.550568",
"22.194081"
],
"风顺堂区": [
"113.541928",
"22.187368"
],
"氹仔岛": [
"113.577669",
"22.156838"
],
"嘉模堂区": [
"113.565303",
"22.149029"
],
"路环岛": [
"113.564857",
"22.116226"
],
"圣方济各堂区": [
"113.559954",
"22.123486"
],
"钓鱼岛": [
"123.478088",
"25.742385"
]
} | "德城区": [
"116.29943",
"37.45126" |
combobox.py | """
Python wrapper for libui.
"""
import ctypes
from . import clibui
class uiCombobox(ctypes.Structure):
"""Wrapper for the uiCombobox C struct."""
pass
def uiComboboxPointer(obj):
"""
Casts an object to uiCombobox pointer type.
:param obj: a generic object
:return: uiCombobox
"""
return ctypes.cast(obj, ctypes.POINTER(uiCombobox))
# - void uiComboboxAppend(uiCombobox *c, const char *text);
def uiComboboxAppend(combobox, text):
"""
Appends a new item to the combobox.
:param combobox: uiCombobox
:param text: string
:return: None
"""
clibui.uiComboboxAppend(combobox, bytes(text, 'utf-8'))
# - int uiComboboxSelected(uiCombobox *c);
def uiComboboxSelected(combobox):
"""
    Returns the selected item's index.
:param combobox: uiCombobox
:return: int
"""
return clibui.uiComboboxSelected(combobox)
# - void uiComboboxSetSelected(uiCombobox *c, int n);
def uiComboboxSetSelected(combobox, n):
"""
Sets selected item.
:param combobox: uiCombobox
:param n: integer
:return: None
"""
clibui.uiComboboxSetSelected(combobox, n)
# - void uiComboboxOnSelected(uiCombobox *c, void (*f)(uiCombobox *c, void *data), void *data);
def | (combobox, callback, data):
"""
    Executes a callback function when an item is selected.
:param combobox: uiCombobox
:param callback: function
    :param data: user data passed through to the callback
:return: reference to C callback function
"""
    # The C callback returns void (see the signature comment above), so the
    # CFUNCTYPE result type must be None; with c_int the Python callback
    # would be required to return an integer.
    c_type = ctypes.CFUNCTYPE(
        None, ctypes.POINTER(uiCombobox), ctypes.c_void_p)
c_callback = c_type(callback)
clibui.uiComboboxOnSelected(combobox, c_callback, data)
return c_callback
def uiNewCombobox():
"""
Creates a new combobox.
:return: uiCombobox
"""
clibui.uiNewCombobox.restype = ctypes.POINTER(uiCombobox)
return clibui.uiNewCombobox()
| uiComboboxOnSelected |
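A hedged usage sketch for the wrapper above. The import path libui.combobox and the application setup around it (initialization, window creation, main loop) are assumptions not shown in the source; the sketch exists to flag the one ctypes pitfall: keep the reference returned by uiComboboxOnSelected alive for the widget's lifetime, or Python may garbage-collect the C trampoline while libui still points at it.

from libui.combobox import (  # hypothetical import path for this module
    uiNewCombobox, uiComboboxAppend, uiComboboxSetSelected, uiComboboxOnSelected)

def on_selected(combobox, data):
    print('selection changed')

combo = uiNewCombobox()
for label in ('red', 'green', 'blue'):
    uiComboboxAppend(combo, label)
uiComboboxSetSelected(combo, 0)
# Keep this reference; dropping it leaves libui with a dangling callback.
handler = uiComboboxOnSelected(combo, on_selected, None)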
stream_field.py | import dataclasses
from typing import Any, Callable, List, Optional, Type
from wagtail.core.blocks.field_block import CharBlock, FieldBlock, RichTextBlock
from wagtail.core.blocks.stream_block import StreamBlock | from wagtail.core.fields import StreamField
from wagtail.images.blocks import ImageChooserBlock
import strawberry
import strawberry.django
from strawberry.union import StrawberryUnion
from strawberry.utils.str_converters import capitalize_first, to_camel_case
from .scalars import HTML
def _make_type(
class_name: str, value_field_name: str, value_type: Type, from_data: Callable
) -> Type:
# TODO: don't use dataclasses
x = dataclasses.make_dataclass(
class_name, [("id", strawberry.ID), (value_field_name, value_type)]
)
x.from_data = classmethod(from_data)
return strawberry.type(x)
def get_type_for_stream_block(
block: StreamBlock,
class_name: str,
) -> Type:
types = set()
block_map = {}
for field_block in block.child_blocks.values():
name = class_name + capitalize_first(to_camel_case(field_block.name))
type_ = _get_type_for_field_block(field_block, name)
if isinstance(type_, StrawberryUnion):
assert type_.graphql_name
type_.graphql_name += "Values"
type_ = _make_type(name, "values", List[type_], None)
block_map[field_block.name] = type_
types.add(type_)
union_type = strawberry.union(
class_name, types=tuple(sorted(types, key=lambda x: str(x)))
)
union_type._block_map = block_map
return union_type
def _get_type_for_field_block(field_block: FieldBlock, name: str) -> Optional[Type]:
type_ = None
if isinstance(field_block, CharBlock):
        def from_data(cls, data: dict) -> Any:
return cls(id=data["id"], value=data["value"])
type_ = _make_type(name, "value", str, from_data)
elif isinstance(field_block, RichTextBlock):
        def from_data(cls, data: dict) -> Any:
return cls(id=data["id"], html=data["value"])
type_ = _make_type(name, "html", HTML, from_data)
elif isinstance(field_block, ImageChooserBlock):
        def from_data(cls, data: dict) -> Any:
return cls(id=data["id"], image=data["value"])
type_ = _make_type(name, "image", str, from_data)
elif isinstance(field_block, StreamBlock):
type_ = get_type_for_stream_block(field_block, name)
if type_ is None:
raise ValueError(f"Unknown type for {field_block}")
type_._origin_field_block = field_block # type: ignore
return type_
def _get_block(block: dict, parent_type: Type) -> Any:
block_type = parent_type._block_map.get(block["type"])
if not block_type:
return None
block_data = block.copy()
block_data.pop("type")
if type(block["value"]) is list:
# mmm
print("🌼🌼🌼")
print(block_type._type_definition.fields[1].__dict__)
block_value_type = block_type._type_definition.fields[1].type.of_type
value = [
_get_block(sub_block, block_value_type) for sub_block in block["value"]
]
print(block_type)
print(block_value_type)
print(value)
return block_type(id=block_data["id"], values=value)
return block_type.from_data(block_data)
def get_resolver_for_stream_field(field: StreamField, type: Type) -> Callable:
def _resolver(root: Any) -> List[type]:
raw_data = getattr(root, field.name)._raw_data
data = []
for block in raw_data:
block_data = _get_block(block, type)
if block_data:
data.append(block_data)
return data
return _resolver | |
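The core trick in _make_type above is building a dataclass at runtime and attaching a from_data alternate constructor. A minimal sketch of that pattern with strawberry and wagtail stripped out (every name here is illustrative):

import dataclasses

def make_type(class_name, value_field_name, value_type, from_data):
    # Build the dataclass at runtime, then bolt on the constructor.
    cls = dataclasses.make_dataclass(
        class_name, [('id', str), (value_field_name, value_type)])
    cls.from_data = classmethod(from_data)
    return cls

def char_from_data(cls, data):
    return cls(id=data['id'], value=data['value'])

CharBlockType = make_type('CharBlockType', 'value', str, char_from_data)
block = CharBlockType.from_data({'id': 'b1', 'value': 'Hello'})
print(block)  # CharBlockType(id='b1', value='Hello')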
101-starwars_characters.js | #!/usr/bin/node
/* prints all characters of a Star Wars movie. | */
const request = require('request');
const url = 'https://swapi-api.hbtn.io/api/films/' + process.argv[2];
request(url, function (error, response, body) {
if (error) console.log(error);
else {
const characters = JSON.parse(body).characters;
    const charactersDict = {};
    let completed = 0;
    characters.forEach((character) => {
      request(character, function (error, response, body) {
        if (error) console.log(error);
        else {
          const person = JSON.parse(body);
          let id = person.url;
          id = id.replace('https://swapi-api.hbtn.io/api/people/', '');
          id = id.replace('/', '');
          charactersDict[id] = person.name;
          completed += 1;
          // The requests resolve asynchronously: print only once every
          // character request has completed, not right after forEach.
          if (completed === characters.length) {
            for (const key in charactersDict) {
              console.log(charactersDict[key]);
            }
          }
        }
      });
    });
}
}); | - the first argument is the Movie ID.
- displays one character name by line. |
startCirq3226.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=4
# total number=47
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
c = cirq.Circuit() # circuit begin
c.append(cirq.H.on(input_qubit[0])) # number=9
c.append(cirq.H.on(input_qubit[2])) # number=39
c.append(cirq.H.on(input_qubit[1])) # number=2
c.append(cirq.H.on(input_qubit[2])) # number=3
c.append(cirq.H.on(input_qubit[3])) # number=4
c.append(cirq.H.on(input_qubit[0])) # number=5
c.append(cirq.H.on(input_qubit[3])) # number=16
c.append(cirq.CZ.on(input_qubit[1],input_qubit[3])) # number=17
c.append(cirq.Y.on(input_qubit[3])) # number=46
c.append(cirq.H.on(input_qubit[3])) # number=18
c.append(cirq.H.on(input_qubit[1])) # number=6
c.append(cirq.H.on(input_qubit[2])) # number=7
c.append(cirq.H.on(input_qubit[3])) # number=8
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[3])) # number=10
c.append(cirq.H.on(input_qubit[3])) # number=40
c.append(cirq.CZ.on(input_qubit[0],input_qubit[3])) # number=41
c.append(cirq.H.on(input_qubit[3])) # number=42
c.append(cirq.H.on(input_qubit[3])) # number=43
c.append(cirq.CZ.on(input_qubit[0],input_qubit[3])) # number=44
c.append(cirq.H.on(input_qubit[3])) # number=45
c.append(cirq.X.on(input_qubit[3])) # number=34
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[3])) # number=35
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[3])) # number=25
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[3])) # number=12
c.append(cirq.H.on(input_qubit[2])) # number=30
c.append(cirq.CZ.on(input_qubit[0],input_qubit[2])) # number=31
c.append(cirq.H.on(input_qubit[2])) # number=32
c.append(cirq.X.on(input_qubit[2])) # number=21
c.append(cirq.H.on(input_qubit[2])) # number=36
c.append(cirq.CZ.on(input_qubit[0],input_qubit[2])) # number=37
c.append(cirq.H.on(input_qubit[2])) # number=38
c.append(cirq.H.on(input_qubit[0])) # number=26
c.append(cirq.CZ.on(input_qubit[3],input_qubit[0])) # number=27
c.append(cirq.H.on(input_qubit[0])) # number=28
c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=14
c.append(cirq.Y.on(input_qubit[2])) # number=29
# circuit end
c.append(cirq.measure(*input_qubit, key='result'))
return c
def bitstring(bits):
return ''.join(str(int(b)) for b in bits)
if __name__ == '__main__':
qubit_count = 4
input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
circuit = make_circuit(qubit_count,input_qubits)
circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap') |
simulator = cirq.Simulator()
result = simulator.run(circuit, repetitions=circuit_sample_count)
frequencies = result.histogram(key='result', fold_func=bitstring)
writefile = open("../data/startCirq3226.csv","w+")
print(format(frequencies),file=writefile)
print("results end", file=writefile)
print(circuit.__len__(), file=writefile)
print(circuit,file=writefile)
writefile.close() |
circuit_sample_count = 2000 |
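For context on the sampling step above: result.histogram(key='result', fold_func=bitstring) folds each repetition's measured bits into a string key and returns a collections.Counter. A cirq-free sketch of that aggregation (the sample data is made up):

from collections import Counter

def bitstring(bits):
    return ''.join(str(int(b)) for b in bits)

# Three hypothetical repetitions of a 4-qubit measurement.
samples = [[0, 1, 1, 0], [0, 1, 1, 0], [1, 0, 0, 1]]
frequencies = Counter(bitstring(s) for s in samples)
print(frequencies)  # Counter({'0110': 2, '1001': 1})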
lib.rs | /// Application/Server-level management and routing configuration and testing support; outermost functionality.
pub mod app;
/// Handler construction and prototypes
pub mod handler;
/// Macros for quality-of-life when interacting with Handlers
pub mod macros;
/// Path management for Routes
pub(crate) mod path;
/// Router, Route management and organization
pub(crate) mod router;
use http::{Request, Response};
use std::{collections::BTreeMap, pin::Pin};
/// Params are a mapping of name -> parameter for the purposes of routing.
pub type Params = BTreeMap<String, String>;
pub(crate) type PinBox<F> = Pin<Box<F>>;
/// An error for server-related issues.
#[derive(Debug, Clone)]
pub struct ServerError(String);
impl<T> From<T> for ServerError
where
T: ToString,
{
fn from(t: T) -> Self {
ServerError(t.to_string())
}
}
/// General errors for ratpack handlers. Yield either a StatusCode for a literal status, or a
/// String for a 500 Internal Server Error. Other status codes should be yielded through
/// [http::Response] returns.
#[derive(Clone, Debug)]
pub enum Error {
StatusCode(http::StatusCode, String),
InternalServerError(String),
}
impl Default for Error {
fn default() -> Self {
Self::InternalServerError("internal server error".to_string())
}
}
impl Error {
/// Convenience method to pass anything in that accepts a .to_string method.
pub fn new<T>(message: T) -> Self
where
T: ToString,
{
Self::InternalServerError(message.to_string())
}
/// A convenient way to return status codes with optional informational bodies.
pub fn new_status<T>(error: http::StatusCode, message: T) -> Self
where
T: ToString,
{
Self::StatusCode(error, message.to_string())
}
}
impl<T> From<T> for Error
where
T: ToString,
{
fn from(t: T) -> Self {
Self::new(t.to_string())
}
}
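/// ToStatus converts a string-convertible value into an [Error], letting
/// handlers surface custom failures as specific HTTP status codes.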
pub trait ToStatus
where
Self: ToString,
{
fn to_status(&self) -> Error;
}
/// HTTPResult is the return type for handlers. If a handler terminates at the end of its chain
/// with [std::option::Option::None] as the [http::Response], a 500 Internal Server Error will be
/// returned. If you wish to return Err(), a [http::StatusCode] or [std::string::String] can be
/// returned, the former is resolved to its status with an empty body, and the latter corresponds
/// to a 500 Internal Server Error with the body set to the string.
pub type HTTPResult<TransientState> = Result<
(
Request<hyper::Body>,
Option<Response<hyper::Body>>,
TransientState,
),
Error,
>;
| pub trait TransientState
where
Self: Clone + Send,
{
/// initial prescribes an initial state for the trait, allowing it to be constructed at
/// dispatch time.
fn initial() -> Self;
}
/// NoState is an empty [crate::TransientState].
#[derive(Clone)]
pub struct NoState;
impl TransientState for NoState {
fn initial() -> Self {
Self {}
}
}
/// A convenience import to gather all of `ratpack`'s dependencies in one easy place.
/// To use:
///
/// ```
/// use ratpack::prelude::*;
/// ```
pub mod prelude {
pub use crate::{
app::App, compose_handler, Error, HTTPResult, NoState, Params, ServerError, ToStatus,
TransientState,
};
pub use http::{Request, Response, StatusCode};
pub use hyper::Body;
} | /// TransientState must be implemented to use state between handlers. |
clientset.go | /*
Copyright 2020 The KubeSphere Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by client-gen. DO NOT EDIT.
package versioned
import (
"fmt"
discovery "k8s.io/client-go/discovery"
rest "k8s.io/client-go/rest"
flowcontrol "k8s.io/client-go/util/flowcontrol"
applicationv1alpha1 "kubesphere.io/kubesphere/pkg/client/clientset/versioned/typed/application/v1alpha1"
auditingv1alpha1 "kubesphere.io/kubesphere/pkg/client/clientset/versioned/typed/auditing/v1alpha1"
clusterv1alpha1 "kubesphere.io/kubesphere/pkg/client/clientset/versioned/typed/cluster/v1alpha1"
devopsv1alpha1 "kubesphere.io/kubesphere/pkg/client/clientset/versioned/typed/devops/v1alpha1"
devopsv1alpha3 "kubesphere.io/kubesphere/pkg/client/clientset/versioned/typed/devops/v1alpha3"
iamv1alpha2 "kubesphere.io/kubesphere/pkg/client/clientset/versioned/typed/iam/v1alpha2"
networkv1alpha1 "kubesphere.io/kubesphere/pkg/client/clientset/versioned/typed/network/v1alpha1"
notificationv2beta1 "kubesphere.io/kubesphere/pkg/client/clientset/versioned/typed/notification/v2beta1"
quotav1alpha2 "kubesphere.io/kubesphere/pkg/client/clientset/versioned/typed/quota/v1alpha2"
servicemeshv1alpha2 "kubesphere.io/kubesphere/pkg/client/clientset/versioned/typed/servicemesh/v1alpha2"
storagev1alpha1 "kubesphere.io/kubesphere/pkg/client/clientset/versioned/typed/storage/v1alpha1"
tenantv1alpha1 "kubesphere.io/kubesphere/pkg/client/clientset/versioned/typed/tenant/v1alpha1"
tenantv1alpha2 "kubesphere.io/kubesphere/pkg/client/clientset/versioned/typed/tenant/v1alpha2"
typesv1beta1 "kubesphere.io/kubesphere/pkg/client/clientset/versioned/typed/types/v1beta1"
)
type Interface interface {
Discovery() discovery.DiscoveryInterface
ApplicationV1alpha1() applicationv1alpha1.ApplicationV1alpha1Interface
AuditingV1alpha1() auditingv1alpha1.AuditingV1alpha1Interface
ClusterV1alpha1() clusterv1alpha1.ClusterV1alpha1Interface
DevopsV1alpha1() devopsv1alpha1.DevopsV1alpha1Interface
DevopsV1alpha3() devopsv1alpha3.DevopsV1alpha3Interface
IamV1alpha2() iamv1alpha2.IamV1alpha2Interface
NetworkV1alpha1() networkv1alpha1.NetworkV1alpha1Interface
NotificationV2beta1() notificationv2beta1.NotificationV2beta1Interface
QuotaV1alpha2() quotav1alpha2.QuotaV1alpha2Interface
ServicemeshV1alpha2() servicemeshv1alpha2.ServicemeshV1alpha2Interface
StorageV1alpha1() storagev1alpha1.StorageV1alpha1Interface
TenantV1alpha1() tenantv1alpha1.TenantV1alpha1Interface
TenantV1alpha2() tenantv1alpha2.TenantV1alpha2Interface
TypesV1beta1() typesv1beta1.TypesV1beta1Interface
}
// Clientset contains the clients for groups. Each group has exactly one
// version included in a Clientset.
type Clientset struct {
*discovery.DiscoveryClient
applicationV1alpha1 *applicationv1alpha1.ApplicationV1alpha1Client
auditingV1alpha1 *auditingv1alpha1.AuditingV1alpha1Client
clusterV1alpha1 *clusterv1alpha1.ClusterV1alpha1Client
devopsV1alpha1 *devopsv1alpha1.DevopsV1alpha1Client
devopsV1alpha3 *devopsv1alpha3.DevopsV1alpha3Client
iamV1alpha2 *iamv1alpha2.IamV1alpha2Client
networkV1alpha1 *networkv1alpha1.NetworkV1alpha1Client
notificationV2beta1 *notificationv2beta1.NotificationV2beta1Client
quotaV1alpha2 *quotav1alpha2.QuotaV1alpha2Client
servicemeshV1alpha2 *servicemeshv1alpha2.ServicemeshV1alpha2Client
storageV1alpha1 *storagev1alpha1.StorageV1alpha1Client
tenantV1alpha1 *tenantv1alpha1.TenantV1alpha1Client
tenantV1alpha2 *tenantv1alpha2.TenantV1alpha2Client
typesV1beta1 *typesv1beta1.TypesV1beta1Client
}
// ApplicationV1alpha1 retrieves the ApplicationV1alpha1Client
func (c *Clientset) ApplicationV1alpha1() applicationv1alpha1.ApplicationV1alpha1Interface {
return c.applicationV1alpha1
}
// AuditingV1alpha1 retrieves the AuditingV1alpha1Client
func (c *Clientset) AuditingV1alpha1() auditingv1alpha1.AuditingV1alpha1Interface {
return c.auditingV1alpha1
}
// ClusterV1alpha1 retrieves the ClusterV1alpha1Client
func (c *Clientset) ClusterV1alpha1() clusterv1alpha1.ClusterV1alpha1Interface {
return c.clusterV1alpha1
}
// DevopsV1alpha1 retrieves the DevopsV1alpha1Client
func (c *Clientset) DevopsV1alpha1() devopsv1alpha1.DevopsV1alpha1Interface {
return c.devopsV1alpha1
}
// DevopsV1alpha3 retrieves the DevopsV1alpha3Client
func (c *Clientset) DevopsV1alpha3() devopsv1alpha3.DevopsV1alpha3Interface {
return c.devopsV1alpha3
}
// IamV1alpha2 retrieves the IamV1alpha2Client
func (c *Clientset) IamV1alpha2() iamv1alpha2.IamV1alpha2Interface {
return c.iamV1alpha2
}
// NetworkV1alpha1 retrieves the NetworkV1alpha1Client
func (c *Clientset) NetworkV1alpha1() networkv1alpha1.NetworkV1alpha1Interface {
return c.networkV1alpha1
}
// NotificationV2beta1 retrieves the NotificationV2beta1Client
func (c *Clientset) NotificationV2beta1() notificationv2beta1.NotificationV2beta1Interface {
return c.notificationV2beta1
}
// QuotaV1alpha2 retrieves the QuotaV1alpha2Client
func (c *Clientset) QuotaV1alpha2() quotav1alpha2.QuotaV1alpha2Interface {
return c.quotaV1alpha2
}
// ServicemeshV1alpha2 retrieves the ServicemeshV1alpha2Client
func (c *Clientset) ServicemeshV1alpha2() servicemeshv1alpha2.ServicemeshV1alpha2Interface {
return c.servicemeshV1alpha2
}
// StorageV1alpha1 retrieves the StorageV1alpha1Client
func (c *Clientset) StorageV1alpha1() storagev1alpha1.StorageV1alpha1Interface {
return c.storageV1alpha1
}
// TenantV1alpha1 retrieves the TenantV1alpha1Client
func (c *Clientset) TenantV1alpha1() tenantv1alpha1.TenantV1alpha1Interface {
return c.tenantV1alpha1
}
// TenantV1alpha2 retrieves the TenantV1alpha2Client
func (c *Clientset) TenantV1alpha2() tenantv1alpha2.TenantV1alpha2Interface {
return c.tenantV1alpha2
}
// TypesV1beta1 retrieves the TypesV1beta1Client
func (c *Clientset) TypesV1beta1() typesv1beta1.TypesV1beta1Interface {
return c.typesV1beta1
}
// Discovery retrieves the DiscoveryClient
func (c *Clientset) Discovery() discovery.DiscoveryInterface {
if c == nil {
return nil
}
return c.DiscoveryClient
}
// NewForConfig creates a new Clientset for the given config.
// If config's RateLimiter is not set and QPS and Burst are acceptable,
// NewForConfig will generate a rate-limiter in configShallowCopy.
func NewForConfig(c *rest.Config) (*Clientset, error) {
configShallowCopy := *c
if configShallowCopy.RateLimiter == nil && configShallowCopy.QPS > 0 {
if configShallowCopy.Burst <= 0 {
return nil, fmt.Errorf("burst is required to be greater than 0 when RateLimiter is not set and QPS is set to greater than 0")
}
configShallowCopy.RateLimiter = flowcontrol.NewTokenBucketRateLimiter(configShallowCopy.QPS, configShallowCopy.Burst)
}
var cs Clientset
var err error
cs.applicationV1alpha1, err = applicationv1alpha1.NewForConfig(&configShallowCopy)
if err != nil {
return nil, err
}
cs.auditingV1alpha1, err = auditingv1alpha1.NewForConfig(&configShallowCopy)
if err != nil {
return nil, err
}
cs.clusterV1alpha1, err = clusterv1alpha1.NewForConfig(&configShallowCopy)
if err != nil {
return nil, err
}
cs.devopsV1alpha1, err = devopsv1alpha1.NewForConfig(&configShallowCopy)
if err != nil {
return nil, err
}
cs.devopsV1alpha3, err = devopsv1alpha3.NewForConfig(&configShallowCopy)
if err != nil {
return nil, err
}
cs.iamV1alpha2, err = iamv1alpha2.NewForConfig(&configShallowCopy)
if err != nil {
return nil, err
}
cs.networkV1alpha1, err = networkv1alpha1.NewForConfig(&configShallowCopy)
if err != nil {
return nil, err
}
cs.notificationV2beta1, err = notificationv2beta1.NewForConfig(&configShallowCopy)
if err != nil {
return nil, err
}
cs.quotaV1alpha2, err = quotav1alpha2.NewForConfig(&configShallowCopy)
if err != nil {
return nil, err
}
cs.servicemeshV1alpha2, err = servicemeshv1alpha2.NewForConfig(&configShallowCopy)
if err != nil {
return nil, err
}
cs.storageV1alpha1, err = storagev1alpha1.NewForConfig(&configShallowCopy)
if err != nil {
return nil, err
}
cs.tenantV1alpha1, err = tenantv1alpha1.NewForConfig(&configShallowCopy)
if err != nil {
return nil, err
}
cs.tenantV1alpha2, err = tenantv1alpha2.NewForConfig(&configShallowCopy)
if err != nil {
return nil, err
}
cs.typesV1beta1, err = typesv1beta1.NewForConfig(&configShallowCopy)
if err != nil {
return nil, err
}
cs.DiscoveryClient, err = discovery.NewDiscoveryClientForConfig(&configShallowCopy)
if err != nil {
return nil, err
}
return &cs, nil
}
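// Usage sketch (an assumption, not part of the generated clientset): callers
// typically build a rest.Config first, e.g. via clientcmd from
// k8s.io/client-go/tools/clientcmd, then hand it to NewForConfig:
//   config, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
//   cs, err := NewForConfig(config)
//   tenants := cs.TenantV1alpha2()
// The kubeconfig path above is a placeholder.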
// NewForConfigOrDie creates a new Clientset for the given config and
// panics if there is an error in the config.
func | (c *rest.Config) *Clientset {
var cs Clientset
cs.applicationV1alpha1 = applicationv1alpha1.NewForConfigOrDie(c)
cs.auditingV1alpha1 = auditingv1alpha1.NewForConfigOrDie(c)
cs.clusterV1alpha1 = clusterv1alpha1.NewForConfigOrDie(c)
cs.devopsV1alpha1 = devopsv1alpha1.NewForConfigOrDie(c)
cs.devopsV1alpha3 = devopsv1alpha3.NewForConfigOrDie(c)
cs.iamV1alpha2 = iamv1alpha2.NewForConfigOrDie(c)
cs.networkV1alpha1 = networkv1alpha1.NewForConfigOrDie(c)
cs.notificationV2beta1 = notificationv2beta1.NewForConfigOrDie(c)
cs.quotaV1alpha2 = quotav1alpha2.NewForConfigOrDie(c)
cs.servicemeshV1alpha2 = servicemeshv1alpha2.NewForConfigOrDie(c)
cs.storageV1alpha1 = storagev1alpha1.NewForConfigOrDie(c)
cs.tenantV1alpha1 = tenantv1alpha1.NewForConfigOrDie(c)
cs.tenantV1alpha2 = tenantv1alpha2.NewForConfigOrDie(c)
cs.typesV1beta1 = typesv1beta1.NewForConfigOrDie(c)
cs.DiscoveryClient = discovery.NewDiscoveryClientForConfigOrDie(c)
return &cs
}
// New creates a new Clientset for the given RESTClient.
func New(c rest.Interface) *Clientset {
var cs Clientset
cs.applicationV1alpha1 = applicationv1alpha1.New(c)
cs.auditingV1alpha1 = auditingv1alpha1.New(c)
cs.clusterV1alpha1 = clusterv1alpha1.New(c)
cs.devopsV1alpha1 = devopsv1alpha1.New(c)
cs.devopsV1alpha3 = devopsv1alpha3.New(c)
cs.iamV1alpha2 = iamv1alpha2.New(c)
cs.networkV1alpha1 = networkv1alpha1.New(c)
cs.notificationV2beta1 = notificationv2beta1.New(c)
cs.quotaV1alpha2 = quotav1alpha2.New(c)
cs.servicemeshV1alpha2 = servicemeshv1alpha2.New(c)
cs.storageV1alpha1 = storagev1alpha1.New(c)
cs.tenantV1alpha1 = tenantv1alpha1.New(c)
cs.tenantV1alpha2 = tenantv1alpha2.New(c)
cs.typesV1beta1 = typesv1beta1.New(c)
cs.DiscoveryClient = discovery.NewDiscoveryClient(c)
return &cs
}
| NewForConfigOrDie |
workbench.test.tsx | import React from 'react';
import { act, fireEvent, render } from '@testing-library/react';
import '@testing-library/jest-dom';
import { WorkbenchView, Workbench } from '../workbench';
import {
ActivityBarModel,
IActivityBar,
IMenuBar,
IPanel,
ISidebar,
IStatusBar,
IWorkbench,
MenuBarModel,
PanelModel,
SidebarModel,
StatusBarModel,
} from 'mo/model';
import {
ILayout,
IPanelViewState,
ISidebarViewState,
LayoutModel,
ViewVisibility,
IMenuBarViewState,
MenuBarMode,
} from 'mo/model/workbench/layout';
import { select, selectAll } from 'mo/common/dom';
import {
sashHorizontalClassName,
splitClassName,
} from 'mo/components/split/base';
import { sleep } from '@test/utils';
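// The split panes hide a view by collapsing its wrapper to 0px rather than
// unmounting it, so expectElementInOrNot asserts visibility through the
// parent element's inline height (or width, when horizontal is false).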
function expectElementInOrNot(
ele: Element | null,
InDocument: boolean,
horizontal: boolean = true
) {
if (InDocument) {
expect(
ele?.parentElement?.style[horizontal ? 'height' : 'width']
).not.toBe('0px');
} else {
expect(ele?.parentElement?.style[horizontal ? 'height' : 'width']).toBe(
'0px'
);
}
}
/**
* Asserts that the basic views - Editor, Panel, Sidebar, ActivityBar,
* StatusBar, and MenuBar - are all visible in the document.
*/
function expectBasicPartsInTheDocument() {
expectElementInOrNot(select('.mo-editor'), true);
expectElementInOrNot(select('.mo-panel'), true);
expectElementInOrNot(select('.mo-sidebar'), true);
expect(select('.mo-activityBar')).toBeInTheDocument();
expect(select('.mo-statusBar')).toBeInTheDocument();
expect(select('.mo-menuBar')).toBeInTheDocument();
}
describe('Test Workbench Component', () => {
let original;
const observerFnCollection: any[] = [];
beforeEach(() => {
original = HTMLElement.prototype.getBoundingClientRect;
// @ts-ignore
HTMLElement.prototype.getBoundingClientRect = () => ({
width: 500,
height: 500,
});
global.ResizeObserver = jest.fn().mockImplementation((fn) => {
observerFnCollection.push(fn);
return {
observe: jest.fn(),
unobserve: jest.fn(),
disconnect: jest.fn(),
};
});
});
afterEach(() => {
HTMLElement.prototype.getBoundingClientRect = original;
observerFnCollection.length = 0;
});
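// jsdom implements neither layout nor ResizeObserver: the hooks above stub
// getBoundingClientRect with a fixed 500x500 box and collect every
// ResizeObserver callback in observerFnCollection so that tests can fire
// resize notifications by hand (see the ResizeObserver test below).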
function workbenchModel(): IWorkbench & ILayout {
const panel = new PanelModel();
const activityBar = new ActivityBarModel();
const menuBar = new MenuBarModel();
const statusBar = new StatusBarModel();
const sidebar = new SidebarModel();
const layout = new LayoutModel();
const panelState = Object.assign<IPanel, IPanelViewState>(
panel,
layout.panel
);
const activityBarState = Object.assign<IActivityBar, ViewVisibility>(
activityBar,
layout.activityBar
);
const menuBarState = Object.assign<IMenuBar, IMenuBarViewState>(
menuBar,
layout.menuBar
);
const statusBarState = Object.assign<IStatusBar, ViewVisibility>(
statusBar,
layout.statusBar
);
const sidebarState = Object.assign<ISidebar, ISidebarViewState>(
sidebar,
layout.sidebar
);
return {
panel: panelState,
activityBar: activityBarState,
menuBar: menuBarState,
statusBar: statusBarState,
sidebar: sidebarState,
splitPanePos: layout.splitPanePos,
horizontalSplitPanePos: layout.horizontalSplitPanePos,
groupSplitPos: layout.groupSplitPos,
};
}
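// workbenchModel merges each UI model with its matching layout view-state via
// Object.assign, yielding a single object that satisfies IWorkbench & ILayout,
// i.e. the props the connected Workbench would normally supply.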
test('Match the WorkbenchView snapshot', () => {
const component = render(<WorkbenchView {...workbenchModel()} />);
const tree = component.asFragment();
expect(tree).toMatchSnapshot();
});
test('Workbench should render all basic parts', () => {
render(<Workbench />);
expectBasicPartsInTheDocument();
});
test('WorkbenchView should render all basic parts', async () => {
const workbench = workbenchModel();
render(<WorkbenchView {...workbench} />);
expectBasicPartsInTheDocument();
});
test('Listen to the WorkbenchView onPaneSizeChange event', async () => {
const fn = jest.fn();
render(<WorkbenchView {...workbenchModel()} onPaneSizeChange={fn} />);
const sashes = selectAll<HTMLDivElement>('div[role="Resizer"]');
const wrapper = select<HTMLDivElement>(`.${splitClassName}`);
fireEvent.mouseDown(sashes[1]);
fireEvent.mouseMove(wrapper!, { screenX: 10, screenY: 10 });
fireEvent.mouseUp(wrapper!);
expect(fn).toBeCalled();
// Compare the splitPanePos arguments
expect(fn.mock.calls[0][0].length).toBe(2);
});
test('Listen to the WorkbenchView onHorizontalPaneSizeChange event', async () => {
const fn = jest.fn();
render(
<WorkbenchView
{...workbenchModel()}
onHorizontalPaneSizeChange={fn}
/>
);
const sashes = selectAll<HTMLDivElement>(`.${sashHorizontalClassName}`);
const wrapper = selectAll<HTMLDivElement>(`.${splitClassName}`)[1];
fireEvent.mouseDown(sashes[1]);
fireEvent.mouseMove(wrapper!, { screenX: 10, screenY: 10 });
fireEvent.mouseUp(wrapper!);
expect(fn).toBeCalled();
});
test('Hide the Panel view', async () => {
const workbench = workbenchModel();
const { rerender } = render(<WorkbenchView {...workbench} />);
expectElementInOrNot(select('.mo-panel'), true);
workbench.panel.hidden = true;
rerender(<WorkbenchView {...workbench} />);
expectElementInOrNot(select('.mo-panel'), false);
});
test('Maximize the Panel', async () => {
const workbench = workbenchModel();
const { rerender } = render(<WorkbenchView {...workbench} />);
expectElementInOrNot(select('.mo-editor'), true);
workbench.panel.panelMaximized = true;
rerender(<WorkbenchView {...workbench} />);
expectElementInOrNot(select('.mo-editor'), false);
workbench.panel.panelMaximized = false;
rerender(<WorkbenchView {...workbench} />);
expectElementInOrNot(select('.mo-editor'), true);
});
test('Set the panel hidden and panelMaximized', async () => {
const workbench = workbenchModel();
workbench.panel.panelMaximized = true;
const { rerender } = render(<WorkbenchView {...workbench} />);
expectElementInOrNot(select('.mo-editor'), false);
expectElementInOrNot(select('.mo-panel'), true);
workbench.panel.hidden = true;
workbench.panel.panelMaximized = true;
rerender(<WorkbenchView {...workbench} />);
expectElementInOrNot(select('.mo-editor'), true); | });
test('Hide the Sidebar', async () => {
const workbench = workbenchModel();
const { rerender } = render(<WorkbenchView {...workbench} />);
expectElementInOrNot(select('.mo-sidebar'), true, false);
workbench.sidebar.hidden = true;
rerender(<WorkbenchView {...workbench} />);
expectElementInOrNot(select('.mo-sidebar'), false, false);
});
test('Hide the StatusBar', async () => {
const workbench = workbenchModel();
const { rerender } = render(<WorkbenchView {...workbench} />);
expect(select('.mo-statusBar')).toBeInTheDocument();
workbench.statusBar.hidden = true;
rerender(<WorkbenchView {...workbench} />);
expect(select('.mo-statusBar')?.parentElement?.style.display).toBe(
'none'
);
});
test('Should support to change the layout mode of MenuBar', async () => {
const workbench = workbenchModel();
workbench.menuBar.mode = MenuBarMode.vertical;
const { rerender } = render(<WorkbenchView {...workbench} />);
expect(select('.mo-menuBar')).toBeInTheDocument();
workbench.menuBar.mode = MenuBarMode.horizontal;
rerender(<WorkbenchView {...workbench} />);
expect(select('.mo-menuBar--horizontal')).toBeInTheDocument();
});
test('Should resize panes when called on ResizeObserver', async () => {
const workbench = workbenchModel();
const horizontalMockFn = jest.fn();
const paneChangeMockFn = jest.fn();
render(
<WorkbenchView
{...workbench}
onHorizontalPaneSizeChange={horizontalMockFn}
onPaneSizeChange={paneChangeMockFn}
/>
);
await act(async () => {
// mock resize
// @ts-ignore
HTMLElement.prototype.getBoundingClientRect = () => ({
width: 1000,
height: 1000,
});
observerFnCollection.forEach((f) => f());
await sleep(150);
});
expect(horizontalMockFn).toBeCalled();
expect(horizontalMockFn.mock.calls[0][0]).toEqual([850, 150]);
expect(paneChangeMockFn).toBeCalled();
expect(paneChangeMockFn.mock.calls[0][0]).toEqual([300, 700]);
});
}); | expectElementInOrNot(select('.mo-panel'), false); |
camera.py | import threading
import binascii
from time import sleep
from utils import *
############################################################################
import base64
import io
from PIL import Image
def img_to_txt(filename):
msg = b"<plain_txt_msg:img>"
with open(filename, "rb") as imageFile:
msg = msg + base64.b64encode(imageFile.read())
msg = msg + b"<!plain_txt_msg>"
return msg
def decode_img(msg):
msg = msg[msg.find(b"<plain_txt_msg:img>")+len(b"<plain_txt_msg:img>"):
msg.find(b"<!plain_txt_msg>")]
msg = base64.b64decode(msg)
buf = io.BytesIO(msg)
img = Image.open(buf)
return img
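# The two helpers above implement a tiny framing protocol: the raw image
# bytes are base64-encoded and wrapped in <plain_txt_msg:img> ...
# <!plain_txt_msg> markers so an image can travel over a plain-text channel
# (a websocket text frame, for example) and be located again on the far end.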
# filename = 'test.png'
# msg = img_to_txt(filename)
# img = decode_img(msg)
# img.show()
#########################################################################
class Camera(object):
def __init__(self, makeup_artist):
self.to_process = []
self.to_output = []
self.makeup_artist = makeup_artist
thread = threading.Thread(target=self.keep_processing, args=())
thread.daemon = True
thread.start()
def process_one(self):
if not self.to_process:
return
# input is an ascii string.
input_str = self.to_process.pop(0)
# convert it to a PIL image
input_img = decode_img(input_str)
input_img.show()
# convert() returns a new image; re-assign, otherwise the call is a no-op
input_img = input_img.convert('1')
input_img.show()
################## where the hard work is done ############
# output_img is a PIL image
output_img = self.makeup_artist.apply_makeup(input_img)
# img_to_txt expects a filename, so serialize the PIL image to PNG bytes
# first and wrap them in the same markers
buf = io.BytesIO()
output_img.save(buf, format="PNG")
output_str = b"<plain_txt_msg:img>" + base64.b64encode(buf.getvalue()) + b"<!plain_txt_msg>"
# convert the base64 string in ascii to base64 string in _bytes_
self.to_output.append(binascii.a2b_base64(output_str))
def | (self):
while True:
self.process_one()
sleep(0.01)
def enqueue_input(self, input):
self.to_process.append(input)
def get_frame(self):
while not self.to_output:
sleep(0.05)
return self.to_output.pop(0) | keep_processing |
remote.rs | use std::cmp::min;
use std::collections::{BTreeMap, HashSet};
use std::convert::TryInto;
use std::fmt;
use std::ops::Range;
use std::sync::Arc;
use std::time::{Duration, Instant};
use async_oncecell::OnceCell;
use bytes::{Bytes, BytesMut};
use futures::Future;
use futures::StreamExt;
use grpc_util::retry::{retry_call, status_is_retryable};
use grpc_util::{headers_to_http_header_map, layered_service, status_to_str, LayeredService};
use hashing::Digest;
use log::Level;
use protos::gen::build::bazel::remote::execution::v2 as remexec;
use protos::gen::google::bytestream::byte_stream_client::ByteStreamClient;
use remexec::{
capabilities_client::CapabilitiesClient,
content_addressable_storage_client::ContentAddressableStorageClient, BatchUpdateBlobsRequest,
ServerCapabilities,
};
use tonic::{Code, Request, Status};
use workunit_store::{in_workunit, Metric, ObservationMetric, WorkunitMetadata};
#[derive(Clone)]
pub struct ByteStore {
instance_name: Option<String>,
chunk_size_bytes: usize,
_upload_timeout: Duration,
_rpc_attempts: usize,
byte_stream_client: Arc<ByteStreamClient<LayeredService>>,
cas_client: Arc<ContentAddressableStorageClient<LayeredService>>,
capabilities_cell: Arc<OnceCell<ServerCapabilities>>,
capabilities_client: Arc<CapabilitiesClient<LayeredService>>,
batch_api_size_limit: usize,
}
impl fmt::Debug for ByteStore {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "ByteStore(name={:?})", self.instance_name)
}
}
/// Represents an error from accessing a remote bytestore.
#[derive(Debug)]
pub enum ByteStoreError {
/// gRPC error
Grpc(Status),
/// Other errors
Other(String),
}
impl fmt::Display for ByteStoreError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ByteStoreError::Grpc(status) => fmt::Display::fmt(status, f),
ByteStoreError::Other(msg) => fmt::Display::fmt(msg, f),
}
}
}
impl std::error::Error for ByteStoreError {}
impl ByteStore {
// TODO: Consider extracting these options to a struct with `impl Default`, similar to
// `super::LocalOptions`.
pub fn new(
cas_address: &str,
instance_name: Option<String>,
tls_config: grpc_util::tls::Config,
mut headers: BTreeMap<String, String>,
chunk_size_bytes: usize,
upload_timeout: Duration,
rpc_retries: usize,
rpc_concurrency_limit: usize,
capabilities_cell_opt: Option<Arc<OnceCell<ServerCapabilities>>>,
batch_api_size_limit: usize,
) -> Result<ByteStore, String> {
let tls_client_config = if cas_address.starts_with("https://") {
Some(tls_config.try_into()?)
} else {
None
};
let endpoint =
grpc_util::create_endpoint(cas_address, tls_client_config.as_ref(), &mut headers)?;
let http_headers = headers_to_http_header_map(&headers)?;
let channel = layered_service(
tonic::transport::Channel::balance_list(vec![endpoint].into_iter()),
rpc_concurrency_limit,
http_headers,
);
let byte_stream_client = Arc::new(ByteStreamClient::new(channel.clone()));
let cas_client = Arc::new(ContentAddressableStorageClient::new(channel.clone()));
let capabilities_client = Arc::new(CapabilitiesClient::new(channel));
Ok(ByteStore {
instance_name,
chunk_size_bytes,
_upload_timeout: upload_timeout,
_rpc_attempts: rpc_retries + 1,
byte_stream_client,
cas_client,
capabilities_cell: capabilities_cell_opt.unwrap_or_else(|| Arc::new(OnceCell::new())),
capabilities_client,
batch_api_size_limit,
})
}
pub(crate) fn chunk_size_bytes(&self) -> usize {
self.chunk_size_bytes
}
pub async fn store_buffered<WriteToBuffer, WriteResult>(
&self,
digest: Digest,
mut write_to_buffer: WriteToBuffer,
) -> Result<(), String>
where
WriteToBuffer: FnMut(std::fs::File) -> WriteResult,
WriteResult: Future<Output = Result<(), String>>,
{
let write_buffer = tempfile::tempfile().map_err(|e| {
format!(
"Failed to create a temporary blob upload buffer for {digest:?}: {err}",
digest = digest,
err = e
)
})?;
let read_buffer = write_buffer.try_clone().map_err(|e| {
format!(
"Failed to create a read handle for the temporary upload buffer for {digest:?}: {err}",
digest = digest,
err = e
)
})?;
write_to_buffer(write_buffer).await?;
// Unsafety: Mmap presents an immutable slice of bytes, but the underlying file that is mapped
// could be mutated by another process. We guard against this by creating an anonymous
// temporary file and ensuring it is written to and closed via the only other handle to it in
// the code just above.
let mmap = Arc::new(unsafe {
let mapping = memmap::Mmap::map(&read_buffer).map_err(|e| {
format!(
"Failed to memory map the temporary file buffer for {digest:?}: {err}",
digest = digest,
err = e
)
})?;
if let Err(err) = madvise::madvise(
mapping.as_ptr(),
mapping.len(),
madvise::AccessPattern::Sequential,
) {
log::warn!(
"Failed to madvise(MADV_SEQUENTIAL) for the memory map of the temporary file buffer for \
{digest:?}. Continuing with possible reduced performance: {err}",
digest = digest,
err = err
)
}
Ok(mapping) as Result<memmap::Mmap, String>
}?);
retry_call(
mmap,
|mmap| self.store_bytes_source(digest, move |range| Bytes::copy_from_slice(&mmap[range])),
|err| match err {
ByteStoreError::Grpc(status) => status_is_retryable(status),
_ => false,
},
)
.await
.map_err(|err| match err {
ByteStoreError::Grpc(status) => status_to_str(status),
ByteStoreError::Other(msg) => msg,
})
}
pub async fn store_bytes(&self, bytes: Bytes) -> Result<(), String> {
let digest = Digest::of_bytes(&bytes);
retry_call(
bytes,
|bytes| self.store_bytes_source(digest, move |range| bytes.slice(range)),
|err| match err {
ByteStoreError::Grpc(status) => status_is_retryable(status),
_ => false,
},
)
.await
.map_err(|err| match err {
ByteStoreError::Grpc(status) => status_to_str(status),
ByteStoreError::Other(msg) => msg,
})
}
async fn store_bytes_source<ByteSource>(
&self,
digest: Digest,
bytes: ByteSource,
) -> Result<(), ByteStoreError>
where
ByteSource: Fn(Range<usize>) -> Bytes + Send + Sync + 'static,
{
let len = digest.size_bytes;
let max_batch_total_size_bytes = {
let capabilities = self.get_capabilities().await?;
capabilities
.cache_capabilities
.as_ref()
.map(|c| c.max_batch_total_size_bytes as usize)
.unwrap_or_default()
};
let batch_api_allowed_by_local_config = len <= self.batch_api_size_limit;
let batch_api_allowed_by_server_config =
max_batch_total_size_bytes == 0 || len < max_batch_total_size_bytes;
if batch_api_allowed_by_local_config && batch_api_allowed_by_server_config {
self.store_bytes_source_batch(digest, bytes).await
} else {
self.store_bytes_source_stream(digest, bytes).await
}
}
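// The rule above: prefer the unary BatchUpdateBlobs call for small blobs,
// bounded by both the local batch_api_size_limit and the server-advertised
// max_batch_total_size_bytes (where 0 means the server declares no limit),
// and fall back to the chunked ByteStream Write for anything larger.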
async fn store_bytes_source_batch<ByteSource>(
&self,
digest: Digest,
bytes: ByteSource,
) -> Result<(), ByteStoreError>
where
ByteSource: Fn(Range<usize>) -> Bytes + Send + Sync + 'static,
{
let request = BatchUpdateBlobsRequest {
instance_name: self.instance_name.clone().unwrap_or_default(),
requests: vec![remexec::batch_update_blobs_request::Request {
digest: Some(digest.into()),
data: bytes(0..digest.size_bytes),
}],
};
let mut client = self.cas_client.as_ref().clone();
client
.batch_update_blobs(request)
.await
.map_err(ByteStoreError::Grpc)?;
Ok(())
}
async fn store_bytes_source_stream<ByteSource>(
&self,
digest: Digest,
bytes: ByteSource,
) -> Result<(), ByteStoreError>
where
ByteSource: Fn(Range<usize>) -> Bytes + Send + Sync + 'static,
{
let len = digest.size_bytes;
let instance_name = self.instance_name.clone().unwrap_or_default();
let resource_name = format!(
"{}{}uploads/{}/blobs/{}/{}",
&instance_name,
if instance_name.is_empty() { "" } else { "/" },
uuid::Uuid::new_v4(),
digest.hash,
digest.size_bytes,
);
let workunit_name = format!("store_bytes({})", resource_name.clone());
let workunit_metadata = WorkunitMetadata {
level: Level::Debug,
..WorkunitMetadata::default()
};
let store = self.clone();
let mut client = self.byte_stream_client.as_ref().clone();
let chunk_size_bytes = store.chunk_size_bytes;
let stream = futures::stream::unfold((0, false), move |(offset, has_sent_any)| {
if offset >= len && has_sent_any {
futures::future::ready(None)
} else {
let next_offset = min(offset + chunk_size_bytes, len);
let req = protos::gen::google::bytestream::WriteRequest {
resource_name: resource_name.clone(),
write_offset: offset as i64,
finish_write: next_offset == len,
// TODO(tonic): Explore using the unreleased `Bytes` support in Prost from:
// https://github.com/danburkert/prost/pull/341
data: bytes(offset..next_offset),
};
futures::future::ready(Some((req, (next_offset, true))))
}
});
// NB: We must box the future to avoid a stack overflow.
let result_future = Box::pin(async move {
let response = client
.write(Request::new(stream))
.await
.map_err(ByteStoreError::Grpc)?;
let response = response.into_inner();
if response.committed_size == len as i64 {
Ok(())
} else {
Err(ByteStoreError::Other(format!(
"Uploading file with digest {:?}: want committed size {} but got {}",
digest, len, response.committed_size
)))
}
});
if let Some(workunit_store_handle) = workunit_store::get_workunit_store_handle() {
let workunit_store = workunit_store_handle.store;
in_workunit!(
workunit_store,
workunit_name,
workunit_metadata,
|workunit| async move {
let result = result_future.await;
if result.is_ok() {
workunit.increment_counter(Metric::RemoteStoreBlobBytesUploaded, len as u64);
}
result
},
)
.await
} else {
result_future.await
}
}
pub async fn load_bytes_with<
T: Send + 'static,
F: Fn(Bytes) -> Result<T, String> + Send + Sync + Clone + 'static,
>(
&self,
digest: Digest,
f: F,
) -> Result<Option<T>, ByteStoreError> {
let start = Instant::now();
let store = self.clone();
let instance_name = store.instance_name.clone().unwrap_or_default();
let resource_name = format!(
"{}{}blobs/{}/{}",
&instance_name,
if instance_name.is_empty() { "" } else { "/" },
digest.hash,
digest.size_bytes
);
let workunit_metadata = WorkunitMetadata {
level: Level::Trace,
desc: Some(format!("Loading bytes at: {resource_name}")),
..WorkunitMetadata::default()
};
let resource_name = resource_name.clone();
let f = f.clone();
let mut client = self.byte_stream_client.as_ref().clone();
let result_future = async move {
let start_time = Instant::now();
let stream_result = client
.read({
protos::gen::google::bytestream::ReadRequest {
resource_name: resource_name.clone(),
read_offset: 0,
// 0 means no limit.
read_limit: 0,
}
})
.await;
let mut stream = match stream_result {
Ok(response) => response.into_inner(),
Err(status) => {
return match status.code() {
Code::NotFound => Ok(None),
_ => Err(ByteStoreError::Grpc(status)),
}
}
};
let read_result_closure = async {
let mut got_first_response = false;
let mut buf = BytesMut::with_capacity(digest.size_bytes);
while let Some(response) = stream.next().await {
// Record the observed time to receive the first response for this read.
if !got_first_response {
got_first_response = true;
if let Some(workunit_store_handle) = workunit_store::get_workunit_store_handle() {
let timing: Result<u64, _> = Instant::now()
.duration_since(start_time)
.as_micros()
.try_into();
if let Ok(obs) = timing {
workunit_store_handle
.store
.record_observation(ObservationMetric::RemoteStoreTimeToFirstByte, obs);
}
}
}
buf.extend_from_slice(&(response?).data);
}
Ok(buf.freeze())
};
let read_result: Result<Bytes, tonic::Status> = read_result_closure.await;
let maybe_bytes = match read_result {
Ok(bytes) => Some(bytes),
Err(status) => {
if status.code() == tonic::Code::NotFound {
None
} else {
return Err(ByteStoreError::Grpc(status));
}
}
};
match maybe_bytes {
Some(b) => f(b).map(Some).map_err(ByteStoreError::Other),
None => Ok(None),
}
};
if let Some(workunit_store_handle) = workunit_store::get_workunit_store_handle() {
workunit_store_handle.store.record_observation(
ObservationMetric::RemoteStoreReadBlobTimeMicros,
start.elapsed().as_micros() as u64,
);
in_workunit!(
workunit_store_handle.store,
"load_bytes_with".to_owned(),
workunit_metadata,
|workunit| async move {
let result = result_future.await;
if result.is_ok() {
workunit.increment_counter(
Metric::RemoteStoreBlobBytesDownloaded,
digest.size_bytes as u64,
);
}
result
},
)
.await
} else {
result_future.await
}
}
///
/// Given a collection of Digests (digests),
/// returns the set of digests from that collection not present in the CAS.
///
pub fn list_missing_digests(
&self,
request: remexec::FindMissingBlobsRequest,
) -> impl Future<Output = Result<HashSet<Digest>, String>> {
let store = self.clone();
let workunit_name = format!(
"list_missing_digests({})",
store.instance_name.clone().unwrap_or_default()
);
let workunit_metadata = WorkunitMetadata {
level: Level::Debug,
..WorkunitMetadata::default()
};
let result_future = async move {
let store2 = store.clone();
let client = store2.cas_client.as_ref().clone();
let response = retry_call(
client,
move |mut client| {
let request = request.clone();
async move { client.find_missing_blobs(request).await }
},
status_is_retryable,
)
.await
.map_err(status_to_str)?;
response
.into_inner()
.missing_blob_digests
.iter()
.map(|digest| digest.try_into())
.collect::<Result<HashSet<_>, _>>()
};
async {
if let Some(workunit_store_handle) = workunit_store::get_workunit_store_handle() {
in_workunit!(
workunit_store_handle.store,
workunit_name,
workunit_metadata,
|_workunit| result_future,
)
.await
} else {
result_future.await
}
}
}
pub fn | <'a, Digests: Iterator<Item = &'a Digest>>(
&self,
digests: Digests,
) -> remexec::FindMissingBlobsRequest {
remexec::FindMissingBlobsRequest {
instance_name: self.instance_name.as_ref().cloned().unwrap_or_default(),
blob_digests: digests.map(|d| d.into()).collect::<Vec<_>>(),
}
}
async fn get_capabilities(&self) -> Result<&remexec::ServerCapabilities, ByteStoreError> {
let capabilities_fut = async {
let mut request = remexec::GetCapabilitiesRequest::default();
if let Some(s) = self.instance_name.as_ref() {
request.instance_name = s.clone();
}
let mut client = self.capabilities_client.as_ref().clone();
client
.get_capabilities(request)
.await
.map(|r| r.into_inner())
.map_err(ByteStoreError::Grpc)
};
self
.capabilities_cell
.get_or_try_init(capabilities_fut)
.await
}
}
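// Usage sketch (an assumption, not part of this module): a caller that wants
// to upload only the blobs a CAS is missing would combine the APIs above:
//   let request = store.find_missing_blobs_request(digests.iter());
//   let missing = store.list_missing_digests(request).await?;
//   for digest in missing { /* store_bytes / store_buffered per blob */ }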
| find_missing_blobs_request |
config.go | package run
import (
"fmt"
"io/ioutil"
"log"
"os"
"os/user"
"path/filepath"
"regexp"
"strings"
"github.com/BurntSushi/toml"
"github.com/influxdata/influxdb/coordinator"
"github.com/influxdata/influxdb/logger"
"github.com/influxdata/influxdb/monitor"
"github.com/influxdata/influxdb/monitor/diagnostics"
"github.com/influxdata/influxdb/pkg/tlsconfig"
"github.com/influxdata/influxdb/services/collectd"
"github.com/influxdata/influxdb/services/continuous_querier"
"github.com/influxdata/influxdb/services/graphite"
"github.com/influxdata/influxdb/services/httpd"
"github.com/influxdata/influxdb/services/meta"
"github.com/influxdata/influxdb/services/opentsdb"
"github.com/influxdata/influxdb/services/precreator"
"github.com/influxdata/influxdb/services/retention"
"github.com/influxdata/influxdb/services/subscriber"
"github.com/influxdata/influxdb/services/udp"
itoml "github.com/influxdata/influxdb/toml"
"github.com/influxdata/influxdb/tsdb"
"golang.org/x/text/encoding/unicode"
"golang.org/x/text/transform"
)
const (
// DefaultBindAddress is the default address for various RPC services.
DefaultBindAddress = "127.0.0.1:8088"
)
// Config represents the configuration format for the influxd binary.
type Config struct {
Meta *meta.Config `toml:"meta"`
Data tsdb.Config `toml:"data"`
Coordinator coordinator.Config `toml:"coordinator"`
Retention retention.Config `toml:"retention"`
Precreator precreator.Config `toml:"shard-precreation"`
Monitor monitor.Config `toml:"monitor"`
Subscriber subscriber.Config `toml:"subscriber"`
HTTPD httpd.Config `toml:"http"`
Logging logger.Config `toml:"logging"`
GraphiteInputs []graphite.Config `toml:"graphite"`
CollectdInputs []collectd.Config `toml:"collectd"`
OpenTSDBInputs []opentsdb.Config `toml:"opentsdb"`
UDPInputs []udp.Config `toml:"udp"`
ContinuousQuery continuous_querier.Config `toml:"continuous_queries"`
// Server reporting
ReportingDisabled bool `toml:"reporting-disabled"`
// BindAddress is the address that all TCP services use (Raft, Snapshot, Cluster, etc.)
BindAddress string `toml:"bind-address"`
// TLS provides configuration options for all https endpoints.
TLS tlsconfig.Config `toml:"tls"`
}
// NewConfig returns an instance of Config with reasonable defaults.
func NewConfig() *Config {
c := &Config{}
c.Meta = meta.NewConfig()
c.Data = tsdb.NewConfig()
c.Coordinator = coordinator.NewConfig()
c.Precreator = precreator.NewConfig()
c.Monitor = monitor.NewConfig()
c.Subscriber = subscriber.NewConfig()
c.HTTPD = httpd.NewConfig()
c.Logging = logger.NewConfig()
c.GraphiteInputs = []graphite.Config{graphite.NewConfig()}
c.CollectdInputs = []collectd.Config{collectd.NewConfig()}
c.OpenTSDBInputs = []opentsdb.Config{opentsdb.NewConfig()}
c.UDPInputs = []udp.Config{udp.NewConfig()}
c.ContinuousQuery = continuous_querier.NewConfig()
c.Retention = retention.NewConfig()
c.BindAddress = DefaultBindAddress
return c
}
// NewDemoConfig returns the config that runs when no config is specified.
func NewDemoConfig() (*Config, error) |
// FromTomlFile loads the config from a TOML file.
func (c *Config) FromTomlFile(fpath string) error {
bs, err := ioutil.ReadFile(fpath)
if err != nil {
return err
}
// Handle any potential Byte-Order-Marks that may be in the config file.
// This is for Windows compatibility only.
// See https://github.com/influxdata/telegraf/issues/1378 and
// https://github.com/influxdata/influxdb/issues/8965.
bom := unicode.BOMOverride(transform.Nop)
bs, _, err = transform.Bytes(bom, bs)
if err != nil {
return err
}
return c.FromToml(string(bs))
}
// FromToml loads the config from TOML.
func (c *Config) FromToml(input string) error {
// Replace deprecated [cluster] with [coordinator]
re := regexp.MustCompile(`(?m)^\s*\[cluster\]`)
input = re.ReplaceAllStringFunc(input, func(in string) string {
in = strings.TrimSpace(in)
out := "[coordinator]"
log.Printf("deprecated config option %s replaced with %s; %s will not be supported in a future release\n", in, out, in)
return out
})
_, err := toml.Decode(input, c)
return err
}
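// Sketch of the rewrite above: a legacy file beginning with
//   [cluster]
//   max-concurrent-queries = 2
// is decoded exactly as if the header read [coordinator].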
// Validate returns an error if the config is invalid.
func (c *Config) Validate() error {
if err := c.Meta.Validate(); err != nil {
return err
}
if err := c.Data.Validate(); err != nil {
return err
}
if err := c.Monitor.Validate(); err != nil {
return err
}
if err := c.ContinuousQuery.Validate(); err != nil {
return err
}
if err := c.Retention.Validate(); err != nil {
return err
}
if err := c.Precreator.Validate(); err != nil {
return err
}
if err := c.Subscriber.Validate(); err != nil {
return err
}
for _, graphite := range c.GraphiteInputs {
if err := graphite.Validate(); err != nil {
return fmt.Errorf("invalid graphite config: %v", err)
}
}
for _, collectd := range c.CollectdInputs {
if err := collectd.Validate(); err != nil {
return fmt.Errorf("invalid collectd config: %v", err)
}
}
if err := c.TLS.Validate(); err != nil {
return err
}
return nil
}
// ApplyEnvOverrides apply the environment configuration on top of the config.
func (c *Config) ApplyEnvOverrides(getenv func(string) string) error {
return itoml.ApplyEnvOverrides(getenv, "INFLUXDB", c)
}
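// For example (a sketch; the exact key mapping is handled by itoml): setting
// INFLUXDB_DATA_DIR overrides Data.Dir, and INFLUXDB_HTTP_BIND_ADDRESS
// overrides the [http] bind-address, mirroring the TOML layout with an
// INFLUXDB_ prefix and underscores.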
// Diagnostics returns a diagnostics representation of Config.
func (c *Config) Diagnostics() (*diagnostics.Diagnostics, error) {
return diagnostics.RowFromMap(map[string]interface{}{
"reporting-disabled": c.ReportingDisabled,
"bind-address": c.BindAddress,
}), nil
}
func (c *Config) diagnosticsClients() map[string]diagnostics.Client {
// Config settings that are always present.
m := map[string]diagnostics.Client{
"config": c,
"config-data": c.Data,
"config-meta": c.Meta,
"config-coordinator": c.Coordinator,
"config-retention": c.Retention,
"config-precreator": c.Precreator,
"config-monitor": c.Monitor,
"config-subscriber": c.Subscriber,
"config-httpd": c.HTTPD,
"config-cqs": c.ContinuousQuery,
}
// Config settings that can be repeated and can be disabled.
if g := graphite.Configs(c.GraphiteInputs); g.Enabled() {
m["config-graphite"] = g
}
if cc := collectd.Configs(c.CollectdInputs); cc.Enabled() {
m["config-collectd"] = cc
}
if t := opentsdb.Configs(c.OpenTSDBInputs); t.Enabled() {
m["config-opentsdb"] = t
}
if u := udp.Configs(c.UDPInputs); u.Enabled() {
m["config-udp"] = u
}
return m
}
// registerDiagnostics registers the config settings with the Monitor.
func (c *Config) registerDiagnostics(m *monitor.Monitor) {
m.DataDir = c.Data.Dir
for name, dc := range c.diagnosticsClients() {
m.RegisterDiagnosticsClient(name, dc)
}
}
// deregisterDiagnostics deregisters the config settings from the Monitor.
func (c *Config) deregisterDiagnostics(m *monitor.Monitor) {
for name := range c.diagnosticsClients() {
m.DeregisterDiagnosticsClient(name)
}
}
| {
c := NewConfig()
var homeDir string
// By default, store meta and data files in the current user's home directory
u, err := user.Current()
if err == nil {
homeDir = u.HomeDir
} else if os.Getenv("HOME") != "" {
homeDir = os.Getenv("HOME")
} else {
return nil, fmt.Errorf("failed to determine current user for storage")
}
c.Meta.Dir = filepath.Join(homeDir, ".influxdb/meta")
c.Data.Dir = filepath.Join(homeDir, ".influxdb/data")
c.Data.WALDir = filepath.Join(homeDir, ".influxdb/wal")
return c, nil
} |
app.js | 'use strict';
console.log('app.js loaded');
let photoTemplateId = "#photo-template";
let animals = [];
function Animal(animal) {
for (let key in animal)
this[key] = animal[key];
};
Animal.prototype.toHtml = function () {
let template = $(photoTemplateId).html();
let html = Mustache.render(template, this);
return html;
};
Animal.prototype.addOption = function () {
if (($('#animalType').find('.option-' + this.keyword)).length) {
return;
}
let $option = $(`<option value='${this.keyword}' class='option-${this.keyword}'>${this.keyword}</option>`);
$('#animalType').append($option);
};
Animal.readJson = (pgNum) => {
$.getJSON(`data/page-${pgNum}.json`)
.then(data => {
data.forEach(animal => {
let thisAnimal = new Animal(animal);
thisAnimal.addOption();
animals.push(thisAnimal);
// $('main').append(thisAnimal.toHtml());
});
$('#animalSort').trigger('change');
})
};
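// Shape of data/page-N.json, inferred from the usage above: an array of
// objects carrying at least { title, keyword, horns } plus whatever fields
// the Mustache photo template renders.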
// read in page-1.json initially
$(() => Animal.readJson(1));
// event for select drop-down menu
$('#animalType').on('change', function () {
if (this.value === 'default') {
$('section').show();
$('#photo-template').hide();
}
else {
$('section').hide();
$(`.${this.value}`).show();
console.log(this.value);
}
});
// event for clicking on page1 | page2 buttons
$('button').on('click', function () {
$("section").remove();
animals = [];
// the document is already ready here, so call readJson directly
Animal.readJson(this.value);
});
$('#animalSort').on('change', function(){
$('section').remove();
console.log(this.value)
if (this.value === 'sortByTitle'){
animals.sort((a,b) => a.title > b.title ? 1:-1);
}else if (this.value === 'sortByHornAscend'){
animals.sort((a,b) => a.horns - b.horns);
}else if (this.value === 'sortByHornDescend'){ | $('#animalType').trigger('change');
}); | animals.sort((a,b) => b.horns - a.horns);
};
animals.forEach(animal => $('main').append(animal.toHtml())); |
index.ts | import {
insert,
spread,
assign,
createComponent,
delegateEvents
} from "dom-expressions/src/runtime";
interface Runtime {
insert: typeof insert;
spread: typeof spread;
assign: typeof assign;
createComponent: typeof createComponent;
delegateEvents: typeof delegateEvents;
}
type ExpandableNode = Node & { [key: string]: any };
type Props = { [key: string]: any };
export type HyperScript = {
(...args: any[]): ExpandableNode | ExpandableNode[];
};
// Inspired by https://github.com/hyperhype/hyperscript
export function createHyperScript(r: Runtime): HyperScript {
function h() {
let args: any = [].slice.call(arguments),
e: ExpandableNode | undefined,
multiExpression = false,
delegatedEvents = new Set<string>();
function item(l: any) {
const type = typeof l;
if (l == null) void 0;
else if ("string" === type) {
if (!e) parseClass(l);
else e.appendChild(document.createTextNode(l));
} else if (
"number" === type ||
"boolean" === type ||
l instanceof Date ||
l instanceof RegExp
) {
(e as Node).appendChild(document.createTextNode(l.toString()));
} else if (Array.isArray(l)) {
for (let i = 0; i < l.length; i++) item(l[i]);
} else if (l instanceof Element) {
r.insert(e as Element, l, undefined, multiExpression ? null : undefined);
} else if ("object" === type) {
let dynamic = false;
for (const k in l) {
if (typeof l[k] === "function" && k !== "ref" && k.slice(0, 2) !== "on") {
dynamicProperty(l, k);
dynamic = true;
}
}
dynamic
? r.spread(e as Element, l, e instanceof SVGElement, !!args.length)
: r.assign(e as Element, l, e instanceof SVGElement, !!args.length);
} else if ("function" === type) {
if (!e) {
let props: Props = {},
dynamic = [],
next = args[0];
if (typeof next === "object" && !Array.isArray(next) && !(next instanceof Element))
props = args.shift();
for (const k in props) {
if (typeof props[k] === "function") dynamic.push(k);
}
props.children = args.length > 1 ? args : args[0];
if (props.children && typeof props.children === "function" && !props.children.length)
dynamic.push("children");
e = r.createComponent(l, props, dynamic);
args = [];
} else r.insert(e as Element, l, undefined, multiExpression ? null : undefined);
}
}
typeof args[0] === "string" && detectMultiExpression(args);
while (args.length) item(args.shift());
r.delegateEvents(Array.from(delegatedEvents));
return e as ExpandableNode;
function | (string: string) {
// Our minimal parser doesn’t understand escaping CSS special
// characters like `#`. Don’t use them. More reading:
// https://mathiasbynens.be/notes/css-escapes .
const m = string.split(/([\.#]?[^\s#.]+)/);
if (/^\.|#/.test(m[1])) e = document.createElement("div");
for (let i = 0; i < m.length; i++) {
const v = m[i],
s = v.substring(1, v.length);
if (!v) continue;
if (!e) e = document.createElement(v);
else if (v[0] === ".") e.classList.add(s);
else if (v[0] === "#") e.setAttribute("id", s);
}
}
function detectMultiExpression(list: any[]) {
for (let i = 1; i < list.length; i++) {
if (typeof list[i] === "function") {
multiExpression = true;
return;
} else if (Array.isArray(list[i])) {
detectMultiExpression(list[i]);
}
}
}
}
return h;
}
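// Usage sketch (assumes a runtime `r` already wired to dom-expressions):
//   const h = createHyperScript(r);
//   const el = h("div#app.card", { title: "hi" }, h("span", "hello"));
// parseClass splits "div#app.card" into the tag, the id and the class parts.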
function dynamicProperty(props: any, key: string) {
const src = props[key];
Object.defineProperty(props, key, {
get() {
return src();
},
enumerable: true
});
}
| parseClass |
index.js | import React from "react";
import ReactDOM from "react-dom";
import MUIDataTable from "../../src/";
class Example extends React.Component {
state = {
filterList: [
['Franky Miles'],
['Business Analyst'],
[],
[],
[]
],
filterOptions: ['this', 'test', 'is', 'working'],
display: ['true', 'true', 'true', 'true', 'true'],
data: [
["Gabby George", "Business Analyst", "Minneapolis", 30, 100000],
["Business Analyst", "Business Consultant", "Dallas", 55, 200000],
["Jaden Collins", "Attorney", "Santa Ana", 27, 500000],
["Franky Rees", "Business Analyst", "St. Petersburg", 22, 50000],
["Aaren Rose", "Business Consultant", "Toledo", 28, 75000],
["Blake Duncan", "Business Management Analyst", "San Diego", 65, 94000],
["Frankie Parry", "Agency Legal Counsel", "Jacksonville", 71, 210000],
["Lane Wilson", "Commercial Specialist", "Omaha", 19, 65000],
["Robin Duncan", "Business Analyst", "Los Angeles", 20, 77000],
["Mel Brooks", "Business Consultant", "Oklahoma City", 37, 135000],
["Harper White", "Attorney", "Pittsburgh", 52, 420000],
["Kris Humphrey", "Agency Legal Counsel", "Laredo", 30, 150000],
["Frankie Long", "Industrial Analyst", "Austin", 31, 170000],
["Brynn Robbins", "Business Analyst", "Norfolk", 22, 90000],
["Justice Mann", "Business Consultant", "Chicago", 24, 133000],
["Addison Navarro", "Business Management Analyst", "New York", 50, 295000],
["Jesse Welch", "Agency Legal Counsel", "Seattle", 28, 200000],
["Eli Mejia", "Commercial Specialist", "Long Beach", 65, 400000],
["Gene Leblanc", "Industrial Analyst", "Hartford", 34, 110000],
["Danny Leon", "Computer Scientist", "Newark", 60, 220000],
["Lane Lee", "Corporate Counselor", "Cincinnati", 52, 180000],
["Jesse Hall", "Business Analyst", "Baltimore", 44, 99000],
["Danni Hudson", "Agency Legal Counsel", "Tampa", 37, 90000],
["Terry Macdonald", "Commercial Specialist", "Miami", 39, 140000],
["Justice Mccarthy", "Attorney", "Tucson", 26, 330000],
["Silver Carey", "Computer Scientist", "Memphis", 47, 250000],
["Franky Miles", "Industrial Analyst", "Buffalo", 49, 190000],
["Glen Nixon", "Corporate Counselor", "Arlington", 44, 80000],
["Gabby Strickland", "Business Process Consultant", "Scottsdale", 26, 45000],
["Mason Ray", "Computer Scientist", "San Francisco", 39, 142000]
]
}
handleFilterNameChange = (event) => {
let string = prompt("Write a semicolon-separated string to change filter names in the first column!");
if (string) this.setState({ filterOptions: string.split(';') });
}
handleAddData = (event) => {
const string = prompt("Write a semicolon-separated string with values for 'Name', 'Title', 'Location', 'Age' and 'Salary' to add a new row of data!");
if (string) this.setState({ data: [string.split(';'), ...this.state.data] });
}
handleChangeDisplay = (event) => {
const string = prompt("Write a semicolon-separated string of display options for each of the 5 columns. Options are either 'true', 'false', or 'excluded'");
if (string) this.setState({ display: string.split(';') });
}
render() {
const { data, filterList, filterOptions } = this.state;
const columns = [
{
name: "Name",
options: { | customFilterListOptions: { render: v => `Name: ${v}` },
filterOptions: {
names: filterOptions
},
}
},
{
name: "Title",
options: {
display: this.state.display[1],
filter: true,
filterList: filterList[1].length ? filterList[1] : null,
customFilterListOptions: { render: v => `Title: ${v}` },
filterType: 'textField' // set filterType's at the column level
}
},
{
name: "Location",
options: {
display: this.state.display[2],
filter: false,
filterList: filterList[2].length ? filterList[2] : null,
}
},
{
name: "Age",
options: {
display: this.state.display[3],
filter: true,
filterList: filterList[3].length ? filterList[3] : null,
customFilterListOptions: { render: v => `Age: ${v}` },
}
},
{
name: "Salary",
options: {
display: this.state.display[4],
filter: true,
filterList: filterList[4].length ? filterList[4] : null,
customFilterListOptions: { render: v => `Salary: ${v}` },
sort: false
}
}
];
const options = {
filter: true,
onFilterChange: (changedColumn, newFilterList) => {
this.setState({ filterList: newFilterList });
},
selectableRows: 'multiple',
filterType: 'dropdown',
responsive: 'stacked',
rowsPerPage: 10,
page: 1,
};
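// onFilterChange keeps this.state.filterList as the single source of truth,
// which is what lets the "Set starter filters!" button below reset the
// table's filters from outside the component.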
return (
<React.Fragment>
<MUIDataTable title={"ACME Employee list"} data={data} columns={columns} options={options} />
<button onClick={() => this.setState({
filterList: [
['Franky Miles'],
['Business Analyst'],
[],
[],
[]
]
})}
>
Set starter filters!
</button>
<button onClick={this.handleFilterNameChange}>Change filter names for first column!</button>
<button onClick={this.handleAddData}>Add row data!</button>
<button onClick={this.handleChangeDisplay}>Change which columns are displayed!</button>
</React.Fragment>
);
}
}
export default Example; | filter: true,
display: this.state.display[0],
filterList: filterList[0].length ? filterList[0] : null, |
lib.rs | extern crate vecmath;
extern crate xml;
#[macro_use]
extern crate log;
pub use obj::*;
pub use vecmath::Matrix4;
pub mod document;
mod obj;
mod utils;
#[derive(Debug, Clone)]
pub struct Skeleton {
///
/// All joints in the skeleton
///
pub joints: Vec<Joint>,
///
/// Default parent-relative transforms for each joint (at time of vertex binding)
/// Column-major.
///
pub bind_poses: Vec<Matrix4<f32>>,
}
#[derive(Debug, Clone)]
pub struct Joint {
///
/// Name of joint
///
pub name: String,
///
/// Index of parent joint in Skeleton's 'joints' vector
///
pub parent_index: JointIndex,
///
/// Matrix transforming vertex coordinates from model-space to joint-space
/// Column-major.
///
pub inverse_bind_pose: Matrix4<f32>,
}
impl Joint {
pub fn is_root(&self) -> bool {
self.parent_index == ROOT_JOINT_PARENT_INDEX
}
}
///
/// A COLLADA animation consists of mapping of sample times to pose transforms
/// for a single node in the scene (usually a skeleton joint)
///
/// Note - COLLADA supports animating arbitrary 'outputs', not just pose transforms,
/// (eg colors, texture offsets, etc), but we'll leave those unsupported for now.
///
#[derive(Debug)]
pub struct Animation {
///
/// The node (joint) this animation is targeting
///
pub target: String,
///
/// Times for each sample (in seconds)
///
pub sample_times: Vec<f32>,
///
/// Node pose transforms for each sample.
/// Column-major.
///
pub sample_poses: Vec<Matrix4<f32>>,
}
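// A minimal sampling sketch (an assumption, not provided by this crate):
// given a time t, take the last sample at or before t and reuse its pose:
//   let i = anim.sample_times.iter().rposition(|&s| s <= t).unwrap_or(0);
//   let pose = anim.sample_poses[i];
// A real player would interpolate between samples i and i + 1 instead.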
///
/// Skeleton-Mesh Binding Data
///
#[derive(Debug)]
pub struct BindDataSet {
pub bind_data: Vec<BindData>,
}
#[derive(Debug)]
pub struct BindData {
pub object_name: String,
pub skeleton_name: Option<String>,
pub joint_names: Vec<String>,
/// Vertex weights, for vertex by index in mesh and joint by index in 'joint_names'
/// and weight by index in 'weights'
pub vertex_weights: Vec<VertexWeight>,
/// Weight values that are indexed by VertexWeights
pub weights: Vec<f32>,
/// Inverse bind pose matrices listed in order of joint_names
/// Column-major
pub inverse_bind_poses: Vec<Matrix4<f32>>,
}
#[derive(Debug, Copy, Clone)]
pub struct VertexWeight {
pub vertex: VertexIndex,
pub joint: JointIndex,
pub weight: WeightIndex,
} |
pub type JointIndex = u8;
pub const ROOT_JOINT_PARENT_INDEX: JointIndex = 255u8; |
pub type WeightIndex = usize; |
dataflow_runner.py | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A runner implementation that submits a job for remote execution.
The runner will create a JSON description of the job graph and then submit it
to the Dataflow Service for remote execution by a worker.
"""
# pytype: skip-file
from __future__ import absolute_import
from __future__ import division
import base64
import json
import logging
import os
import subprocess
import sys
import threading
import time
import traceback
import urllib
from builtins import hex
from collections import defaultdict
from typing import TYPE_CHECKING
from typing import List
from future.utils import iteritems
import apache_beam as beam
from apache_beam import coders
from apache_beam import error
from apache_beam import pvalue
from apache_beam.internal import pickler
from apache_beam.internal.gcp import json_value
from apache_beam.options.pipeline_options import DebugOptions
from apache_beam.options.pipeline_options import GoogleCloudOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.options.pipeline_options import StandardOptions
from apache_beam.options.pipeline_options import TestOptions
from apache_beam.options.pipeline_options import WorkerOptions
from apache_beam.portability import common_urns
from apache_beam.portability.api import beam_runner_api_pb2
from apache_beam.pvalue import AsSideInput
from apache_beam.runners.common import DoFnSignature
from apache_beam.runners.dataflow.internal import names
from apache_beam.runners.dataflow.internal.clients import dataflow as dataflow_api
from apache_beam.runners.dataflow.internal.names import PropertyNames
from apache_beam.runners.dataflow.internal.names import TransformNames
from apache_beam.runners.runner import PipelineResult
from apache_beam.runners.runner import PipelineRunner
from apache_beam.runners.runner import PipelineState
from apache_beam.runners.runner import PValueCache
from apache_beam.transforms import window
from apache_beam.transforms.core import RunnerAPIPTransformHolder
from apache_beam.transforms.display import DisplayData
from apache_beam.transforms.sideinputs import SIDE_INPUT_PREFIX
from apache_beam.typehints import typehints
from apache_beam.utils import processes
from apache_beam.utils import proto_utils
from apache_beam.utils.interactive_utils import is_in_notebook
from apache_beam.utils.plugin import BeamPlugin
if TYPE_CHECKING:
from apache_beam.pipeline import PTransformOverride
if sys.version_info[0] > 2:
unquote_to_bytes = urllib.parse.unquote_to_bytes
quote = urllib.parse.quote
else:
unquote_to_bytes = urllib.unquote # pylint: disable=deprecated-urllib-function
quote = urllib.quote # pylint: disable=deprecated-urllib-function
__all__ = ['DataflowRunner']
_LOGGER = logging.getLogger(__name__)
BQ_SOURCE_UW_ERROR = (
'The Read(BigQuerySource(...)) transform is not supported with newer stack '
'features (Fn API, Dataflow Runner V2, etc). Please use the transform '
'apache_beam.io.gcp.bigquery.ReadFromBigQuery instead.')
class DataflowRunner(PipelineRunner):
"""A runner that creates job graphs and submits them for remote execution.
Every execution of the run() method will submit an independent job for
remote execution that consists of the nodes reachable from the passed in
node argument or entire graph if node is None. The run() method returns
after the service created the job and will not wait for the job to finish
if blocking is set to False.
"""
# A list of PTransformOverride objects to be applied before running a pipeline
# using DataflowRunner.
# Currently this only works for overrides where the input and output types do
# not change.
# For internal SDK use only. This should not be updated by Beam pipeline
# authors.
# Imported here to avoid circular dependencies.
# TODO: Remove the apache_beam.pipeline dependency in CreatePTransformOverride
from apache_beam.runners.dataflow.ptransform_overrides import CombineValuesPTransformOverride
from apache_beam.runners.dataflow.ptransform_overrides import CreatePTransformOverride
from apache_beam.runners.dataflow.ptransform_overrides import ReadPTransformOverride
from apache_beam.runners.dataflow.ptransform_overrides import JrhReadPTransformOverride
# These overrides should be applied before the proto representation of the
# graph is created.
_PTRANSFORM_OVERRIDES = [
CombineValuesPTransformOverride()
] # type: List[PTransformOverride]
_JRH_PTRANSFORM_OVERRIDES = [
JrhReadPTransformOverride(),
] # type: List[PTransformOverride]
# These overrides should be applied after the proto representation of the
# graph is created.
_NON_PORTABLE_PTRANSFORM_OVERRIDES = [
CreatePTransformOverride(),
ReadPTransformOverride(),
] # type: List[PTransformOverride]
def __init__(self, cache=None):
# Cache of CloudWorkflowStep protos generated while the runner
# "executes" a pipeline.
self._cache = cache if cache is not None else PValueCache()
self._unique_step_id = 0
def is_fnapi_compatible(self):
return False
def apply(self, transform, input, options):
self._maybe_add_unified_worker_missing_options(options)
return super(DataflowRunner, self).apply(transform, input, options)
def _get_unique_step_name(self):
self._unique_step_id += 1
return 's%s' % self._unique_step_id
@staticmethod
def poll_for_job_completion(runner, result, duration):
"""Polls for the specified job to finish running (successfully or not).
Updates the result with the new job information before returning.
Args:
runner: DataflowRunner instance to use for polling job state.
result: DataflowPipelineResult instance used for job information.
duration (int): The time to wait (in milliseconds) for job to finish.
If it is set to :data:`None`, it will wait indefinitely until the job
is finished.
"""
last_message_time = None
current_seen_messages = set()
last_error_rank = float('-inf')
last_error_msg = None
last_job_state = None
# How long to wait after pipeline failure for the error
# message to show up giving the reason for the failure.
# It typically takes about 30 seconds.
final_countdown_timer_secs = 50.0
sleep_secs = 5.0
# Try to prioritize the user-level traceback, if any.
def rank_error(msg):
if 'work item was attempted' in msg:
return -1
elif 'Traceback' in msg:
return 1
return 0
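# e.g. rank_error('Traceback (most recent call last): ...') == 1, while a
# generic 'work item was attempted 4 times' notice ranks at -1, so the
# user-level traceback wins whenever both kinds of message appear.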
if duration:
start_secs = time.time()
duration_secs = duration // 1000
job_id = result.job_id()
while True:
response = runner.dataflow_client.get_job(job_id)
# If get() is called very soon after Create() the response may not contain
# an initialized 'currentState' field.
if response.currentState is not None:
if response.currentState != last_job_state:
_LOGGER.info('Job %s is in state %s', job_id, response.currentState)
last_job_state = response.currentState
if str(response.currentState) != 'JOB_STATE_RUNNING':
# Stop checking for new messages on timeout, explanatory
# message received, success, or a terminal job state caused
# by the user that therefore doesn't require explanation.
if (final_countdown_timer_secs <= 0.0 or last_error_msg is not None or
str(response.currentState) == 'JOB_STATE_DONE' or
str(response.currentState) == 'JOB_STATE_CANCELLED' or
str(response.currentState) == 'JOB_STATE_UPDATED' or
str(response.currentState) == 'JOB_STATE_DRAINED'):
break
# Check that job is in a post-preparation state before starting the
# final countdown.
if (str(response.currentState) not in ('JOB_STATE_PENDING',
'JOB_STATE_QUEUED')):
# The job has failed; ensure we see any final error messages.
sleep_secs = 1.0 # poll faster during the final countdown
final_countdown_timer_secs -= sleep_secs
time.sleep(sleep_secs)
# Get all messages since beginning of the job run or since last message.
page_token = None
while True:
messages, page_token = runner.dataflow_client.list_messages(
job_id, page_token=page_token, start_time=last_message_time)
for m in messages:
message = '%s: %s: %s' % (m.time, m.messageImportance, m.messageText)
if not last_message_time or m.time > last_message_time:
last_message_time = m.time
current_seen_messages = set()
if message in current_seen_messages:
# Skip the message if it has already been seen at the current
# time. This could be the case since the list_messages API is
# queried starting at last_message_time.
continue
else:
current_seen_messages.add(message)
# Skip empty messages.
if m.messageImportance is None:
continue
_LOGGER.info(message)
if str(m.messageImportance) == 'JOB_MESSAGE_ERROR':
if rank_error(m.messageText) >= last_error_rank:
last_error_rank = rank_error(m.messageText)
last_error_msg = m.messageText
if not page_token:
break
if duration:
passed_secs = time.time() - start_secs
if passed_secs > duration_secs:
_LOGGER.warning(
'Timing out on waiting for job %s after %d seconds',
job_id,
passed_secs)
break
result._job = response
runner.last_error_msg = last_error_msg
@staticmethod
def _only_element(iterable):
# type: (Iterable[T]) -> T
|
@staticmethod
def group_by_key_input_visitor():
# Imported here to avoid circular dependencies.
from apache_beam.pipeline import PipelineVisitor
class GroupByKeyInputVisitor(PipelineVisitor):
"""A visitor that replaces `Any` element type for input `PCollection` of
a `GroupByKey` or `_GroupByKeyOnly` with a `KV` type.
TODO(BEAM-115): Once Python SDK is compatible with the new Runner API,
we could directly replace the coder instead of mutating the element type.
"""
def enter_composite_transform(self, transform_node):
self.visit_transform(transform_node)
def visit_transform(self, transform_node):
# Imported here to avoid circular dependencies.
# pylint: disable=wrong-import-order, wrong-import-position
from apache_beam.transforms.core import GroupByKey, _GroupByKeyOnly
if isinstance(transform_node.transform, (GroupByKey, _GroupByKeyOnly)):
pcoll = transform_node.inputs[0]
pcoll.element_type = typehints.coerce_to_kv_type(
pcoll.element_type, transform_node.full_label)
key_type, value_type = pcoll.element_type.tuple_types
if transform_node.outputs:
key = DataflowRunner._only_element(transform_node.outputs.keys())
transform_node.outputs[key].element_type = typehints.KV[
key_type, typehints.Iterable[value_type]]
return GroupByKeyInputVisitor()
@staticmethod
def _set_pdone_visitor(pipeline):
# Imported here to avoid circular dependencies.
from apache_beam.pipeline import PipelineVisitor
class SetPDoneVisitor(PipelineVisitor):
def __init__(self, pipeline):
self._pipeline = pipeline
@staticmethod
def _maybe_fix_output(transform_node, pipeline):
if not transform_node.outputs:
pval = pvalue.PDone(pipeline)
pval.producer = transform_node
transform_node.outputs = {None: pval}
def enter_composite_transform(self, transform_node):
SetPDoneVisitor._maybe_fix_output(transform_node, self._pipeline)
def visit_transform(self, transform_node):
SetPDoneVisitor._maybe_fix_output(transform_node, self._pipeline)
return SetPDoneVisitor(pipeline)
@staticmethod
def side_input_visitor(use_unified_worker=False):
# Imported here to avoid circular dependencies.
# pylint: disable=wrong-import-order, wrong-import-position
from apache_beam.pipeline import PipelineVisitor
from apache_beam.transforms.core import ParDo
class SideInputVisitor(PipelineVisitor):
"""Ensures input `PCollection` used as a side inputs has a `KV` type.
TODO(BEAM-115): Once Python SDK is compatible with the new Runner API,
we could directly replace the coder instead of mutating the element type.
"""
def visit_transform(self, transform_node):
if isinstance(transform_node.transform, ParDo):
new_side_inputs = []
for ix, side_input in enumerate(transform_node.side_inputs):
access_pattern = side_input._side_input_data().access_pattern
if access_pattern == common_urns.side_inputs.ITERABLE.urn:
if use_unified_worker:
# TODO(BEAM-9173): Stop patching up the access pattern to
# appease Dataflow when using the UW and hardcode the output
# type to be Any since the Dataflow JSON and pipeline proto
# can differ in coders which leads to encoding/decoding issues
# within the runner.
side_input.pvalue.element_type = typehints.Any
new_side_input = _DataflowIterableSideInput(side_input)
else:
# Add a map to ('', value) as Dataflow currently only handles
# keyed side inputs when using the JRH.
pipeline = side_input.pvalue.pipeline
new_side_input = _DataflowIterableAsMultimapSideInput(
side_input)
new_side_input.pvalue = beam.pvalue.PCollection(
pipeline,
element_type=typehints.KV[bytes,
side_input.pvalue.element_type],
is_bounded=side_input.pvalue.is_bounded)
parent = transform_node.parent or pipeline._root_transform()
map_to_void_key = beam.pipeline.AppliedPTransform(
pipeline,
beam.Map(lambda x: (b'', x)),
transform_node.full_label + '/MapToVoidKey%s' % ix,
(side_input.pvalue, ))
new_side_input.pvalue.producer = map_to_void_key
map_to_void_key.add_output(new_side_input.pvalue, None)
parent.add_part(map_to_void_key)
elif access_pattern == common_urns.side_inputs.MULTIMAP.urn:
# Ensure the input coder is a KV coder and patch up the
# access pattern to appease Dataflow.
side_input.pvalue.element_type = typehints.coerce_to_kv_type(
side_input.pvalue.element_type, transform_node.full_label)
new_side_input = _DataflowMultimapSideInput(side_input)
else:
raise ValueError(
'Unsupported access pattern for %r: %r' %
(transform_node.full_label, access_pattern))
new_side_inputs.append(new_side_input)
transform_node.side_inputs = new_side_inputs
transform_node.transform.side_inputs = new_side_inputs
return SideInputVisitor()
@staticmethod
def flatten_input_visitor():
# Imported here to avoid circular dependencies.
from apache_beam.pipeline import PipelineVisitor
class FlattenInputVisitor(PipelineVisitor):
"""A visitor that replaces the element type for input ``PCollections``s of
a ``Flatten`` transform with that of the output ``PCollection``.
"""
def visit_transform(self, transform_node):
# Imported here to avoid circular dependencies.
# pylint: disable=wrong-import-order, wrong-import-position
from apache_beam import Flatten
if isinstance(transform_node.transform, Flatten):
output_pcoll = DataflowRunner._only_element(
transform_node.outputs.values())
for input_pcoll in transform_node.inputs:
input_pcoll.element_type = output_pcoll.element_type
return FlattenInputVisitor()
def _check_for_unsupported_fnapi_features(self, pipeline_proto):
components = pipeline_proto.components
for windowing_strategy in components.windowing_strategies.values():
if (windowing_strategy.merge_status ==
beam_runner_api_pb2.MergeStatus.NEEDS_MERGE and
windowing_strategy.window_fn.urn not in (
common_urns.session_windows.urn, )):
raise RuntimeError(
'Unsupported merging windowing strategy: %s' %
windowing_strategy.window_fn.urn)
elif components.coders[
windowing_strategy.window_coder_id].spec.urn not in (
common_urns.coders.GLOBAL_WINDOW.urn,
common_urns.coders.INTERVAL_WINDOW.urn):
raise RuntimeError(
'Unsupported window coder %s for window fn %s' % (
components.coders[windowing_strategy.window_coder_id].spec.urn,
windowing_strategy.window_fn.urn))
def run_pipeline(self, pipeline, options):
"""Remotely executes entire pipeline or parts reachable from node."""
# Label goog-dataflow-notebook if job is started from notebook.
if is_in_notebook():
notebook_version = (
'goog-dataflow-notebook=' +
beam.version.__version__.replace('.', '_'))
if options.view_as(GoogleCloudOptions).labels:
options.view_as(GoogleCloudOptions).labels.append(notebook_version)
else:
options.view_as(GoogleCloudOptions).labels = [notebook_version]
# Import here to avoid adding the dependency for local running scenarios.
try:
# pylint: disable=wrong-import-order, wrong-import-position
from apache_beam.runners.dataflow.internal import apiclient
except ImportError:
raise ImportError(
'Google Cloud Dataflow runner not available, '
'please install apache_beam[gcp]')
self._maybe_add_unified_worker_missing_options(options)
# Convert all side inputs into a form acceptable to Dataflow.
if apiclient._use_fnapi(options):
pipeline.visit(
self.side_input_visitor(apiclient._use_unified_worker(options)))
# Performing configured PTransform overrides. Note that this is currently
# done before Runner API serialization, since the new proto needs to contain
# any added PTransforms.
pipeline.replace_all(DataflowRunner._PTRANSFORM_OVERRIDES)
if (apiclient._use_fnapi(options) and
not apiclient._use_unified_worker(options)):
pipeline.replace_all(DataflowRunner._JRH_PTRANSFORM_OVERRIDES)
use_fnapi = apiclient._use_fnapi(options)
from apache_beam.transforms import environments
default_environment = environments.DockerEnvironment.from_container_image(
apiclient.get_container_image_from_options(options))
# Snapshot the pipeline in a portable proto.
self.proto_pipeline, self.proto_context = pipeline.to_runner_api(
return_context=True, default_environment=default_environment)
if use_fnapi:
self._check_for_unsupported_fnapi_features(self.proto_pipeline)
      # Cross-language transforms require using a pipeline object constructed
      # from the full pipeline proto to make sure that the expanded versions
      # of external transforms are reflected in the Pipeline job graph.
from apache_beam import Pipeline
pipeline = Pipeline.from_runner_api(
self.proto_pipeline,
pipeline.runner,
options,
allow_proto_holders=True)
      # Pipelines generated from proto do not have output set to PDone for
      # leaf elements.
pipeline.visit(self._set_pdone_visitor(pipeline))
# We need to generate a new context that maps to the new pipeline object.
self.proto_pipeline, self.proto_context = pipeline.to_runner_api(
return_context=True, default_environment=default_environment)
else:
# Performing configured PTransform overrides which should not be reflected
# in the proto representation of the graph.
pipeline.replace_all(DataflowRunner._NON_PORTABLE_PTRANSFORM_OVERRIDES)
# Add setup_options for all the BeamPlugin imports
setup_options = options.view_as(SetupOptions)
plugins = BeamPlugin.get_all_plugin_paths()
if setup_options.beam_plugins is not None:
plugins = list(set(plugins + setup_options.beam_plugins))
setup_options.beam_plugins = plugins
# Elevate "min_cpu_platform" to pipeline option, but using the existing
# experiment.
debug_options = options.view_as(DebugOptions)
worker_options = options.view_as(WorkerOptions)
if worker_options.min_cpu_platform:
debug_options.add_experiment(
'min_cpu_platform=' + worker_options.min_cpu_platform)
# Elevate "enable_streaming_engine" to pipeline option, but using the
# existing experiment.
google_cloud_options = options.view_as(GoogleCloudOptions)
if google_cloud_options.enable_streaming_engine:
debug_options.add_experiment("enable_windmill_service")
debug_options.add_experiment("enable_streaming_engine")
else:
if (debug_options.lookup_experiment("enable_windmill_service") or
debug_options.lookup_experiment("enable_streaming_engine")):
raise ValueError(
"""Streaming engine both disabled and enabled:
enable_streaming_engine flag is not set, but enable_windmill_service
and/or enable_streaming_engine experiments are present.
It is recommended you only set the enable_streaming_engine flag.""")
dataflow_worker_jar = getattr(worker_options, 'dataflow_worker_jar', None)
if dataflow_worker_jar is not None:
if not apiclient._use_fnapi(options):
_LOGGER.warning(
'Typical end users should not use this worker jar feature. '
'It can only be used when FnAPI is enabled.')
else:
debug_options.add_experiment('use_staged_dataflow_worker_jar')
# Make Dataflow workers use FastAvro on Python 3 unless use_avro experiment
# is set. Note that use_avro is only interpreted by the Dataflow runner
    # at job submission and is not interpreted by the Dataflow service or
    # workers, which by default use the avro library unless the use_fastavro
    # experiment is set.
if sys.version_info[0] > 2 and (
not debug_options.lookup_experiment('use_avro')):
debug_options.add_experiment('use_fastavro')
self.job = apiclient.Job(options, self.proto_pipeline)
# Dataflow runner requires a KV type for GBK inputs, hence we enforce that
# here.
pipeline.visit(self.group_by_key_input_visitor())
# Dataflow runner requires output type of the Flatten to be the same as the
# inputs, hence we enforce that here.
pipeline.visit(self.flatten_input_visitor())
# Trigger a traversal of all reachable nodes.
self.visit_transforms(pipeline, options)
test_options = options.view_as(TestOptions)
# If it is a dry run, return without submitting the job.
if test_options.dry_run:
result = PipelineResult(PipelineState.DONE)
result.wait_until_finish = lambda duration=None: None
return result
# Get a Dataflow API client and set its options
self.dataflow_client = apiclient.DataflowApplicationClient(options)
# Create the job description and send a request to the service. The result
# can be None if there is no need to send a request to the service (e.g.
# template creation). If a request was sent and failed then the call will
# raise an exception.
result = DataflowPipelineResult(
self.dataflow_client.create_job(self.job), self)
# TODO(BEAM-4274): Circular import runners-metrics. Requires refactoring.
from apache_beam.runners.dataflow.dataflow_metrics import DataflowMetrics
self._metrics = DataflowMetrics(self.dataflow_client, result, self.job)
result.metric_results = self._metrics
return result
def _maybe_add_unified_worker_missing_options(self, options):
    # Set the default beam_fn_api and use_beam_bq_sink experiments if the
    # unified worker experiment flag exists; no-op otherwise.
debug_options = options.view_as(DebugOptions)
from apache_beam.runners.dataflow.internal import apiclient
if apiclient._use_unified_worker(options):
if not debug_options.lookup_experiment('beam_fn_api'):
debug_options.add_experiment('beam_fn_api')
if not debug_options.lookup_experiment('use_beam_bq_sink'):
debug_options.add_experiment('use_beam_bq_sink')
def _get_typehint_based_encoding(self, typehint, window_coder):
"""Returns an encoding based on a typehint object."""
return self._get_cloud_encoding(
self._get_coder(typehint, window_coder=window_coder))
@staticmethod
def _get_coder(typehint, window_coder):
"""Returns a coder based on a typehint object."""
if window_coder:
return coders.WindowedValueCoder(
coders.registry.get_coder(typehint), window_coder=window_coder)
return coders.registry.get_coder(typehint)
def _get_cloud_encoding(self, coder, unused=None):
"""Returns an encoding based on a coder object."""
if not isinstance(coder, coders.Coder):
raise TypeError(
'Coder object must inherit from coders.Coder: %s.' % str(coder))
return coder.as_cloud_object(self.proto_context.coders)
def _get_side_input_encoding(self, input_encoding):
"""Returns an encoding for the output of a view transform.
Args:
input_encoding: encoding of current transform's input. Side inputs need
this because the service will check that input and output types match.
Returns:
An encoding that matches the output and input encoding. This is essential
for the View transforms introduced to produce side inputs to a ParDo.
"""
return {
'@type': 'kind:stream',
'component_encodings': [input_encoding],
'is_stream_like': {
'value': True
},
}
def _get_encoded_output_coder(
self, transform_node, window_value=True, output_tag=None):
"""Returns the cloud encoding of the coder for the output of a transform."""
is_external_transform = isinstance(
transform_node.transform, RunnerAPIPTransformHolder)
if output_tag in transform_node.outputs:
element_type = transform_node.outputs[output_tag].element_type
elif len(transform_node.outputs) == 1:
output_tag = DataflowRunner._only_element(transform_node.outputs.keys())
# TODO(robertwb): Handle type hints for multi-output transforms.
element_type = transform_node.outputs[output_tag].element_type
elif is_external_transform:
raise ValueError(
'For external transforms, output_tag must be specified '
          'since we cannot fall back to a Python-only coder.')
else:
# TODO(silviuc): Remove this branch (and assert) when typehints are
# propagated everywhere. Returning an 'Any' as type hint will trigger
# usage of the fallback coder (i.e., cPickler).
element_type = typehints.Any
if window_value:
      # All outputs have the same windowing, so getting the window coder from
      # an arbitrary output is fine.
output_tag = next(iter(transform_node.outputs.keys()))
window_coder = (
transform_node.outputs[output_tag].windowing.windowfn.
get_window_coder())
else:
window_coder = None
return self._get_typehint_based_encoding(element_type, window_coder)
def _add_step(self, step_kind, step_label, transform_node, side_tags=()):
"""Creates a Step object and adds it to the cache."""
# Import here to avoid adding the dependency for local running scenarios.
# pylint: disable=wrong-import-order, wrong-import-position
from apache_beam.runners.dataflow.internal import apiclient
step = apiclient.Step(step_kind, self._get_unique_step_name())
self.job.proto.steps.append(step.proto)
step.add_property(PropertyNames.USER_NAME, step_label)
    # Cache the node/step association for the main output of the transform
    # node. The main output key of external transforms can be ambiguous, so we
    # only use a concrete tag when there is exactly one output (None otherwise).
output_tag = (
DataflowRunner._only_element(transform_node.outputs.keys()) if len(
transform_node.outputs.keys()) == 1 else None)
self._cache.cache_output(transform_node, output_tag, step)
# If side_tags is not () then this is a multi-output transform node and we
# need to cache the (node, tag, step) for each of the tags used to access
# the outputs. This is essential because the keys used to search in the
# cache always contain the tag.
for tag in side_tags:
self._cache.cache_output(transform_node, tag, step)
# Finally, we add the display data items to the pipeline step.
# If the transform contains no display data then an empty list is added.
step.add_property(
PropertyNames.DISPLAY_DATA,
[
item.get_dict()
for item in DisplayData.create_from(transform_node.transform).items
])
return step
def _add_singleton_step(
self,
label,
full_label,
tag,
input_step,
windowing_strategy,
access_pattern):
"""Creates a CollectionToSingleton step used to handle ParDo side inputs."""
# Import here to avoid adding the dependency for local running scenarios.
from apache_beam.runners.dataflow.internal import apiclient
step = apiclient.Step(TransformNames.COLLECTION_TO_SINGLETON, label)
self.job.proto.steps.append(step.proto)
step.add_property(PropertyNames.USER_NAME, full_label)
step.add_property(
PropertyNames.PARALLEL_INPUT,
{
'@type': 'OutputReference',
PropertyNames.STEP_NAME: input_step.proto.name,
PropertyNames.OUTPUT_NAME: input_step.get_output(tag)
})
step.encoding = self._get_side_input_encoding(input_step.encoding)
output_info = {
PropertyNames.USER_NAME: '%s.%s' % (full_label, PropertyNames.OUTPUT),
PropertyNames.ENCODING: step.encoding,
PropertyNames.OUTPUT_NAME: PropertyNames.OUT
}
if common_urns.side_inputs.MULTIMAP.urn == access_pattern:
output_info[PropertyNames.USE_INDEXED_FORMAT] = True
step.add_property(PropertyNames.OUTPUT_INFO, [output_info])
step.add_property(
PropertyNames.WINDOWING_STRATEGY,
self.serialize_windowing_strategy(windowing_strategy))
return step
def run_Impulse(self, transform_node, options):
standard_options = options.view_as(StandardOptions)
debug_options = options.view_as(DebugOptions)
use_fn_api = (
debug_options.experiments and
'beam_fn_api' in debug_options.experiments)
use_streaming_engine = (
debug_options.experiments and
'enable_streaming_engine' in debug_options.experiments and
'enable_windmill_service' in debug_options.experiments)
step = self._add_step(
TransformNames.READ, transform_node.full_label, transform_node)
if (standard_options.streaming and
(not use_fn_api or not use_streaming_engine)):
step.add_property(PropertyNames.FORMAT, 'pubsub')
step.add_property(PropertyNames.PUBSUB_SUBSCRIPTION, '_starting_signal/')
else:
step.add_property(PropertyNames.FORMAT, 'impulse')
encoded_impulse_element = coders.WindowedValueCoder(
coders.BytesCoder(),
coders.coders.GlobalWindowCoder()).get_impl().encode_nested(
window.GlobalWindows.windowed_value(b''))
if use_fn_api:
encoded_impulse_as_str = self.byte_array_to_json_string(
encoded_impulse_element)
else:
encoded_impulse_as_str = base64.b64encode(
encoded_impulse_element).decode('ascii')
step.add_property(PropertyNames.IMPULSE_ELEMENT, encoded_impulse_as_str)
step.encoding = self._get_encoded_output_coder(transform_node)
step.add_property(
PropertyNames.OUTPUT_INFO,
[{
PropertyNames.USER_NAME: (
'%s.%s' % (transform_node.full_label, PropertyNames.OUT)),
PropertyNames.ENCODING: step.encoding,
PropertyNames.OUTPUT_NAME: PropertyNames.OUT
}])
def run_Flatten(self, transform_node, options):
step = self._add_step(
TransformNames.FLATTEN, transform_node.full_label, transform_node)
inputs = []
for one_input in transform_node.inputs:
input_step = self._cache.get_pvalue(one_input)
inputs.append({
'@type': 'OutputReference',
PropertyNames.STEP_NAME: input_step.proto.name,
PropertyNames.OUTPUT_NAME: input_step.get_output(one_input.tag)
})
step.add_property(PropertyNames.INPUTS, inputs)
step.encoding = self._get_encoded_output_coder(transform_node)
step.add_property(
PropertyNames.OUTPUT_INFO,
[{
PropertyNames.USER_NAME: (
'%s.%s' % (transform_node.full_label, PropertyNames.OUT)),
PropertyNames.ENCODING: step.encoding,
PropertyNames.OUTPUT_NAME: PropertyNames.OUT
}])
def apply_WriteToBigQuery(self, transform, pcoll, options):
# Make sure this is the WriteToBigQuery class that we expected, and that
# users did not specifically request the new BQ sink by passing experiment
# flag.
# TODO(BEAM-6928): Remove this function for release 2.14.0.
experiments = options.view_as(DebugOptions).experiments or []
from apache_beam.runners.dataflow.internal import apiclient
use_fnapi = apiclient._use_fnapi(options)
if (not isinstance(transform, beam.io.WriteToBigQuery) or use_fnapi or
'use_beam_bq_sink' in experiments):
return self.apply_PTransform(transform, pcoll, options)
if transform.schema == beam.io.gcp.bigquery.SCHEMA_AUTODETECT:
raise RuntimeError(
'Schema auto-detection is not supported on the native sink')
standard_options = options.view_as(StandardOptions)
if standard_options.streaming:
if (transform.write_disposition ==
beam.io.BigQueryDisposition.WRITE_TRUNCATE):
raise RuntimeError('Can not use write truncation mode in streaming')
return self.apply_PTransform(transform, pcoll, options)
else:
from apache_beam.io.gcp.bigquery_tools import parse_table_schema_from_json
schema = None
if transform.schema:
schema = parse_table_schema_from_json(json.dumps(transform.schema))
return pcoll | 'WriteToBigQuery' >> beam.io.Write(
beam.io.BigQuerySink(
transform.table_reference.tableId,
transform.table_reference.datasetId,
transform.table_reference.projectId,
schema,
transform.create_disposition,
transform.write_disposition,
kms_key=transform.kms_key))
def apply_GroupByKey(self, transform, pcoll, options):
# Infer coder of parent.
#
# TODO(ccy): make Coder inference and checking less specialized and more
# comprehensive.
parent = pcoll.producer
if parent:
coder = parent.transform._infer_output_coder() # pylint: disable=protected-access
if not coder:
coder = self._get_coder(pcoll.element_type or typehints.Any, None)
if not coder.is_kv_coder():
raise ValueError((
'Coder for the GroupByKey operation "%s" is not a '
'key-value coder: %s.') % (transform.label, coder))
# TODO(robertwb): Update the coder itself if it changed.
coders.registry.verify_deterministic(
coder.key_coder(), 'GroupByKey operation "%s"' % transform.label)
return pvalue.PCollection.from_(pcoll)
def run_GroupByKey(self, transform_node, options):
input_tag = transform_node.inputs[0].tag
input_step = self._cache.get_pvalue(transform_node.inputs[0])
step = self._add_step(
TransformNames.GROUP, transform_node.full_label, transform_node)
step.add_property(
PropertyNames.PARALLEL_INPUT,
{
'@type': 'OutputReference',
PropertyNames.STEP_NAME: input_step.proto.name,
PropertyNames.OUTPUT_NAME: input_step.get_output(input_tag)
})
step.encoding = self._get_encoded_output_coder(transform_node)
step.add_property(
PropertyNames.OUTPUT_INFO,
[{
PropertyNames.USER_NAME: (
'%s.%s' % (transform_node.full_label, PropertyNames.OUT)),
PropertyNames.ENCODING: step.encoding,
PropertyNames.OUTPUT_NAME: PropertyNames.OUT
}])
windowing = transform_node.transform.get_windowing(transform_node.inputs)
step.add_property(
PropertyNames.SERIALIZED_FN,
self.serialize_windowing_strategy(windowing))
def run_RunnerAPIPTransformHolder(self, transform_node, options):
"""Adding Dataflow runner job description for transform holder objects.
These holder transform objects are generated for some of the transforms that
become available after a cross-language transform expansion, usually if the
corresponding transform object cannot be generated in Python SDK (for
example, a python `ParDo` transform cannot be generated without a serialized
Python `DoFn` object).
"""
urn = transform_node.transform.proto().urn
assert urn
    # TODO(chamikara): support other transforms that require holder objects in
    # Python SDK.
if common_urns.primitives.PAR_DO.urn == urn:
self.run_ParDo(transform_node, options)
else:
raise NotImplementedError(
'%s uses unsupported URN: %s' % (transform_node.full_label, urn))
def run_ParDo(self, transform_node, options):
transform = transform_node.transform
input_tag = transform_node.inputs[0].tag
input_step = self._cache.get_pvalue(transform_node.inputs[0])
is_external_transform = isinstance(transform, RunnerAPIPTransformHolder)
# Attach side inputs.
si_dict = {}
all_input_labels = transform_node.input_tags_to_preserve
si_labels = {}
full_label_counts = defaultdict(int)
lookup_label = lambda side_pval: si_labels[side_pval]
named_inputs = transform_node.named_inputs()
label_renames = {}
for ix, side_pval in enumerate(transform_node.side_inputs):
assert isinstance(side_pval, AsSideInput)
step_name = 'SideInput-' + self._get_unique_step_name()
si_label = ((SIDE_INPUT_PREFIX + '%d-%s') %
(ix, transform_node.full_label)
if side_pval.pvalue not in all_input_labels else
all_input_labels[side_pval.pvalue])
old_label = (SIDE_INPUT_PREFIX + '%d') % ix
if not is_external_transform:
label_renames[old_label] = si_label
assert old_label in named_inputs
pcollection_label = '%s.%s' % (
side_pval.pvalue.producer.full_label.split('/')[-1],
side_pval.pvalue.tag if side_pval.pvalue.tag else 'out')
si_full_label = '%s/%s(%s.%s)' % (
transform_node.full_label,
side_pval.__class__.__name__,
pcollection_label,
full_label_counts[pcollection_label])
# Count the number of times the same PCollection is a side input
# to the same ParDo.
full_label_counts[pcollection_label] += 1
self._add_singleton_step(
step_name,
si_full_label,
side_pval.pvalue.tag,
self._cache.get_pvalue(side_pval.pvalue),
side_pval.pvalue.windowing,
side_pval._side_input_data().access_pattern)
si_dict[si_label] = {
'@type': 'OutputReference',
PropertyNames.STEP_NAME: step_name,
PropertyNames.OUTPUT_NAME: PropertyNames.OUT
}
si_labels[side_pval] = si_label
# Now create the step for the ParDo transform being handled.
transform_name = transform_node.full_label.rsplit('/', 1)[-1]
step = self._add_step(
TransformNames.DO,
transform_node.full_label +
('/{}'.format(transform_name) if transform_node.side_inputs else ''),
transform_node,
transform_node.transform.output_tags)
# Import here to avoid adding the dependency for local running scenarios.
# pylint: disable=wrong-import-order, wrong-import-position
from apache_beam.runners.dataflow.internal import apiclient
transform_proto = self.proto_context.transforms.get_proto(transform_node)
transform_id = self.proto_context.transforms.get_id(transform_node)
use_fnapi = apiclient._use_fnapi(options)
use_unified_worker = apiclient._use_unified_worker(options)
# The data transmitted in SERIALIZED_FN is different depending on whether
# this is a fnapi pipeline or not.
if (use_fnapi and
(transform_proto.spec.urn == common_urns.primitives.PAR_DO.urn or
use_unified_worker)):
# Patch side input ids to be unique across a given pipeline.
if (label_renames and
transform_proto.spec.urn == common_urns.primitives.PAR_DO.urn):
# Patch PTransform proto.
for old, new in iteritems(label_renames):
transform_proto.inputs[new] = transform_proto.inputs[old]
del transform_proto.inputs[old]
# Patch ParDo proto.
proto_type, _ = beam.PTransform._known_urns[transform_proto.spec.urn]
proto = proto_utils.parse_Bytes(
transform_proto.spec.payload, proto_type)
for old, new in iteritems(label_renames):
proto.side_inputs[new].CopyFrom(proto.side_inputs[old])
del proto.side_inputs[old]
transform_proto.spec.payload = proto.SerializeToString()
# We need to update the pipeline proto.
del self.proto_pipeline.components.transforms[transform_id]
(
self.proto_pipeline.components.transforms[transform_id].CopyFrom(
transform_proto))
serialized_data = transform_id
else:
serialized_data = pickler.dumps(
self._pardo_fn_data(transform_node, lookup_label))
step.add_property(PropertyNames.SERIALIZED_FN, serialized_data)
# TODO(BEAM-8882): Enable once dataflow service doesn't reject this.
# step.add_property(PropertyNames.PIPELINE_PROTO_TRANSFORM_ID, transform_id)
step.add_property(
PropertyNames.PARALLEL_INPUT,
{
'@type': 'OutputReference',
PropertyNames.STEP_NAME: input_step.proto.name,
PropertyNames.OUTPUT_NAME: input_step.get_output(input_tag)
})
# Add side inputs if any.
step.add_property(PropertyNames.NON_PARALLEL_INPUTS, si_dict)
# Generate description for the outputs. The output names
# will be 'None' for main output and '<tag>' for a tagged output.
outputs = []
    all_output_tags = list(transform_proto.outputs.keys())
# Some external transforms require output tags to not be modified.
    # So we arbitrarily select one of the output tags as the main output and
    # leave others as side outputs. Transform execution should not change
    # depending on which output tag we choose as the main output here.
# Also, some SDKs do not work correctly if output tags are modified. So for
# external transforms, we leave tags unmodified.
#
# Python SDK uses 'None' as the tag of the main output.
main_output_tag = (all_output_tags[0] if is_external_transform else 'None')
step.encoding = self._get_encoded_output_coder(
transform_node, output_tag=main_output_tag)
side_output_tags = set(all_output_tags).difference({main_output_tag})
# Add the main output to the description.
outputs.append({
PropertyNames.USER_NAME: (
'%s.%s' % (transform_node.full_label, PropertyNames.OUT)),
PropertyNames.ENCODING: step.encoding,
PropertyNames.OUTPUT_NAME: main_output_tag
})
for side_tag in side_output_tags:
# The assumption here is that all outputs will have the same typehint
# and coder as the main output. This is certainly the case right now
# but conceivably it could change in the future.
encoding = self._get_encoded_output_coder(
transform_node, output_tag=side_tag)
outputs.append({
PropertyNames.USER_NAME: (
'%s.%s' % (transform_node.full_label, side_tag)),
PropertyNames.ENCODING: encoding,
PropertyNames.OUTPUT_NAME: side_tag
})
step.add_property(PropertyNames.OUTPUT_INFO, outputs)
# Add the restriction encoding if we are a splittable DoFn
# and are using the Fn API on the unified worker.
restriction_coder = transform.get_restriction_coder()
if restriction_coder:
step.add_property(
PropertyNames.RESTRICTION_ENCODING,
self._get_cloud_encoding(restriction_coder))
if options.view_as(StandardOptions).streaming:
is_stateful_dofn = (
transform.is_pardo_with_stateful_dofn if is_external_transform else
DoFnSignature(transform.dofn).is_stateful_dofn())
if is_stateful_dofn:
step.add_property(PropertyNames.USES_KEYED_STATE, 'true')
@staticmethod
def _pardo_fn_data(transform_node, get_label):
transform = transform_node.transform
si_tags_and_types = [ # pylint: disable=protected-access
(get_label(side_pval), side_pval.__class__, side_pval._view_options())
for side_pval in transform_node.side_inputs]
return (
transform.fn,
transform.args,
transform.kwargs,
si_tags_and_types,
transform_node.inputs[0].windowing)
def run_CombineValuesReplacement(self, transform_node, options):
transform = transform_node.transform.transform
input_tag = transform_node.inputs[0].tag
input_step = self._cache.get_pvalue(transform_node.inputs[0])
step = self._add_step(
TransformNames.COMBINE, transform_node.full_label, transform_node)
transform_id = self.proto_context.transforms.get_id(transform_node.parent)
# The data transmitted in SERIALIZED_FN is different depending on whether
# this is a fnapi pipeline or not.
from apache_beam.runners.dataflow.internal import apiclient
use_fnapi = apiclient._use_fnapi(options)
if use_fnapi:
# Fnapi pipelines send the transform ID of the CombineValues transform's
# parent composite because Dataflow expects the ID of a CombinePerKey
# transform.
serialized_data = transform_id
else:
# Combiner functions do not take deferred side-inputs (i.e. PValues) and
# therefore the code to handle extra args/kwargs is simpler than for the
      # DoFn's of the ParDo transform. The last (empty) argument is where
      # side input information would go.
serialized_data = pickler.dumps(
(transform.fn, transform.args, transform.kwargs, ()))
step.add_property(PropertyNames.SERIALIZED_FN, serialized_data)
# TODO(BEAM-8882): Enable once dataflow service doesn't reject this.
# step.add_property(PropertyNames.PIPELINE_PROTO_TRANSFORM_ID, transform_id)
step.add_property(
PropertyNames.PARALLEL_INPUT,
{
'@type': 'OutputReference',
PropertyNames.STEP_NAME: input_step.proto.name,
PropertyNames.OUTPUT_NAME: input_step.get_output(input_tag)
})
# Note that the accumulator must not have a WindowedValue encoding, while
# the output of this step does in fact have a WindowedValue encoding.
accumulator_encoding = self._get_cloud_encoding(
transform.fn.get_accumulator_coder())
output_encoding = self._get_encoded_output_coder(transform_node)
step.encoding = output_encoding
step.add_property(PropertyNames.ENCODING, accumulator_encoding)
# Generate description for main output 'out.'
outputs = []
# Add the main output to the description.
outputs.append({
PropertyNames.USER_NAME: (
'%s.%s' % (transform_node.full_label, PropertyNames.OUT)),
PropertyNames.ENCODING: step.encoding,
PropertyNames.OUTPUT_NAME: PropertyNames.OUT
})
step.add_property(PropertyNames.OUTPUT_INFO, outputs)
def apply_Read(self, transform, pbegin, options):
if hasattr(transform.source, 'format'):
# Consider native Read to be a primitive for dataflow.
return beam.pvalue.PCollection.from_(pbegin)
else:
return self.apply_PTransform(transform, pbegin, options)
def run_Read(self, transform_node, options):
transform = transform_node.transform
step = self._add_step(
TransformNames.READ, transform_node.full_label, transform_node)
# TODO(mairbek): refactor if-else tree to use registerable functions.
# Initialize the source specific properties.
standard_options = options.view_as(StandardOptions)
if not hasattr(transform.source, 'format'):
# If a format is not set, we assume the source to be a custom source.
source_dict = {}
source_dict['spec'] = {
'@type': names.SOURCE_TYPE,
names.SERIALIZED_SOURCE_KEY: pickler.dumps(transform.source)
}
try:
source_dict['metadata'] = {
'estimated_size_bytes': json_value.get_typed_value_descriptor(
transform.source.estimate_size())
}
except error.RuntimeValueProviderError:
        # Size estimation is best effort, and this error is raised by the
        # value provider.
_LOGGER.info(
            'Could not estimate size of source %r due to '
            'RuntimeValueProviderError', transform.source)
except Exception: # pylint: disable=broad-except
# Size estimation is best effort. So we log the error and continue.
_LOGGER.info(
'Could not estimate size of source %r due to an exception: %s',
transform.source,
traceback.format_exc())
step.add_property(PropertyNames.SOURCE_STEP_INPUT, source_dict)
elif transform.source.format == 'text':
step.add_property(PropertyNames.FILE_PATTERN, transform.source.path)
elif transform.source.format == 'bigquery':
if standard_options.streaming:
raise ValueError(
'BigQuery source is not currently available for use '
'in streaming pipelines.')
debug_options = options.view_as(DebugOptions)
use_fn_api = (
debug_options.experiments and
'beam_fn_api' in debug_options.experiments)
if use_fn_api:
raise ValueError(BQ_SOURCE_UW_ERROR)
step.add_property(PropertyNames.BIGQUERY_EXPORT_FORMAT, 'FORMAT_AVRO')
# TODO(silviuc): Add table validation if transform.source.validate.
if transform.source.table_reference is not None:
step.add_property(
PropertyNames.BIGQUERY_DATASET,
transform.source.table_reference.datasetId)
step.add_property(
PropertyNames.BIGQUERY_TABLE,
transform.source.table_reference.tableId)
        # If the project owning the table was not specified then the project owning
# the workflow (current project) will be used.
if transform.source.table_reference.projectId is not None:
step.add_property(
PropertyNames.BIGQUERY_PROJECT,
transform.source.table_reference.projectId)
elif transform.source.query is not None:
step.add_property(PropertyNames.BIGQUERY_QUERY, transform.source.query)
step.add_property(
PropertyNames.BIGQUERY_USE_LEGACY_SQL,
transform.source.use_legacy_sql)
step.add_property(
PropertyNames.BIGQUERY_FLATTEN_RESULTS,
transform.source.flatten_results)
else:
raise ValueError(
'BigQuery source %r must specify either a table or'
' a query' % transform.source)
if transform.source.kms_key is not None:
step.add_property(
PropertyNames.BIGQUERY_KMS_KEY, transform.source.kms_key)
elif transform.source.format == 'pubsub':
if not standard_options.streaming:
raise ValueError(
'Cloud Pub/Sub is currently available for use '
'only in streaming pipelines.')
# Only one of topic or subscription should be set.
if transform.source.full_subscription:
step.add_property(
PropertyNames.PUBSUB_SUBSCRIPTION,
transform.source.full_subscription)
elif transform.source.full_topic:
step.add_property(
PropertyNames.PUBSUB_TOPIC, transform.source.full_topic)
if transform.source.id_label:
step.add_property(
PropertyNames.PUBSUB_ID_LABEL, transform.source.id_label)
if transform.source.with_attributes:
# Setting this property signals Dataflow runner to return full
# PubsubMessages instead of just the data part of the payload.
step.add_property(PropertyNames.PUBSUB_SERIALIZED_ATTRIBUTES_FN, '')
if transform.source.timestamp_attribute is not None:
step.add_property(
PropertyNames.PUBSUB_TIMESTAMP_ATTRIBUTE,
transform.source.timestamp_attribute)
else:
raise ValueError(
'Source %r has unexpected format %s.' %
(transform.source, transform.source.format))
if not hasattr(transform.source, 'format'):
step.add_property(PropertyNames.FORMAT, names.SOURCE_FORMAT)
else:
step.add_property(PropertyNames.FORMAT, transform.source.format)
# Wrap coder in WindowedValueCoder: this is necessary as the encoding of a
    # step should be the type of value output by each step. Read steps
# automatically wrap output values in a WindowedValue wrapper, if necessary.
# This is also necessary for proper encoding for size estimation.
    # Using a GlobalWindowCoder as a placeholder instead of the default
    # PickleCoder because GlobalWindowCoder is a known coder.
# TODO(robertwb): Query the collection for the windowfn to extract the
# correct coder.
coder = coders.WindowedValueCoder(
coders.registry.get_coder(transform_node.outputs[None].element_type),
coders.coders.GlobalWindowCoder())
step.encoding = self._get_cloud_encoding(coder)
step.add_property(
PropertyNames.OUTPUT_INFO,
[{
PropertyNames.USER_NAME: (
'%s.%s' % (transform_node.full_label, PropertyNames.OUT)),
PropertyNames.ENCODING: step.encoding,
PropertyNames.OUTPUT_NAME: PropertyNames.OUT
}])
def run__NativeWrite(self, transform_node, options):
transform = transform_node.transform
input_tag = transform_node.inputs[0].tag
input_step = self._cache.get_pvalue(transform_node.inputs[0])
step = self._add_step(
TransformNames.WRITE, transform_node.full_label, transform_node)
# TODO(mairbek): refactor if-else tree to use registerable functions.
# Initialize the sink specific properties.
if transform.sink.format == 'text':
# Note that it is important to use typed properties (@type/value dicts)
# for non-string properties and also for empty strings. For example,
      # in the code below num_shards must be typed, and so must
      # file_name_suffix and shard_name_template (which could be empty strings).
step.add_property(
PropertyNames.FILE_NAME_PREFIX,
transform.sink.file_name_prefix,
with_type=True)
step.add_property(
PropertyNames.FILE_NAME_SUFFIX,
transform.sink.file_name_suffix,
with_type=True)
step.add_property(
PropertyNames.SHARD_NAME_TEMPLATE,
transform.sink.shard_name_template,
with_type=True)
if transform.sink.num_shards > 0:
step.add_property(
PropertyNames.NUM_SHARDS, transform.sink.num_shards, with_type=True)
# TODO(silviuc): Implement sink validation.
step.add_property(PropertyNames.VALIDATE_SINK, False, with_type=True)
elif transform.sink.format == 'bigquery':
# TODO(silviuc): Add table validation if transform.sink.validate.
step.add_property(
PropertyNames.BIGQUERY_DATASET,
transform.sink.table_reference.datasetId)
step.add_property(
PropertyNames.BIGQUERY_TABLE, transform.sink.table_reference.tableId)
      # If the project owning the table was not specified then the project owning
# the workflow (current project) will be used.
if transform.sink.table_reference.projectId is not None:
step.add_property(
PropertyNames.BIGQUERY_PROJECT,
transform.sink.table_reference.projectId)
step.add_property(
PropertyNames.BIGQUERY_CREATE_DISPOSITION,
transform.sink.create_disposition)
step.add_property(
PropertyNames.BIGQUERY_WRITE_DISPOSITION,
transform.sink.write_disposition)
if transform.sink.table_schema is not None:
step.add_property(
PropertyNames.BIGQUERY_SCHEMA, transform.sink.schema_as_json())
if transform.sink.kms_key is not None:
step.add_property(
PropertyNames.BIGQUERY_KMS_KEY, transform.sink.kms_key)
elif transform.sink.format == 'pubsub':
standard_options = options.view_as(StandardOptions)
if not standard_options.streaming:
raise ValueError(
'Cloud Pub/Sub is currently available for use '
'only in streaming pipelines.')
step.add_property(PropertyNames.PUBSUB_TOPIC, transform.sink.full_topic)
if transform.sink.id_label:
step.add_property(
PropertyNames.PUBSUB_ID_LABEL, transform.sink.id_label)
if transform.sink.with_attributes:
# Setting this property signals Dataflow runner that the PCollection
# contains PubsubMessage objects instead of just raw data.
step.add_property(PropertyNames.PUBSUB_SERIALIZED_ATTRIBUTES_FN, '')
if transform.sink.timestamp_attribute is not None:
step.add_property(
PropertyNames.PUBSUB_TIMESTAMP_ATTRIBUTE,
transform.sink.timestamp_attribute)
else:
raise ValueError(
'Sink %r has unexpected format %s.' %
(transform.sink, transform.sink.format))
step.add_property(PropertyNames.FORMAT, transform.sink.format)
# Wrap coder in WindowedValueCoder: this is necessary for proper encoding
    # for size estimation. Using a GlobalWindowCoder as a placeholder instead
    # of the default PickleCoder because GlobalWindowCoder is a known coder.
# TODO(robertwb): Query the collection for the windowfn to extract the
# correct coder.
coder = coders.WindowedValueCoder(
transform.sink.coder, coders.coders.GlobalWindowCoder())
step.encoding = self._get_cloud_encoding(coder)
step.add_property(PropertyNames.ENCODING, step.encoding)
step.add_property(
PropertyNames.PARALLEL_INPUT,
{
'@type': 'OutputReference',
PropertyNames.STEP_NAME: input_step.proto.name,
PropertyNames.OUTPUT_NAME: input_step.get_output(input_tag)
})
def run_TestStream(self, transform_node, options):
from apache_beam.testing.test_stream import ElementEvent
from apache_beam.testing.test_stream import ProcessingTimeEvent
from apache_beam.testing.test_stream import WatermarkEvent
standard_options = options.view_as(StandardOptions)
if not standard_options.streaming:
raise ValueError(
'TestStream is currently available for use '
'only in streaming pipelines.')
transform = transform_node.transform
step = self._add_step(
TransformNames.READ, transform_node.full_label, transform_node)
step.add_property(PropertyNames.FORMAT, 'test_stream')
test_stream_payload = beam_runner_api_pb2.TestStreamPayload()
# TestStream source doesn't do any decoding of elements,
# so we won't set test_stream_payload.coder_id.
output_coder = transform._infer_output_coder() # pylint: disable=protected-access
for event in transform._events:
new_event = test_stream_payload.events.add()
if isinstance(event, ElementEvent):
for tv in event.timestamped_values:
element = new_event.element_event.elements.add()
element.encoded_element = output_coder.encode(tv.value)
element.timestamp = tv.timestamp.micros
elif isinstance(event, ProcessingTimeEvent):
new_event.processing_time_event.advance_duration = (
event.advance_by.micros)
elif isinstance(event, WatermarkEvent):
new_event.watermark_event.new_watermark = event.new_watermark.micros
serialized_payload = self.byte_array_to_json_string(
test_stream_payload.SerializeToString())
step.add_property(PropertyNames.SERIALIZED_TEST_STREAM, serialized_payload)
step.encoding = self._get_encoded_output_coder(transform_node)
step.add_property(
PropertyNames.OUTPUT_INFO,
[{
PropertyNames.USER_NAME: (
'%s.%s' % (transform_node.full_label, PropertyNames.OUT)),
PropertyNames.ENCODING: step.encoding,
PropertyNames.OUTPUT_NAME: PropertyNames.OUT
}])
# We must mark this method as not a test or else its name is a matcher for
# nosetest tests.
run_TestStream.__test__ = False # type: ignore[attr-defined]
@classmethod
def serialize_windowing_strategy(cls, windowing):
from apache_beam.runners import pipeline_context
context = pipeline_context.PipelineContext()
windowing_proto = windowing.to_runner_api(context)
return cls.byte_array_to_json_string(
beam_runner_api_pb2.MessageWithComponents(
components=context.to_runner_api(),
windowing_strategy=windowing_proto).SerializeToString())
@classmethod
def deserialize_windowing_strategy(cls, serialized_data):
# Imported here to avoid circular dependencies.
# pylint: disable=wrong-import-order, wrong-import-position
from apache_beam.runners import pipeline_context
from apache_beam.transforms.core import Windowing
proto = beam_runner_api_pb2.MessageWithComponents()
proto.ParseFromString(cls.json_string_to_byte_array(serialized_data))
return Windowing.from_runner_api(
proto.windowing_strategy,
pipeline_context.PipelineContext(proto.components))
@staticmethod
def byte_array_to_json_string(raw_bytes):
"""Implements org.apache.beam.sdk.util.StringUtils.byteArrayToJsonString."""
return quote(raw_bytes)
@staticmethod
def json_string_to_byte_array(encoded_string):
"""Implements org.apache.beam.sdk.util.StringUtils.jsonStringToByteArray."""
return unquote_to_bytes(encoded_string)
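  # A quick round-trip sketch for the two helpers above (assuming standard
  # urllib.parse semantics): byte_array_to_json_string(b'\x00\xab') yields
  # '%00%AB', and json_string_to_byte_array('%00%AB') restores b'\x00\xab'.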
def get_default_gcp_region(self):
"""Get a default value for Google Cloud region according to
https://cloud.google.com/compute/docs/gcloud-compute/#default-properties.
If no default can be found, returns None.
"""
environment_region = os.environ.get('CLOUDSDK_COMPUTE_REGION')
if environment_region:
_LOGGER.info(
'Using default GCP region %s from $CLOUDSDK_COMPUTE_REGION',
environment_region)
return environment_region
try:
cmd = ['gcloud', 'config', 'get-value', 'compute/region']
# Use subprocess.DEVNULL in Python 3.3+.
if hasattr(subprocess, 'DEVNULL'):
DEVNULL = subprocess.DEVNULL
else:
DEVNULL = open(os.devnull, 'ab')
raw_output = processes.check_output(cmd, stderr=DEVNULL)
formatted_output = raw_output.decode('utf-8').strip()
if formatted_output:
_LOGGER.info(
'Using default GCP region %s from `%s`',
formatted_output,
' '.join(cmd))
return formatted_output
except RuntimeError:
pass
return None
class _DataflowSideInput(beam.pvalue.AsSideInput):
"""Wraps a side input as a dataflow-compatible side input."""
def _view_options(self):
return {
'data': self._data,
}
def _side_input_data(self):
return self._data
class _DataflowIterableAsMultimapSideInput(_DataflowSideInput):
"""Wraps an iterable side input as dataflow-compatible side input."""
def __init__(self, side_input):
# pylint: disable=protected-access
side_input_data = side_input._side_input_data()
assert (
side_input_data.access_pattern == common_urns.side_inputs.ITERABLE.urn)
iterable_view_fn = side_input_data.view_fn
self._data = beam.pvalue.SideInputData(
common_urns.side_inputs.MULTIMAP.urn,
side_input_data.window_mapping_fn,
lambda multimap: iterable_view_fn(multimap[b'']))
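    # The view_fn above undoes the MapToVoidKey step added by
    # side_input_visitor: every element was keyed by b'', so looking up that
    # single key recovers the original iterable.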
class _DataflowIterableSideInput(_DataflowSideInput):
"""Wraps an iterable side input as dataflow-compatible side input."""
def __init__(self, side_input):
# pylint: disable=protected-access
self.pvalue = side_input.pvalue
side_input_data = side_input._side_input_data()
assert (
side_input_data.access_pattern == common_urns.side_inputs.ITERABLE.urn)
self._data = beam.pvalue.SideInputData(
common_urns.side_inputs.ITERABLE.urn,
side_input_data.window_mapping_fn,
side_input_data.view_fn)
class _DataflowMultimapSideInput(_DataflowSideInput):
"""Wraps a multimap side input as dataflow-compatible side input."""
def __init__(self, side_input):
# pylint: disable=protected-access
self.pvalue = side_input.pvalue
side_input_data = side_input._side_input_data()
assert (
side_input_data.access_pattern == common_urns.side_inputs.MULTIMAP.urn)
self._data = beam.pvalue.SideInputData(
common_urns.side_inputs.MULTIMAP.urn,
side_input_data.window_mapping_fn,
side_input_data.view_fn)
class DataflowPipelineResult(PipelineResult):
"""Represents the state of a pipeline run on the Dataflow service."""
def __init__(self, job, runner):
"""Initialize a new DataflowPipelineResult instance.
Args:
job: Job message from the Dataflow API. Could be :data:`None` if a job
request was not sent to Dataflow service (e.g. template jobs).
runner: DataflowRunner instance.
"""
self._job = job
self._runner = runner
self.metric_results = None
def _update_job(self):
# We need the job id to be able to update job information. There is no need
# to update the job if we are in a known terminal state.
if self.has_job and not self.is_in_terminal_state():
self._job = self._runner.dataflow_client.get_job(self.job_id())
def job_id(self):
return self._job.id
def metrics(self):
return self.metric_results
@property
def has_job(self):
return self._job is not None
def _get_job_state(self):
values_enum = dataflow_api.Job.CurrentStateValueValuesEnum
# Ordered by the enum values. Values that may be introduced in
# future versions of Dataflow API are considered UNRECOGNIZED by the SDK.
api_jobstate_map = defaultdict(
lambda: PipelineState.UNRECOGNIZED,
{
values_enum.JOB_STATE_UNKNOWN: PipelineState.UNKNOWN,
values_enum.JOB_STATE_STOPPED: PipelineState.STOPPED,
values_enum.JOB_STATE_RUNNING: PipelineState.RUNNING,
values_enum.JOB_STATE_DONE: PipelineState.DONE,
values_enum.JOB_STATE_FAILED: PipelineState.FAILED,
values_enum.JOB_STATE_CANCELLED: PipelineState.CANCELLED,
values_enum.JOB_STATE_UPDATED: PipelineState.UPDATED,
values_enum.JOB_STATE_DRAINING: PipelineState.DRAINING,
values_enum.JOB_STATE_DRAINED: PipelineState.DRAINED,
values_enum.JOB_STATE_PENDING: PipelineState.PENDING,
values_enum.JOB_STATE_CANCELLING: PipelineState.CANCELLING,
})
return (
api_jobstate_map[self._job.currentState]
if self._job.currentState else PipelineState.UNKNOWN)
@property
def state(self):
"""Return the current state of the remote job.
Returns:
A PipelineState object.
"""
if not self.has_job:
return PipelineState.UNKNOWN
self._update_job()
return self._get_job_state()
def is_in_terminal_state(self):
if not self.has_job:
return True
return PipelineState.is_terminal(self._get_job_state())
def wait_until_finish(self, duration=None):
if not self.is_in_terminal_state():
if not self.has_job:
raise IOError('Failed to get the Dataflow job id.')
thread = threading.Thread(
target=DataflowRunner.poll_for_job_completion,
args=(self._runner, self, duration))
# Mark the thread as a daemon thread so a keyboard interrupt on the main
# thread will terminate everything. This is also the reason we will not
# use thread.join() to wait for the polling thread.
thread.daemon = True
thread.start()
while thread.is_alive():
time.sleep(5.0)
# TODO: Merge the termination code in poll_for_job_completion and
# is_in_terminal_state.
terminated = self.is_in_terminal_state()
assert duration or terminated, (
        'Job did not reach a terminal state after waiting indefinitely.')
if terminated and self.state != PipelineState.DONE:
# TODO(BEAM-1290): Consider converting this to an error log based on
      # the resolution of the issue.
raise DataflowRuntimeException(
'Dataflow pipeline failed. State: %s, Error:\n%s' %
(self.state, getattr(self._runner, 'last_error_msg', None)),
self)
return self.state
def cancel(self):
if not self.has_job:
raise IOError('Failed to get the Dataflow job id.')
self._update_job()
if self.is_in_terminal_state():
_LOGGER.warning(
'Cancel failed because job %s is already terminated in state %s.',
self.job_id(),
self.state)
else:
if not self._runner.dataflow_client.modify_job_state(
self.job_id(), 'JOB_STATE_CANCELLED'):
cancel_failed_message = (
'Failed to cancel job %s, please go to the Developers Console to '
'cancel it manually.') % self.job_id()
_LOGGER.error(cancel_failed_message)
raise DataflowRuntimeException(cancel_failed_message, self)
return self.state
def __str__(self):
return '<%s %s %s>' % (self.__class__.__name__, self.job_id(), self.state)
def __repr__(self):
return '<%s %s at %s>' % (self.__class__.__name__, self._job, hex(id(self)))
class DataflowRuntimeException(Exception):
"""Indicates an error has occurred in running this pipeline."""
def __init__(self, msg, result):
super(DataflowRuntimeException, self).__init__(msg)
self.result = result
| element, = iterable
return element |
logger.rs | use log::LevelFilter;
use simplelog::{Config, TermLogger, TerminalMode};
pub fn init() -> anyhow::Result<()> | {
TermLogger::init(LevelFilter::Info, Config::default(), TerminalMode::Mixed)?;
Ok(())
} |
|
any.guard.ts | import {
ExecutionContext,
Injectable,
UnauthorizedException,
} from '@nestjs/common';
import { AuthGuard } from '@nestjs/passport';
import { ERROR } from 'src/constants';
@Injectable()
export class | extends AuthGuard('jwt') {
canActivate(context: ExecutionContext) {
return super.canActivate(context);
}
handleRequest(err, user, info) {
if (err) {
      throw err;
}
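    // Note: unlike the stock JWT guard, a missing user is not rejected here
    // (no UnauthorizedException is thrown), presumably so unauthenticated
    // requests may still proceed; `user` is simply falsy in that case.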
return user;
}
} | AnyAuthGuard |
mod.rs | //! This query borrow-checks the MIR to (further) ensure it is not broken.
use crate::borrow_check::nll::region_infer::RegionInferenceContext;
use rustc::hir;
use rustc::hir::Node;
use rustc::hir::def_id::DefId;
use rustc::infer::InferCtxt;
use rustc::lint::builtin::UNUSED_MUT;
use rustc::middle::borrowck::SignalledError;
use rustc::mir::{AggregateKind, BasicBlock, BorrowCheckResult, BorrowKind};
use rustc::mir::{ClearCrossCrate, Local, Location, Mir, Mutability, Operand, Place};
use rustc::mir::{Field, Projection, ProjectionElem, Rvalue, Statement, StatementKind};
use rustc::mir::{Terminator, TerminatorKind};
use rustc::ty::query::Providers;
use rustc::ty::{self, TyCtxt};
use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, Level};
use rustc_data_structures::bit_set::BitSet;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::graph::dominators::Dominators;
use smallvec::SmallVec;
use std::rc::Rc;
use std::collections::BTreeMap;
use syntax_pos::Span;
use crate::dataflow::indexes::{BorrowIndex, InitIndex, MoveOutIndex, MovePathIndex};
use crate::dataflow::move_paths::{HasMoveData, LookupResult, MoveData, MoveError};
use crate::dataflow::Borrows;
use crate::dataflow::DataflowResultsConsumer;
use crate::dataflow::FlowAtLocation;
use crate::dataflow::MoveDataParamEnv;
use crate::dataflow::{do_dataflow, DebugFormatted};
use crate::dataflow::EverInitializedPlaces;
use crate::dataflow::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
use crate::util::borrowck_errors::{BorrowckErrors, Origin};
use self::borrow_set::{BorrowData, BorrowSet};
use self::flows::Flows;
use self::location::LocationTable;
use self::prefixes::PrefixSet;
use self::MutateMode::{JustWrite, WriteAndRead};
use self::mutability_errors::AccessKind;
use self::path_utils::*;
crate mod borrow_set;
mod error_reporting;
mod flows;
mod location;
mod move_errors;
mod mutability_errors;
mod path_utils;
crate mod place_ext;
crate mod places_conflict;
mod prefixes;
mod used_muts;
pub(crate) mod nll;
pub fn provide(providers: &mut Providers<'_>) {
*providers = Providers {
mir_borrowck,
..*providers
};
}
fn mir_borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> BorrowCheckResult<'tcx> {
let input_mir = tcx.mir_validated(def_id);
debug!("run query mir_borrowck: {}", tcx.item_path_str(def_id));
let mut return_early;
// Return early if we are not supposed to use MIR borrow checker for this function.
return_early = !tcx.has_attr(def_id, "rustc_mir") && !tcx.use_mir_borrowck();
if tcx.is_struct_constructor(def_id) {
// We are not borrow checking the automatically generated struct constructors
// because we want to accept structs such as this (taken from the `linked-hash-map`
// crate):
// ```rust
// struct Qey<Q: ?Sized>(Q);
// ```
// MIR of this struct constructor looks something like this:
// ```rust
// fn Qey(_1: Q) -> Qey<Q>{
// let mut _0: Qey<Q>; // return place
//
// bb0: {
// (_0.0: Q) = move _1; // bb0[0]: scope 0 at src/main.rs:1:1: 1:26
// return; // bb0[1]: scope 0 at src/main.rs:1:1: 1:26
// }
// }
// ```
// The problem here is that `(_0.0: Q) = move _1;` is valid only if `Q` is
// of statically known size, which is not known to be true because of the
// `Q: ?Sized` constraint. However, it is true because the constructor can be
// called only when `Q` is of statically known size.
return_early = true;
}
if return_early {
return BorrowCheckResult {
closure_requirements: None,
used_mut_upvars: SmallVec::new(),
};
}
let opt_closure_req = tcx.infer_ctxt().enter(|infcx| {
let input_mir: &Mir<'_> = &input_mir.borrow();
do_mir_borrowck(&infcx, input_mir, def_id)
});
debug!("mir_borrowck done");
opt_closure_req
}
fn do_mir_borrowck<'a, 'gcx, 'tcx>(
infcx: &InferCtxt<'a, 'gcx, 'tcx>,
input_mir: &Mir<'gcx>,
def_id: DefId,
) -> BorrowCheckResult<'gcx> {
debug!("do_mir_borrowck(def_id = {:?})", def_id);
let tcx = infcx.tcx;
let attributes = tcx.get_attrs(def_id);
let param_env = tcx.param_env(def_id);
let id = tcx
.hir()
.as_local_node_id(def_id)
.expect("do_mir_borrowck: non-local DefId");
// Replace all regions with fresh inference variables. This
// requires first making our own copy of the MIR. This copy will
// be modified (in place) to contain non-lexical lifetimes. It
// will have a lifetime tied to the inference context.
let mut mir: Mir<'tcx> = input_mir.clone();
let free_regions = nll::replace_regions_in_mir(infcx, def_id, param_env, &mut mir);
let mir = &mir; // no further changes
let location_table = &LocationTable::new(mir);
let mut errors_buffer = Vec::new();
let (move_data, move_errors): (MoveData<'tcx>, Option<Vec<(Place<'tcx>, MoveError<'tcx>)>>) =
match MoveData::gather_moves(mir, tcx) {
Ok(move_data) => (move_data, None),
Err((move_data, move_errors)) => (move_data, Some(move_errors)),
};
    let mdpe = MoveDataParamEnv {
        move_data,
        param_env,
    };
let dead_unwinds = BitSet::new_empty(mir.basic_blocks().len());
let mut flow_inits = FlowAtLocation::new(do_dataflow(
tcx,
mir,
id,
&attributes,
&dead_unwinds,
MaybeInitializedPlaces::new(tcx, mir, &mdpe),
|bd, i| DebugFormatted::new(&bd.move_data().move_paths[i]),
));
let locals_are_invalidated_at_exit = tcx.hir().body_owner_kind(id).is_fn_or_closure();
let borrow_set = Rc::new(BorrowSet::build(
tcx, mir, locals_are_invalidated_at_exit, &mdpe.move_data));
// If we are in non-lexical mode, compute the non-lexical lifetimes.
let (regioncx, polonius_output, opt_closure_req) = nll::compute_regions(
infcx,
def_id,
free_regions,
mir,
location_table,
param_env,
&mut flow_inits,
&mdpe.move_data,
&borrow_set,
&mut errors_buffer,
);
// The various `flow_*` structures can be large. We drop `flow_inits` here
// so it doesn't overlap with the others below. This reduces peak memory
// usage significantly on some benchmarks.
drop(flow_inits);
let regioncx = Rc::new(regioncx);
let flow_borrows = FlowAtLocation::new(do_dataflow(
tcx,
mir,
id,
&attributes,
&dead_unwinds,
Borrows::new(tcx, mir, regioncx.clone(), &borrow_set),
|rs, i| DebugFormatted::new(&rs.location(i)),
));
let flow_uninits = FlowAtLocation::new(do_dataflow(
tcx,
mir,
id,
&attributes,
&dead_unwinds,
MaybeUninitializedPlaces::new(tcx, mir, &mdpe),
|bd, i| DebugFormatted::new(&bd.move_data().move_paths[i]),
));
let flow_ever_inits = FlowAtLocation::new(do_dataflow(
tcx,
mir,
id,
&attributes,
&dead_unwinds,
EverInitializedPlaces::new(tcx, mir, &mdpe),
|bd, i| DebugFormatted::new(&bd.move_data().inits[i]),
));
let movable_generator = match tcx.hir().get(id) {
Node::Expr(&hir::Expr {
node: hir::ExprKind::Closure(.., Some(hir::GeneratorMovability::Static)),
..
}) => false,
_ => true,
};
let dominators = mir.dominators();
let mut mbcx = MirBorrowckCtxt {
infcx,
mir,
mir_def_id: def_id,
move_data: &mdpe.move_data,
location_table,
movable_generator,
locals_are_invalidated_at_exit,
access_place_error_reported: Default::default(),
reservation_error_reported: Default::default(),
move_error_reported: BTreeMap::new(),
uninitialized_error_reported: Default::default(),
errors_buffer,
nonlexical_regioncx: regioncx,
used_mut: Default::default(),
used_mut_upvars: SmallVec::new(),
borrow_set,
dominators,
};
let mut state = Flows::new(
flow_borrows,
flow_uninits,
flow_ever_inits,
polonius_output,
);
if let Some(errors) = move_errors {
mbcx.report_move_errors(errors);
}
mbcx.analyze_results(&mut state); // entry point for DataflowResultsConsumer
// For each non-user used mutable variable, check if it's been assigned from
// a user-declared local. If so, then put that local into the used_mut set.
// Note that this set is expected to be small - only upvars from closures
// would have a chance of erroneously adding non-user-defined mutable vars
// to the set.
let temporary_used_locals: FxHashSet<Local> = mbcx.used_mut.iter()
.filter(|&local| mbcx.mir.local_decls[*local].is_user_variable.is_none())
.cloned()
.collect();
// For the remaining unused locals that are marked as mutable, we avoid linting any that
// were never initialized. These locals may have been removed as unreachable code; or will be
// linted as unused variables.
let unused_mut_locals = mbcx.mir.mut_vars_iter()
.filter(|local| !mbcx.used_mut.contains(local))
.collect();
mbcx.gather_used_muts(temporary_used_locals, unused_mut_locals);
debug!("mbcx.used_mut: {:?}", mbcx.used_mut);
let used_mut = mbcx.used_mut;
for local in mbcx.mir.mut_vars_and_args_iter().filter(|local| !used_mut.contains(local)) {
if let ClearCrossCrate::Set(ref vsi) = mbcx.mir.source_scope_local_data {
let local_decl = &mbcx.mir.local_decls[local];
// Skip implicit `self` argument for closures
if local.index() == 1 && tcx.is_closure(mbcx.mir_def_id) {
continue;
}
// Skip over locals that begin with an underscore or have no name
match local_decl.name {
Some(name) => if name.as_str().starts_with("_") {
continue;
},
None => continue,
}
let span = local_decl.source_info.span;
if span.compiler_desugaring_kind().is_some() {
// If the `mut` arises as part of a desugaring, we should ignore it.
continue;
}
let mut_span = tcx.sess.source_map().span_until_non_whitespace(span);
tcx.struct_span_lint_node(
UNUSED_MUT,
vsi[local_decl.source_info.scope].lint_root,
span,
"variable does not need to be mutable",
)
.span_suggestion_short(
mut_span,
"remove this `mut`",
String::new(),
Applicability::MachineApplicable,
)
.emit();
}
}
// Buffer any move errors that we collected and de-duplicated.
for (_, (_, diag)) in mbcx.move_error_reported {
diag.buffer(&mut mbcx.errors_buffer);
}
if !mbcx.errors_buffer.is_empty() {
mbcx.errors_buffer.sort_by_key(|diag| diag.span.primary_span());
if tcx.migrate_borrowck() {
// When borrowck=migrate, check if AST-borrowck would
// error on the given code.
// rust-lang/rust#55492: loop over parents to ensure that
// errors that AST-borrowck only detects in some parent of
// a closure still allows NLL to signal an error.
let mut curr_def_id = def_id;
let signalled_any_error = loop {
match tcx.borrowck(curr_def_id).signalled_any_error {
SignalledError::NoErrorsSeen => {
// keep traversing (and borrow-checking) parents
}
SignalledError::SawSomeError => {
// stop search here
break SignalledError::SawSomeError;
}
}
if tcx.is_closure(curr_def_id) {
curr_def_id = tcx.parent_def_id(curr_def_id)
.expect("a closure must have a parent_def_id");
} else {
break SignalledError::NoErrorsSeen;
}
};
match signalled_any_error {
SignalledError::NoErrorsSeen => {
// if AST-borrowck signalled no errors, then
// downgrade all the buffered MIR-borrowck errors
// to warnings.
for err in &mut mbcx.errors_buffer {
if err.is_error() {
err.level = Level::Warning;
err.warn(
"this error has been downgraded to a warning for backwards \
compatibility with previous releases",
);
err.warn(
"this represents potential undefined behavior in your code and \
this warning will become a hard error in the future",
);
}
}
}
SignalledError::SawSomeError => {
// if AST-borrowck signalled a (cancelled) error,
// then we will just emit the buffered
// MIR-borrowck errors as normal.
}
}
}
for diag in mbcx.errors_buffer.drain(..) {
DiagnosticBuilder::new_diagnostic(mbcx.infcx.tcx.sess.diagnostic(), diag).emit();
}
}
let result = BorrowCheckResult {
closure_requirements: opt_closure_req,
used_mut_upvars: mbcx.used_mut_upvars,
};
debug!("do_mir_borrowck: result = {:#?}", result);
result
}
pub struct MirBorrowckCtxt<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
mir: &'cx Mir<'tcx>,
mir_def_id: DefId,
move_data: &'cx MoveData<'tcx>,
/// Map from MIR `Location` to `LocationIndex`; created
/// when MIR borrowck begins.
location_table: &'cx LocationTable,
movable_generator: bool,
/// This keeps track of whether local variables are free-ed when the function
/// exits even without a `StorageDead`, which appears to be the case for
/// constants.
///
/// I'm not sure this is the right approach - @eddyb could you try and
/// figure this out?
locals_are_invalidated_at_exit: bool,
/// This field keeps track of when borrow errors are reported in the access_place function
/// so that there is no duplicate reporting. This field cannot also be used for the conflicting
    /// borrow errors that are handled by the `reservation_error_reported` field, as the inclusion
/// of the `Span` type (while required to mute some errors) stops the muting of the reservation
/// errors.
access_place_error_reported: FxHashSet<(Place<'tcx>, Span)>,
/// This field keeps track of when borrow conflict errors are reported
/// for reservations, so that we don't report seemingly duplicate
/// errors for corresponding activations.
//
// FIXME: ideally this would be a set of `BorrowIndex`, not `Place`s,
// but it is currently inconvenient to track down the `BorrowIndex`
// at the time we detect and report a reservation error.
reservation_error_reported: FxHashSet<Place<'tcx>>,
    /// This field keeps track of move errors that are to be reported for given move indices.
///
/// There are situations where many errors can be reported for a single move out (see #53807)
/// and we want only the best of those errors.
///
/// The `report_use_of_moved_or_uninitialized` function checks this map and replaces the
/// diagnostic (if there is one) if the `Place` of the error being reported is a prefix of the
    /// `Place` of the previously stored diagnostic. This happens instead of buffering the error. Once
/// all move errors have been reported, any diagnostics in this map are added to the buffer
/// to be emitted.
///
/// `BTreeMap` is used to preserve the order of insertions when iterating. This is necessary
/// when errors in the map are being re-added to the error buffer so that errors with the
/// same primary span come out in a consistent order.
move_error_reported: BTreeMap<Vec<MoveOutIndex>, (Place<'tcx>, DiagnosticBuilder<'cx>)>,
/// This field keeps track of errors reported in the checking of uninitialized variables,
/// so that we don't report seemingly duplicate errors.
uninitialized_error_reported: FxHashSet<Place<'tcx>>,
    /// Buffer of errors to be reported.
errors_buffer: Vec<Diagnostic>,
/// This field keeps track of all the local variables that are declared mut and are mutated.
/// Used for the warning issued by an unused mutable local variable.
used_mut: FxHashSet<Local>,
/// If the function we're checking is a closure, then we'll need to report back the list of
/// mutable upvars that have been used. This field keeps track of them.
used_mut_upvars: SmallVec<[Field; 8]>,
/// Non-lexical region inference context, if NLL is enabled. This
/// contains the results from region inference and lets us e.g.
/// find out which CFG points are contained in each borrow region.
nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,
/// The set of borrows extracted from the MIR
borrow_set: Rc<BorrowSet<'tcx>>,
/// Dominators for MIR
dominators: Dominators<BasicBlock>,
}
// Check that:
// 1. assignments are always made to mutable locations (FIXME: does that still really go here?)
// 2. loans made in overlapping scopes do not conflict
// 3. assignments do not affect things loaned out as immutable
// 4. moves do not affect things loaned out in any way
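// For example, with an immutable loan of a local `s` still live, each of the
// following lines violates one of the checks above (illustrative snippet, not
// from the surrounding source):
// ```rust
// let mut s = String::from("hi");
// let r = &s;            // loan of `s` as immutable
// let m = &mut s;        // (2) conflicting loan in an overlapping scope
// s = String::new();     // (3) assignment to data loaned out as immutable
// let t = s;             // (4) move of data loaned out
// println!("{}", r);     // the immutable loan is still live here
// ```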
impl<'cx, 'gcx, 'tcx> DataflowResultsConsumer<'cx, 'tcx> for MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
type FlowState = Flows<'cx, 'gcx, 'tcx>;
fn mir(&self) -> &'cx Mir<'tcx> {
self.mir
}
fn visit_block_entry(&mut self, bb: BasicBlock, flow_state: &Self::FlowState) {
debug!("MirBorrowckCtxt::process_block({:?}): {}", bb, flow_state);
}
fn visit_statement_entry(
&mut self,
location: Location,
stmt: &Statement<'tcx>,
flow_state: &Self::FlowState,
) {
debug!(
"MirBorrowckCtxt::process_statement({:?}, {:?}): {}",
location, stmt, flow_state
);
let span = stmt.source_info.span;
self.check_activations(location, span, flow_state);
match stmt.kind {
StatementKind::Assign(ref lhs, ref rhs) => {
self.consume_rvalue(
ContextKind::AssignRhs.new(location),
(rhs, span),
location,
flow_state,
);
self.mutate_place(
ContextKind::AssignLhs.new(location),
(lhs, span),
Shallow(None),
JustWrite,
flow_state,
);
}
StatementKind::FakeRead(_, ref place) => {
// Read for match doesn't access any memory and is used to
// assert that a place is safe and live. So we don't have to
// do any checks here.
//
// FIXME: Remove check that the place is initialized. This is
// needed for now because matches don't have never patterns yet.
// So this is the only place we prevent
// let x: !;
// match x {};
// from compiling.
self.check_if_path_or_subpath_is_moved(
ContextKind::FakeRead.new(location),
InitializationRequiringAction::Use,
(place, span),
flow_state,
);
}
StatementKind::SetDiscriminant {
ref place,
variant_index: _,
} => {
self.mutate_place(
ContextKind::SetDiscrim.new(location),
(place, span),
Shallow(None),
JustWrite,
flow_state,
);
}
StatementKind::InlineAsm {
ref asm,
ref outputs,
ref inputs,
} => {
let context = ContextKind::InlineAsm.new(location);
for (o, output) in asm.outputs.iter().zip(outputs.iter()) {
if o.is_indirect {
// FIXME(eddyb) indirect inline asm outputs should
                    // be encoded through MIR place derefs instead.
self.access_place(
context,
(output, o.span),
(Deep, Read(ReadKind::Copy)),
LocalMutationIsAllowed::No,
flow_state,
);
self.check_if_path_or_subpath_is_moved(
context,
InitializationRequiringAction::Use,
(output, o.span),
flow_state,
);
} else {
self.mutate_place(
context,
(output, o.span),
if o.is_rw { Deep } else { Shallow(None) },
if o.is_rw { WriteAndRead } else { JustWrite },
flow_state,
);
}
}
for (_, input) in inputs.iter() {
self.consume_operand(context, (input, span), flow_state);
}
}
StatementKind::Nop
| StatementKind::AscribeUserType(..)
| StatementKind::Retag { .. }
| StatementKind::StorageLive(..) => {
// `Nop`, `AscribeUserType`, `Retag`, and `StorageLive` are irrelevant
// to borrow check.
}
StatementKind::StorageDead(local) => {
self.access_place(
ContextKind::StorageDead.new(location),
(&Place::Local(local), span),
(Shallow(None), Write(WriteKind::StorageDeadOrDrop)),
LocalMutationIsAllowed::Yes,
flow_state,
);
}
}
}
fn visit_terminator_entry(
&mut self,
location: Location,
term: &Terminator<'tcx>,
flow_state: &Self::FlowState,
) {
let loc = location;
debug!(
"MirBorrowckCtxt::process_terminator({:?}, {:?}): {}",
location, term, flow_state
);
let span = term.source_info.span;
self.check_activations(location, span, flow_state);
match term.kind {
TerminatorKind::SwitchInt {
ref discr,
switch_ty: _,
values: _,
targets: _,
} => {
self.consume_operand(ContextKind::SwitchInt.new(loc), (discr, span), flow_state);
}
TerminatorKind::Drop {
location: ref drop_place,
target: _,
unwind: _,
} => {
let gcx = self.infcx.tcx.global_tcx();
// Compute the type with accurate region information.
let drop_place_ty = drop_place.ty(self.mir, self.infcx.tcx);
// Erase the regions.
let drop_place_ty = self.infcx.tcx.erase_regions(&drop_place_ty)
.to_ty(self.infcx.tcx);
// "Lift" into the gcx -- once regions are erased, this type should be in the
                // global arenas; this "lift" operation basically just asserts that this is true,
// that is useful later.
let drop_place_ty = gcx.lift(&drop_place_ty).unwrap();
debug!("visit_terminator_drop \
loc: {:?} term: {:?} drop_place: {:?} drop_place_ty: {:?} span: {:?}",
loc, term, drop_place, drop_place_ty, span);
self.access_place(
ContextKind::Drop.new(loc),
(drop_place, span),
(AccessDepth::Drop, Write(WriteKind::StorageDeadOrDrop)),
LocalMutationIsAllowed::Yes,
flow_state,
);
}
TerminatorKind::DropAndReplace {
location: ref drop_place,
value: ref new_value,
target: _,
unwind: _,
} => {
self.mutate_place(
ContextKind::DropAndReplace.new(loc),
(drop_place, span),
Deep,
JustWrite,
flow_state,
);
self.consume_operand(
ContextKind::DropAndReplace.new(loc),
(new_value, span),
flow_state,
);
}
TerminatorKind::Call {
ref func,
ref args,
ref destination,
cleanup: _,
from_hir_call: _,
} => {
self.consume_operand(ContextKind::CallOperator.new(loc), (func, span), flow_state);
for arg in args {
self.consume_operand(
ContextKind::CallOperand.new(loc),
(arg, span),
flow_state,
);
}
if let Some((ref dest, _ /*bb*/)) = *destination {
self.mutate_place(
ContextKind::CallDest.new(loc),
(dest, span),
Deep,
JustWrite,
flow_state,
);
}
}
TerminatorKind::Assert {
ref cond,
expected: _,
ref msg,
target: _,
cleanup: _,
} => {
self.consume_operand(ContextKind::Assert.new(loc), (cond, span), flow_state);
use rustc::mir::interpret::EvalErrorKind::BoundsCheck;
if let BoundsCheck { ref len, ref index } = *msg {
self.consume_operand(ContextKind::Assert.new(loc), (len, span), flow_state);
self.consume_operand(ContextKind::Assert.new(loc), (index, span), flow_state);
}
}
TerminatorKind::Yield {
ref value,
resume: _,
drop: _,
} => {
self.consume_operand(ContextKind::Yield.new(loc), (value, span), flow_state);
if self.movable_generator {
// Look for any active borrows to locals
let borrow_set = self.borrow_set.clone();
flow_state.with_outgoing_borrows(|borrows| {
for i in borrows {
let borrow = &borrow_set[i];
self.check_for_local_borrow(borrow, span);
}
});
}
}
TerminatorKind::Resume | TerminatorKind::Return | TerminatorKind::GeneratorDrop => {
// Returning from the function implicitly kills storage for all locals and statics.
// Often, the storage will already have been killed by an explicit
// StorageDead, but we don't always emit those (notably on unwind paths),
// so this "extra check" serves as a kind of backup.
let borrow_set = self.borrow_set.clone();
flow_state.with_outgoing_borrows(|borrows| {
for i in borrows {
let borrow = &borrow_set[i];
let context = ContextKind::StorageDead.new(loc);
self.check_for_invalidation_at_exit(context, borrow, span);
}
});
}
TerminatorKind::Goto { target: _ }
| TerminatorKind::Abort
| TerminatorKind::Unreachable
| TerminatorKind::FalseEdges {
real_target: _,
imaginary_targets: _,
}
| TerminatorKind::FalseUnwind {
real_target: _,
unwind: _,
} => {
// no data used, thus irrelevant to borrowck
}
}
}
}
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum MutateMode {
JustWrite,
WriteAndRead,
}
use self::ReadOrWrite::{Activation, Read, Reservation, Write};
use self::AccessDepth::{Deep, Shallow};
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum ArtificialField {
ArrayLength,
ShallowBorrow,
}
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum AccessDepth {
/// From the RFC: "A *shallow* access means that the immediate
/// fields reached at P are accessed, but references or pointers
/// found within are not dereferenced. Right now, the only access
/// that is shallow is an assignment like `x = ...;`, which would
/// be a *shallow write* of `x`."
Shallow(Option<ArtificialField>),
/// From the RFC: "A *deep* access means that all data reachable
    /// through the given place may be invalidated or accessed by
/// this action."
Deep,
/// Access is Deep only when there is a Drop implementation that
/// can reach the data behind the reference.
Drop,
}
/// Kind of access to a value: read or write
/// (For informational purposes only)
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum ReadOrWrite {
/// From the RFC: "A *read* means that the existing data may be
/// read, but will not be changed."
Read(ReadKind),
/// From the RFC: "A *write* means that the data may be mutated to
/// new values or otherwise invalidated (for example, it could be
    /// de-initialized, as in a move operation)."
Write(WriteKind),
/// For two-phase borrows, we distinguish a reservation (which is treated
/// like a Read) from an activation (which is treated like a write), and
/// each of those is furthermore distinguished from Reads/Writes above.
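    ///
    /// For example, two-phase borrows let `v.push(v.len())` compile: the
    /// `&mut v` taken for the `push` call is merely *reserved* (a read-like
    /// access) while the argument `v.len()` is evaluated, and is *activated*
    /// (a write-like access) only when `push` itself runs.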
Reservation(WriteKind),
Activation(WriteKind, BorrowIndex),
}
/// Kind of read access to a value
/// (For informational purposes only)
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum ReadKind {
Borrow(BorrowKind),
Copy,
}
/// Kind of write access to a value
/// (For informational purposes only)
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum WriteKind {
StorageDeadOrDrop,
MutableBorrow(BorrowKind),
Mutate,
Move,
}
/// When checking permissions for a place access, this flag is used to indicate that an immutable
/// local place can be mutated.
//
// FIXME: @nikomatsakis suggested that this flag could be removed with the following modifications:
// - Merge `check_access_permissions()` and `check_if_reassignment_to_immutable_state()`.
// - Split `is_mutable()` into `is_assignable()` (can be directly assigned) and
// `is_declared_mutable()`.
// - Take flow state into consideration in `is_assignable()` for local variables.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum LocalMutationIsAllowed {
Yes,
/// We want use of immutable upvars to cause a "write to immutable upvar"
    /// error, not a "reassignment" error.
ExceptUpvars,
No,
}
#[derive(Copy, Clone, Debug)]
enum InitializationRequiringAction {
Update,
Borrow,
MatchOn,
Use,
Assignment,
PartialAssignment,
}
struct RootPlace<'d, 'tcx: 'd> {
place: &'d Place<'tcx>,
is_local_mutation_allowed: LocalMutationIsAllowed,
}
impl InitializationRequiringAction {
fn as_noun(self) -> &'static str {
match self {
InitializationRequiringAction::Update => "update",
InitializationRequiringAction::Borrow => "borrow",
InitializationRequiringAction::MatchOn => "use", // no good noun
InitializationRequiringAction::Use => "use",
InitializationRequiringAction::Assignment => "assign",
InitializationRequiringAction::PartialAssignment => "assign to part",
}
}
fn as_verb_in_past_tense(self) -> &'static str {
match self {
InitializationRequiringAction::Update => "updated",
InitializationRequiringAction::Borrow => "borrowed",
InitializationRequiringAction::MatchOn => "matched on",
InitializationRequiringAction::Use => "used",
InitializationRequiringAction::Assignment => "assigned",
InitializationRequiringAction::PartialAssignment => "partially assigned",
}
}
}
impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
/// Checks an access to the given place to see if it is allowed. Examines the set of borrows
/// that are in scope, as well as which paths have been initialized, to ensure that (a) the
/// place is initialized and (b) it is not borrowed in some way that would prevent this
/// access.
fn access_place(
&mut self,
context: Context,
place_span: (&Place<'tcx>, Span),
kind: (AccessDepth, ReadOrWrite),
is_local_mutation_allowed: LocalMutationIsAllowed,
flow_state: &Flows<'cx, 'gcx, 'tcx>,
) {
let (sd, rw) = kind;
if let Activation(_, borrow_index) = rw {
if self.reservation_error_reported.contains(&place_span.0) {
debug!(
"skipping access_place for activation of invalid reservation \
place: {:?} borrow_index: {:?}",
place_span.0, borrow_index
);
return;
}
}
        // Check is_empty() first because it's the common case; checking it
        // first lets us avoid the clone() call.
if !self.access_place_error_reported.is_empty() &&
self
.access_place_error_reported
.contains(&(place_span.0.clone(), place_span.1))
{
debug!(
"access_place: suppressing error place_span=`{:?}` kind=`{:?}`",
place_span, kind
);
return;
}
let mutability_error =
self.check_access_permissions(
place_span,
rw,
is_local_mutation_allowed,
flow_state,
context.loc,
);
let conflict_error =
self.check_access_for_conflict(context, place_span, sd, rw, flow_state);
if conflict_error || mutability_error {
debug!(
"access_place: logging error place_span=`{:?}` kind=`{:?}`",
place_span, kind
);
self.access_place_error_reported
.insert((place_span.0.clone(), place_span.1));
}
}
fn check_access_for_conflict(
&mut self,
context: Context,
place_span: (&Place<'tcx>, Span),
sd: AccessDepth,
rw: ReadOrWrite,
flow_state: &Flows<'cx, 'gcx, 'tcx>,
) -> bool {
debug!(
"check_access_for_conflict(context={:?}, place_span={:?}, sd={:?}, rw={:?})",
context, place_span, sd, rw,
);
let mut error_reported = false;
let tcx = self.infcx.tcx;
let mir = self.mir;
let location = self.location_table.start_index(context.loc);
let borrow_set = self.borrow_set.clone();
each_borrow_involving_path(
self,
tcx,
mir,
context,
(sd, place_span.0),
&borrow_set,
flow_state.borrows_in_scope(location),
|this, borrow_index, borrow| match (rw, borrow.kind) {
            |this, borrow_index, borrow| match (rw, borrow.kind) {
                // Obviously an activation is compatible with its own
                // reservation (or even prior activating uses of the same
                // borrow); so don't check if they interfere.
                //
                // NOTE: *reservations* do conflict with themselves;
                // thus we aren't injecting unsoundness with this check.
(Activation(_, activating), _) if activating == borrow_index => {
debug!(
"check_access_for_conflict place_span: {:?} sd: {:?} rw: {:?} \
skipping {:?} b/c activation of same borrow_index",
place_span,
sd,
rw,
(borrow_index, borrow),
);
Control::Continue
}
(Read(_), BorrowKind::Shared) | (Reservation(..), BorrowKind::Shared)
| (Read(_), BorrowKind::Shallow) | (Reservation(..), BorrowKind::Shallow) => {
Control::Continue
}
(Write(WriteKind::Move), BorrowKind::Shallow) => {
// Handled by initialization checks.
Control::Continue
}
(Read(kind), BorrowKind::Unique) | (Read(kind), BorrowKind::Mut { .. }) => {
// Reading from mere reservations of mutable-borrows is OK.
if !is_active(&this.dominators, borrow, context.loc) {
assert!(allow_two_phase_borrow(&this.infcx.tcx, borrow.kind));
return Control::Continue;
}
error_reported = true;
match kind {
ReadKind::Copy => {
this.report_use_while_mutably_borrowed(context, place_span, borrow)
}
ReadKind::Borrow(bk) => {
this.report_conflicting_borrow(context, place_span, bk, &borrow)
}
}
Control::Break
}
(Reservation(kind), BorrowKind::Unique)
| (Reservation(kind), BorrowKind::Mut { .. })
| (Activation(kind, _), _)
| (Write(kind), _) => {
match rw {
Reservation(_) => {
debug!(
"recording invalid reservation of \
place: {:?}",
place_span.0
);
this.reservation_error_reported.insert(place_span.0.clone());
}
Activation(_, activating) => {
debug!(
"observing check_place for activation of \
borrow_index: {:?}",
activating
);
}
Read(..) | Write(..) => {}
}
error_reported = true;
match kind {
WriteKind::MutableBorrow(bk) => {
this.report_conflicting_borrow(context, place_span, bk, &borrow)
}
WriteKind::StorageDeadOrDrop => {
this.report_borrowed_value_does_not_live_long_enough(
context,
borrow,
place_span,
Some(kind))
}
WriteKind::Mutate => {
this.report_illegal_mutation_of_borrowed(context, place_span, borrow)
}
WriteKind::Move => {
this.report_move_out_while_borrowed(context, place_span, &borrow)
}
}
Control::Break
}
},
);
error_reported
}
fn mutate_place(
&mut self,
context: Context,
place_span: (&Place<'tcx>, Span),
kind: AccessDepth,
mode: MutateMode,
flow_state: &Flows<'cx, 'gcx, 'tcx>,
) {
// Write of P[i] or *P, or WriteAndRead of any P, requires P init'd.
match mode {
MutateMode::WriteAndRead => {
self.check_if_path_or_subpath_is_moved(
context,
InitializationRequiringAction::Update,
place_span,
flow_state,
);
}
MutateMode::JustWrite => {
self.check_if_assigned_path_is_moved(context, place_span, flow_state);
}
}
        // Special case: you can assign to an immutable local variable
// (e.g., `x = ...`) so long as it has never been initialized
// before (at this point in the flow).
if let &Place::Local(local) = place_span.0 {
if let Mutability::Not = self.mir.local_decls[local].mutability {
// check for reassignments to immutable local variables
self.check_if_reassignment_to_immutable_state(
context,
local,
place_span,
flow_state,
);
return;
}
}
// Otherwise, use the normal access permission rules.
self.access_place(
context,
place_span,
(kind, Write(WriteKind::Mutate)),
LocalMutationIsAllowed::No,
flow_state,
);
}
fn consume_rvalue(
&mut self,
context: Context,
(rvalue, span): (&Rvalue<'tcx>, Span),
_location: Location,
flow_state: &Flows<'cx, 'gcx, 'tcx>,
) {
match *rvalue {
Rvalue::Ref(_ /*rgn*/, bk, ref place) => {
let access_kind = match bk {
BorrowKind::Shallow => {
(Shallow(Some(ArtificialField::ShallowBorrow)), Read(ReadKind::Borrow(bk)))
},
BorrowKind::Shared => (Deep, Read(ReadKind::Borrow(bk))),
BorrowKind::Unique | BorrowKind::Mut { .. } => {
let wk = WriteKind::MutableBorrow(bk);
if allow_two_phase_borrow(&self.infcx.tcx, bk) {
(Deep, Reservation(wk))
} else {
(Deep, Write(wk))
}
}
};
self.access_place(
context,
(place, span),
access_kind,
LocalMutationIsAllowed::No,
flow_state,
);
let action = if bk == BorrowKind::Shallow {
InitializationRequiringAction::MatchOn
} else {
InitializationRequiringAction::Borrow
};
self.check_if_path_or_subpath_is_moved(
context,
action,
(place, span),
flow_state,
);
}
Rvalue::Use(ref operand)
| Rvalue::Repeat(ref operand, _)
| Rvalue::UnaryOp(_ /*un_op*/, ref operand)
| Rvalue::Cast(_ /*cast_kind*/, ref operand, _ /*ty*/) => {
self.consume_operand(context, (operand, span), flow_state)
}
Rvalue::Len(ref place) | Rvalue::Discriminant(ref place) => {
let af = match *rvalue {
Rvalue::Len(..) => Some(ArtificialField::ArrayLength),
Rvalue::Discriminant(..) => None,
_ => unreachable!(),
};
self.access_place(
context,
(place, span),
(Shallow(af), Read(ReadKind::Copy)),
LocalMutationIsAllowed::No,
flow_state,
);
self.check_if_path_or_subpath_is_moved(
context,
InitializationRequiringAction::Use,
(place, span),
flow_state,
);
}
Rvalue::BinaryOp(_bin_op, ref operand1, ref operand2)
| Rvalue::CheckedBinaryOp(_bin_op, ref operand1, ref operand2) => {
self.consume_operand(context, (operand1, span), flow_state);
self.consume_operand(context, (operand2, span), flow_state);
}
Rvalue::NullaryOp(_op, _ty) => {
// nullary ops take no dynamic input; no borrowck effect.
//
// FIXME: is above actually true? Do we want to track
// the fact that uninitialized data can be created via
// `NullOp::Box`?
}
Rvalue::Aggregate(ref aggregate_kind, ref operands) => {
// We need to report back the list of mutable upvars that were
// moved into the closure and subsequently used by the closure,
// in order to populate our used_mut set.
match **aggregate_kind {
AggregateKind::Closure(def_id, _)
| AggregateKind::Generator(def_id, _, _) => {
let BorrowCheckResult {
used_mut_upvars, ..
} = self.infcx.tcx.mir_borrowck(def_id);
debug!("{:?} used_mut_upvars={:?}", def_id, used_mut_upvars);
for field in used_mut_upvars {
// This relies on the current way that by-value
// captures of a closure are copied/moved directly
// when generating MIR.
match operands[field.index()] {
Operand::Move(Place::Local(local))
| Operand::Copy(Place::Local(local)) => {
self.used_mut.insert(local);
}
Operand::Move(ref place @ Place::Projection(_))
| Operand::Copy(ref place @ Place::Projection(_)) => {
if let Some(field) = place.is_upvar_field_projection(
self.mir, &self.infcx.tcx) {
self.used_mut_upvars.push(field);
}
}
Operand::Move(Place::Static(..))
| Operand::Copy(Place::Static(..))
| Operand::Move(Place::Promoted(..))
| Operand::Copy(Place::Promoted(..))
| Operand::Constant(..) => {}
}
}
}
AggregateKind::Adt(..)
| AggregateKind::Array(..)
| AggregateKind::Tuple { .. } => (),
}
for operand in operands {
self.consume_operand(context, (operand, span), flow_state);
}
}
}
}
fn consume_operand(
&mut self,
context: Context,
(operand, span): (&Operand<'tcx>, Span),
flow_state: &Flows<'cx, 'gcx, 'tcx>,
) {
match *operand {
Operand::Copy(ref place) => {
// copy of place: check if this is "copy of frozen path"
// (FIXME: see check_loans.rs)
self.access_place(
context,
(place, span),
(Deep, Read(ReadKind::Copy)),
LocalMutationIsAllowed::No,
flow_state,
);
// Finally, check if path was already moved.
self.check_if_path_or_subpath_is_moved(
context,
InitializationRequiringAction::Use,
(place, span),
flow_state,
);
}
Operand::Move(ref place) => {
// move of place: check if this is move of already borrowed path
self.access_place(
context,
(place, span),
(Deep, Write(WriteKind::Move)),
LocalMutationIsAllowed::Yes,
flow_state,
);
// Finally, check if path was already moved.
self.check_if_path_or_subpath_is_moved(
context,
InitializationRequiringAction::Use,
(place, span),
flow_state,
);
}
Operand::Constant(_) => {}
}
}
/// Checks whether a borrow of this place is invalidated when the function
/// exits
fn check_for_invalidation_at_exit(
&mut self,
context: Context,
borrow: &BorrowData<'tcx>,
span: Span,
) {
debug!("check_for_invalidation_at_exit({:?})", borrow);
let place = &borrow.borrowed_place;
let root_place = self.prefixes(place, PrefixSet::All).last().unwrap();
// FIXME(nll-rfc#40): do more precise destructor tracking here. For now
// we just know that all locals are dropped at function exit (otherwise
// we'll have a memory leak) and assume that all statics have a destructor.
//
// FIXME: allow thread-locals to borrow other thread locals?
let (might_be_alive, will_be_dropped) = match root_place {
Place::Promoted(_) => (true, false),
Place::Static(_) => {
// Thread-locals might be dropped after the function exits, but
// "true" statics will never be.
let is_thread_local = self.is_place_thread_local(&root_place);
(true, is_thread_local)
}
Place::Local(_) => {
// Locals are always dropped at function exit, and if they
// have a destructor it would've been called already.
(false, self.locals_are_invalidated_at_exit)
}
Place::Projection(..) => {
bug!("root of {:?} is a projection ({:?})?", place, root_place)
}
};
if !will_be_dropped {
debug!(
"place_is_invalidated_at_exit({:?}) - won't be dropped",
place
);
return;
}
let sd = if might_be_alive { Deep } else { Shallow(None) };
if places_conflict::borrow_conflicts_with_place(
self.infcx.tcx,
self.mir,
place,
borrow.kind,
root_place,
sd,
places_conflict::PlaceConflictBias::Overlap,
) {
debug!("check_for_invalidation_at_exit({:?}): INVALID", place);
// FIXME: should be talking about the region lifetime instead
// of just a span here.
let span = self.infcx.tcx.sess.source_map().end_point(span);
self.report_borrowed_value_does_not_live_long_enough(
context,
borrow,
(place, span),
None,
)
}
}
/// Reports an error if this is a borrow of local data.
/// This is called for all Yield statements on movable generators
fn check_for_local_borrow(&mut self, borrow: &BorrowData<'tcx>, yield_span: Span) {
debug!("check_for_local_borrow({:?})", borrow);
if borrow_of_local_data(&borrow.borrowed_place) {
let err = self.infcx.tcx
.cannot_borrow_across_generator_yield(
self.retrieve_borrow_spans(borrow).var_or_use(),
yield_span,
Origin::Mir,
);
err.buffer(&mut self.errors_buffer);
}
}
fn check_activations(
&mut self,
location: Location,
span: Span,
flow_state: &Flows<'cx, 'gcx, 'tcx>,
) {
if !self.infcx.tcx.two_phase_borrows() {
return;
}
// Two-phase borrow support: For each activation that is newly
// generated at this statement, check if it interferes with
// another borrow.
let borrow_set = self.borrow_set.clone();
for &borrow_index in borrow_set.activations_at_location(location) {
let borrow = &borrow_set[borrow_index];
// only mutable borrows should be 2-phase
assert!(match borrow.kind {
BorrowKind::Shared | BorrowKind::Shallow => false,
BorrowKind::Unique | BorrowKind::Mut { .. } => true,
});
self.access_place(
ContextKind::Activation.new(location),
(&borrow.borrowed_place, span),
(
Deep,
Activation(WriteKind::MutableBorrow(borrow.kind), borrow_index),
),
LocalMutationIsAllowed::No,
flow_state,
);
// We do not need to call `check_if_path_or_subpath_is_moved`
// again, as we already called it when we made the
// initial reservation.
}
}
}
impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
fn check_if_reassignment_to_immutable_state(
&mut self,
context: Context,
local: Local,
place_span: (&Place<'tcx>, Span),
flow_state: &Flows<'cx, 'gcx, 'tcx>,
) {
debug!("check_if_reassignment_to_immutable_state({:?})", local);
        // Check if any of the initializations of `local` have happened yet:
if let Some(init_index) = self.is_local_ever_initialized(local, flow_state) {
// And, if so, report an error.
let init = &self.move_data.inits[init_index];
let span = init.span(&self.mir);
self.report_illegal_reassignment(
context, place_span, span, place_span.0
);
}
}
fn check_if_full_path_is_moved(
&mut self,
context: Context,
desired_action: InitializationRequiringAction,
place_span: (&Place<'tcx>, Span),
flow_state: &Flows<'cx, 'gcx, 'tcx>,
) {
let maybe_uninits = &flow_state.uninits;
// Bad scenarios:
//
// 1. Move of `a.b.c`, use of `a.b.c`
// 2. Move of `a.b.c`, use of `a.b.c.d` (without first reinitializing `a.b.c.d`)
// 3. Uninitialized `(a.b.c: &_)`, use of `*a.b.c`; note that with
// partial initialization support, one might have `a.x`
// initialized but not `a.b`.
//
// OK scenarios:
//
// 4. Move of `a.b.c`, use of `a.b.d`
// 5. Uninitialized `a.x`, initialized `a.b`, use of `a.b`
// 6. Copied `(a.b: &_)`, use of `*(a.b).c`; note that `a.b`
// must have been initialized for the use to be sound.
// 7. Move of `a.b.c` then reinit of `a.b.c.d`, use of `a.b.c.d`
// The dataflow tracks shallow prefixes distinctly (that is,
// field-accesses on P distinctly from P itself), in order to
// track substructure initialization separately from the whole
// structure.
//
// E.g., when looking at (*a.b.c).d, if the closest prefix for
// which we have a MovePath is `a.b`, then that means that the
// initialization state of `a.b` is all we need to inspect to
// know if `a.b.c` is valid (and from that we infer that the
// dereference and `.d` access is also valid, since we assume
        // `a.b.c` is assigned a reference to an initialized and
// well-formed record structure.)
// Therefore, if we seek out the *closest* prefix for which we
// have a MovePath, that should capture the initialization
// state for the place scenario.
//
// This code covers scenarios 1, 2, and 3.
debug!("check_if_full_path_is_moved place: {:?}", place_span.0);
match self.move_path_closest_to(place_span.0) {
Ok((prefix, mpi)) => {
if maybe_uninits.contains(mpi) {
self.report_use_of_moved_or_uninitialized(
context,
desired_action,
(prefix, place_span.0, place_span.1),
mpi,
);
return; // don't bother finding other problems.
}
}
Err(NoMovePathFound::ReachedStatic) => {
// Okay: we do not build MoveData for static variables
} // Only query longest prefix with a MovePath, not further
// ancestors; dataflow recurs on children when parents
// move (to support partial (re)inits).
//
// (I.e., querying parents breaks scenario 7; but may want
// to do such a query based on partial-init feature-gate.)
}
}
fn check_if_path_or_subpath_is_moved(
&mut self,
context: Context,
desired_action: InitializationRequiringAction,
place_span: (&Place<'tcx>, Span),
flow_state: &Flows<'cx, 'gcx, 'tcx>,
) {
let maybe_uninits = &flow_state.uninits;
// Bad scenarios:
//
        // 1. Move of `a.b.c`, use of `a` or `a.b`; note that with
        //    partial initialization support, one might have `a.x`
        //    initialized but not `a.b`.
// 2. All bad scenarios from `check_if_full_path_is_moved`
//
// OK scenarios:
//
// 3. Move of `a.b.c`, use of `a.b.d`
// 4. Uninitialized `a.x`, initialized `a.b`, use of `a.b`
// 5. Copied `(a.b: &_)`, use of `*(a.b).c`; note that `a.b`
// must have been initialized for the use to be sound.
// 6. Move of `a.b.c` then reinit of `a.b.c.d`, use of `a.b.c.d`
self.check_if_full_path_is_moved(context, desired_action, place_span, flow_state);
        // A move of any shallow suffix of `place` also interferes
        // with an attempt to use `place`; this code covers bad
        // scenario 1 above.
        //
        // (Distinct from the full-path check above because `place`
        // does not interfere with suffixes of its prefixes: e.g., a
        // move of `a.b.c` does not interfere with a use of `a.b.d`,
        // which is OK scenario 3.)
debug!("check_if_path_or_subpath_is_moved place: {:?}", place_span.0);
if let Some(mpi) = self.move_path_for_place(place_span.0) {
if let Some(child_mpi) = maybe_uninits.has_any_child_of(mpi) {
self.report_use_of_moved_or_uninitialized(
context,
desired_action,
(place_span.0, place_span.0, place_span.1),
child_mpi,
);
return; // don't bother finding other problems.
}
}
}
/// Currently MoveData does not store entries for all places in
    /// the input MIR. For example, it will currently filter out
/// places that are Copy; thus we do not track places of shared
/// reference type. This routine will walk up a place along its
/// prefixes, searching for a foundational place that *is*
/// tracked in the MoveData.
///
    /// An Err result includes a tag indicating why the search failed.
/// Currently this can only occur if the place is built off of a
/// static variable, as we do not track those in the MoveData.
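    ///
    /// (E.g., for a place like `(*a.b.c).d`, if the closest prefix with a
    /// MovePath is `a.b`, this returns `a.b` together with its
    /// `MovePathIndex`.)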
fn move_path_closest_to<'a>(
&mut self,
place: &'a Place<'tcx>,
) -> Result<(&'a Place<'tcx>, MovePathIndex), NoMovePathFound> where 'cx: 'a {
let mut last_prefix = place;
for prefix in self.prefixes(place, PrefixSet::All) {
if let Some(mpi) = self.move_path_for_place(prefix) {
return Ok((prefix, mpi));
}
last_prefix = prefix;
}
match *last_prefix {
Place::Local(_) => panic!("should have move path for every Local"),
Place::Projection(_) => panic!("PrefixSet::All meant don't stop for Projection"),
Place::Promoted(_) |
Place::Static(_) => Err(NoMovePathFound::ReachedStatic),
}
}
fn move_path_for_place(&mut self, place: &Place<'tcx>) -> Option<MovePathIndex> {
// If returns None, then there is no move path corresponding
// to a direct owner of `place` (which means there is nothing
// that borrowck tracks for its analysis).
match self.move_data.rev_lookup.find(place) {
LookupResult::Parent(_) => None,
LookupResult::Exact(mpi) => Some(mpi),
}
}
fn check_if_assigned_path_is_moved(
&mut self,
context: Context,
(place, span): (&Place<'tcx>, Span),
flow_state: &Flows<'cx, 'gcx, 'tcx>,
) {
debug!("check_if_assigned_path_is_moved place: {:?}", place);
// recur down place; dispatch to external checks when necessary
let mut place = place;
loop {
match *place {
Place::Promoted(_) |
Place::Local(_) | Place::Static(_) => {
// assigning to `x` does not require `x` be initialized.
break;
}
Place::Projection(ref proj) => {
let Projection { ref base, ref elem } = **proj;
match *elem {
ProjectionElem::Index(_/*operand*/) |
ProjectionElem::ConstantIndex { .. } |
// assigning to P[i] requires P to be valid.
ProjectionElem::Downcast(_/*adt_def*/, _/*variant_idx*/) =>
// assigning to (P->variant) is okay if assigning to `P` is okay
//
                        // FIXME: is this true even if P is an ADT with a dtor?
{ }
// assigning to (*P) requires P to be initialized
ProjectionElem::Deref => {
self.check_if_full_path_is_moved(
context, InitializationRequiringAction::Use,
(base, span), flow_state);
// (base initialized; no need to
// recur further)
break;
}
ProjectionElem::Subslice { .. } => {
panic!("we don't allow assignments to subslices, context: {:?}",
context);
}
ProjectionElem::Field(..) => {
// if type of `P` has a dtor, then
// assigning to `P.f` requires `P` itself
// be already initialized
let tcx = self.infcx.tcx;
match base.ty(self.mir, tcx).to_ty(tcx).sty {
ty::Adt(def, _) if def.has_dtor(tcx) => {
self.check_if_path_or_subpath_is_moved(
context, InitializationRequiringAction::Assignment,
(base, span), flow_state);
// (base initialized; no need to
// recur further)
break;
}
                            // Once `let s; s.x = V; read(s.x);`
                            // is allowed, remove this match arm.
ty::Adt(..) | ty::Tuple(..) => {
check_parent_of_field(self, context, base, span, flow_state);
if let Some(local) = place.base_local() {
// rust-lang/rust#21232,
// #54499, #54986: during
// period where we reject
// partial initialization, do
// not complain about
// unnecessary `mut` on an
// attempt to do a partial
// initialization.
self.used_mut.insert(local);
}
}
_ => {}
}
}
}
place = base;
continue;
}
}
}
fn check_parent_of_field<'cx, 'gcx, 'tcx>(
this: &mut MirBorrowckCtxt<'cx, 'gcx, 'tcx>,
context: Context,
base: &Place<'tcx>,
span: Span,
flow_state: &Flows<'cx, 'gcx, 'tcx>,
) {
// rust-lang/rust#21232: Until Rust allows reads from the
// initialized parts of partially initialized structs, we
// will, starting with the 2018 edition, reject attempts
// to write to structs that are not fully initialized.
//
// In other words, *until* we allow this:
//
// 1. `let mut s; s.x = Val; read(s.x);`
//
// we will for now disallow this:
//
// 2. `let mut s; s.x = Val;`
//
// and also this:
//
// 3. `let mut s = ...; drop(s); s.x=Val;`
//
// This does not use check_if_path_or_subpath_is_moved,
// because we want to *allow* reinitializations of fields:
// e.g., want to allow
//
// `let mut s = ...; drop(s.x); s.x=Val;`
//
// This does not use check_if_full_path_is_moved on
// `base`, because that would report an error about the
// `base` as a whole, but in this scenario we *really*
// want to report an error about the actual thing that was
// moved, which may be some prefix of `base`.
// Shallow so that we'll stop at any dereference; we'll
// report errors about issues with such bases elsewhere.
let maybe_uninits = &flow_state.uninits;
// Find the shortest uninitialized prefix you can reach
// without going over a Deref.
let mut shortest_uninit_seen = None;
for prefix in this.prefixes(base, PrefixSet::Shallow) {
let mpi = match this.move_path_for_place(prefix) {
Some(mpi) => mpi, None => continue,
};
if maybe_uninits.contains(mpi) {
debug!("check_parent_of_field updating shortest_uninit_seen from {:?} to {:?}",
shortest_uninit_seen, Some((prefix, mpi)));
shortest_uninit_seen = Some((prefix, mpi));
} else {
debug!("check_parent_of_field {:?} is definitely initialized", (prefix, mpi));
}
}
if let Some((prefix, mpi)) = shortest_uninit_seen {
            // Check for a reassignment into an uninitialized field of a union (for example,
            // after a move out). In this case, do not report an error here. There is an
// exception, if this is the first assignment into the union (that is, there is
// no move out from an earlier location) then this is an attempt at initialization
// of the union - we should error in that case.
let tcx = this.infcx.tcx;
if let ty::TyKind::Adt(def, _) = base.ty(this.mir, tcx).to_ty(tcx).sty {
if def.is_union() {
if this.move_data.path_map[mpi].iter().any(|moi| {
this.move_data.moves[*moi].source.is_predecessor_of(
context.loc, this.mir,
)
}) {
return;
}
}
}
this.report_use_of_moved_or_uninitialized(
context,
InitializationRequiringAction::PartialAssignment,
(prefix, base, span),
mpi,
);
}
}
}
/// Checks the permissions for the given place and read or write kind
///
/// Returns `true` if an error is reported.
fn check_access_permissions(
&mut self,
(place, span): (&Place<'tcx>, Span),
kind: ReadOrWrite,
is_local_mutation_allowed: LocalMutationIsAllowed,
flow_state: &Flows<'cx, 'gcx, 'tcx>,
location: Location,
) -> bool {
debug!(
"check_access_permissions({:?}, {:?}, is_local_mutation_allowed: {:?})",
place, kind, is_local_mutation_allowed
);
let error_access;
let the_place_err;
// rust-lang/rust#21232, #54986: during period where we reject
// partial initialization, do not complain about mutability
// errors except for actual mutation (as opposed to an attempt
// to do a partial initialization).
let previously_initialized = if let Some(local) = place.base_local() {
self.is_local_ever_initialized(local, flow_state).is_some()
} else {
true
};
match kind {
Reservation(WriteKind::MutableBorrow(borrow_kind @ BorrowKind::Unique))
| Reservation(WriteKind::MutableBorrow(borrow_kind @ BorrowKind::Mut { .. }))
| Write(WriteKind::MutableBorrow(borrow_kind @ BorrowKind::Unique))
| Write(WriteKind::MutableBorrow(borrow_kind @ BorrowKind::Mut { .. })) => {
let is_local_mutation_allowed = match borrow_kind {
BorrowKind::Unique => LocalMutationIsAllowed::Yes,
BorrowKind::Mut { .. } => is_local_mutation_allowed,
BorrowKind::Shared | BorrowKind::Shallow => unreachable!(),
};
match self.is_mutable(place, is_local_mutation_allowed) {
Ok(root_place) => {
self.add_used_mut(root_place, flow_state);
return false;
}
Err(place_err) => {
error_access = AccessKind::MutableBorrow;
the_place_err = place_err;
}
}
}
Reservation(WriteKind::Mutate) | Write(WriteKind::Mutate) => {
match self.is_mutable(place, is_local_mutation_allowed) {
Ok(root_place) => {
self.add_used_mut(root_place, flow_state);
return false;
}
Err(place_err) => {
error_access = AccessKind::Mutate;
the_place_err = place_err;
}
}
}
Reservation(wk @ WriteKind::Move)
| Write(wk @ WriteKind::Move)
| Reservation(wk @ WriteKind::StorageDeadOrDrop)
| Reservation(wk @ WriteKind::MutableBorrow(BorrowKind::Shared))
| Reservation(wk @ WriteKind::MutableBorrow(BorrowKind::Shallow))
| Write(wk @ WriteKind::StorageDeadOrDrop)
| Write(wk @ WriteKind::MutableBorrow(BorrowKind::Shared))
| Write(wk @ WriteKind::MutableBorrow(BorrowKind::Shallow)) => {
if let (Err(_place_err), true) = (
self.is_mutable(place, is_local_mutation_allowed),
self.errors_buffer.is_empty()
) {
if self.infcx.tcx.migrate_borrowck() {
// rust-lang/rust#46908: In pure NLL mode this
// code path should be unreachable (and thus
// we signal an ICE in the else branch
// here). But we can legitimately get here
// under borrowck=migrate mode, so instead of
// ICE'ing we instead report a legitimate
// error (which will then be downgraded to a
// warning by the migrate machinery).
error_access = match wk {
WriteKind::MutableBorrow(_) => AccessKind::MutableBorrow,
WriteKind::Move => AccessKind::Move,
WriteKind::StorageDeadOrDrop |
WriteKind::Mutate => AccessKind::Mutate,
};
self.report_mutability_error(
place,
span,
_place_err,
error_access,
location,
);
} else |
}
return false;
}
Activation(..) => {
// permission checks are done at Reservation point.
return false;
}
Read(ReadKind::Borrow(BorrowKind::Unique))
| Read(ReadKind::Borrow(BorrowKind::Mut { .. }))
| Read(ReadKind::Borrow(BorrowKind::Shared))
| Read(ReadKind::Borrow(BorrowKind::Shallow))
| Read(ReadKind::Copy) => {
// Access authorized
return false;
}
}
// at this point, we have set up the error reporting state.
return if previously_initialized {
self.report_mutability_error(
place,
span,
the_place_err,
error_access,
location,
);
true
} else {
false
};
}
fn is_local_ever_initialized(&self,
local: Local,
flow_state: &Flows<'cx, 'gcx, 'tcx>)
-> Option<InitIndex>
{
let mpi = self.move_data.rev_lookup.find_local(local);
let ii = &self.move_data.init_path_map[mpi];
for &index in ii {
if flow_state.ever_inits.contains(index) {
return Some(index);
}
}
None
}
/// Adds the place into the used mutable variables set
fn add_used_mut<'d>(
&mut self,
root_place: RootPlace<'d, 'tcx>,
flow_state: &Flows<'cx, 'gcx, 'tcx>,
) {
match root_place {
RootPlace {
place: Place::Local(local),
is_local_mutation_allowed,
} => {
// If the local may have been initialized, and it is now currently being
// mutated, then it is justified to be annotated with the `mut`
// keyword, since the mutation may be a possible reassignment.
if is_local_mutation_allowed != LocalMutationIsAllowed::Yes &&
self.is_local_ever_initialized(*local, flow_state).is_some()
{
self.used_mut.insert(*local);
}
}
RootPlace {
place: _,
is_local_mutation_allowed: LocalMutationIsAllowed::Yes,
} => {}
RootPlace {
place: place @ Place::Projection(_),
is_local_mutation_allowed: _,
} => {
if let Some(field) = place.is_upvar_field_projection(self.mir, &self.infcx.tcx) {
self.used_mut_upvars.push(field);
}
}
RootPlace {
place: Place::Promoted(..),
is_local_mutation_allowed: _,
} => {}
RootPlace {
place: Place::Static(..),
is_local_mutation_allowed: _,
} => {}
}
}
/// Whether this value can be written or borrowed mutably.
/// Returns the root place if the place passed in is a projection.
fn is_mutable<'d>(
&self,
place: &'d Place<'tcx>,
is_local_mutation_allowed: LocalMutationIsAllowed,
) -> Result<RootPlace<'d, 'tcx>, &'d Place<'tcx>> {
match *place {
Place::Local(local) => {
let local = &self.mir.local_decls[local];
match local.mutability {
Mutability::Not => match is_local_mutation_allowed {
LocalMutationIsAllowed::Yes => Ok(RootPlace {
place,
is_local_mutation_allowed: LocalMutationIsAllowed::Yes,
}),
LocalMutationIsAllowed::ExceptUpvars => Ok(RootPlace {
place,
is_local_mutation_allowed: LocalMutationIsAllowed::ExceptUpvars,
}),
LocalMutationIsAllowed::No => Err(place),
},
Mutability::Mut => Ok(RootPlace {
place,
is_local_mutation_allowed,
}),
}
}
            // The rules for promotion are made by `qualify_consts`; there wouldn't even be a
// `Place::Promoted` if the promotion weren't 100% legal. So we just forward this
Place::Promoted(_) => Ok(RootPlace {
place,
is_local_mutation_allowed,
}),
Place::Static(ref static_) => {
if self.infcx.tcx.is_static(static_.def_id) != Some(hir::Mutability::MutMutable) {
Err(place)
} else {
Ok(RootPlace {
place,
is_local_mutation_allowed,
})
}
}
Place::Projection(ref proj) => {
match proj.elem {
ProjectionElem::Deref => {
let base_ty = proj.base.ty(self.mir, self.infcx.tcx).to_ty(self.infcx.tcx);
// Check the kind of deref to decide
match base_ty.sty {
ty::Ref(_, _, mutbl) => {
match mutbl {
// Shared borrowed data is never mutable
hir::MutImmutable => Err(place),
// Mutably borrowed data is mutable, but only if we have a
// unique path to the `&mut`
hir::MutMutable => {
let mode = match place.is_upvar_field_projection(
self.mir, &self.infcx.tcx)
{
Some(field)
if {
self.mir.upvar_decls[field.index()].by_ref
} =>
{
is_local_mutation_allowed
}
_ => LocalMutationIsAllowed::Yes,
};
self.is_mutable(&proj.base, mode)
}
}
}
ty::RawPtr(tnm) => {
match tnm.mutbl {
// `*const` raw pointers are not mutable
hir::MutImmutable => Err(place),
// `*mut` raw pointers are always mutable, regardless of
                                    // context. Users have to check validity for themselves.
hir::MutMutable => {
Ok(RootPlace {
place,
is_local_mutation_allowed,
})
}
}
}
// `Box<T>` owns its content, so mutable if its location is mutable
_ if base_ty.is_box() => {
self.is_mutable(&proj.base, is_local_mutation_allowed)
}
                        // Deref should only be used for references, pointers, or boxes
_ => bug!("Deref of unexpected type: {:?}", base_ty),
}
}
// All other projections are owned by their base path, so mutable if
// base path is mutable
ProjectionElem::Field(..)
| ProjectionElem::Index(..)
| ProjectionElem::ConstantIndex { .. }
| ProjectionElem::Subslice { .. }
| ProjectionElem::Downcast(..) => {
let upvar_field_projection = place.is_upvar_field_projection(
self.mir, &self.infcx.tcx);
if let Some(field) = upvar_field_projection {
let decl = &self.mir.upvar_decls[field.index()];
debug!(
"decl.mutability={:?} local_mutation_is_allowed={:?} place={:?}",
decl, is_local_mutation_allowed, place
);
match (decl.mutability, is_local_mutation_allowed) {
(Mutability::Not, LocalMutationIsAllowed::No)
| (Mutability::Not, LocalMutationIsAllowed::ExceptUpvars) => {
Err(place)
}
(Mutability::Not, LocalMutationIsAllowed::Yes)
| (Mutability::Mut, _) => {
// Subtle: this is an upvar
// reference, so it looks like
// `self.foo` -- we want to double
// check that the context `*self`
// is mutable (i.e., this is not a
// `Fn` closure). But if that
// check succeeds, we want to
// *blame* the mutability on
// `place` (that is,
// `self.foo`). This is used to
// propagate the info about
// whether mutability declarations
// are used outwards, so that we register
// the outer variable as mutable. Otherwise a
// test like this fails to record the `mut`
// as needed:
//
// ```
// fn foo<F: FnOnce()>(_f: F) { }
// fn main() {
// let var = Vec::new();
// foo(move || {
// var.push(1);
// });
// }
// ```
let _ = self.is_mutable(&proj.base, is_local_mutation_allowed)?;
Ok(RootPlace {
place,
is_local_mutation_allowed,
})
}
}
} else {
self.is_mutable(&proj.base, is_local_mutation_allowed)
}
}
}
}
}
}
}
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum NoMovePathFound {
ReachedStatic,
}
/// The degree of overlap between 2 places for borrow-checking.
enum Overlap {
/// The places might partially overlap - in this case, we give
/// up and say that they might conflict. This occurs when
/// different fields of a union are borrowed. For example,
/// if `u` is a union, we have no way of telling how disjoint
    /// `u.a.x` and `u.b.y` are.
Arbitrary,
/// The places have the same type, and are either completely disjoint
/// or equal - i.e., they can't "partially" overlap as can occur with
/// unions. This is the "base case" on which we recur for extensions
/// of the place.
EqualOrDisjoint,
/// The places are disjoint, so we know all extensions of them
/// will also be disjoint.
Disjoint,
}
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct Context {
kind: ContextKind,
loc: Location,
}
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum ContextKind {
Activation,
AssignLhs,
AssignRhs,
SetDiscrim,
InlineAsm,
SwitchInt,
Drop,
DropAndReplace,
CallOperator,
CallOperand,
CallDest,
Assert,
Yield,
FakeRead,
StorageDead,
}
impl ContextKind {
fn new(self, loc: Location) -> Context {
Context {
kind: self,
loc,
}
}
}
| {
span_bug!(
span,
"Accessing `{:?}` with the kind `{:?}` shouldn't be possible",
place,
kind,
);
} |
train.py | from core.loss import d_wasserstein_loss
from core.loss import g_wasserstein_loss
from core.nn.conv.wgan import generator
from core.nn.conv.wgan import critic
from core.callbacks import GANMonitor
from core.model import WGAN_GP
| train_images = tf.keras.utils.image_dataset_from_directory(
"dataset/images/", label_mode=None, image_size=(config.IMAGE_WIDTH, config.IMAGE_HEIGHT), batch_size=config.BATCH_SIZE
)
train_images = train_images.map(lambda x: (x - 127.5) / 127.5)
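# The map above rescales pixel values from [0, 255] to [-1, 1]; this assumes
# the generator emits images in the same range (e.g. via a tanh output layer).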
generator = generator(config.LATENT_DIM, tf.keras.initializers.RandomNormal(mean=0.0, stddev=0.02), channels=config.CHANNELS)
critic = critic(config.IMAGE_HEIGHT, config.IMAGE_WIDTH, config.CHANNELS)
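# Wasserstein GAN training updates the critic `critic_extra_steps` times for
# every single generator update.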
wgan = WGAN_GP(critic=critic, generator=generator, latent_dim=config.LATENT_DIM, critic_extra_steps=config.EXTRA_STEPS)
d_opt = tf.keras.optimizers.Adam(learning_rate=config.LR, beta_1=0.5, beta_2=0.9)
g_opt = tf.keras.optimizers.Adam(learning_rate=config.LR, beta_1=0.5, beta_2=0.9)
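# Adam with beta_1=0.5 and beta_2=0.9 is a common optimiser choice for WGAN-GP;
# both networks share the learning rate from config.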
wgan.compile(
d_optimiser=d_opt,
g_optimiser=g_opt,
d_loss_fn=d_wasserstein_loss,
g_loss_fn=g_wasserstein_loss,
)
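# GANMonitor is a project callback; given num_images and the latent dimension,
# it presumably samples and saves generated images as training progresses.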
callback = [GANMonitor(num_images=16, latent_dim=config.LATENT_DIM)]
wgan.fit(train_images, epochs=config.EPOCHS, callbacks=callback) | import tensorflow as tf
import numpy as np
import config
|
test_connection.py | import unittest.mock as mock
import pytest
import requests_mock
from openeo.rest.auth.auth import NullAuth, BearerAuth
from openeo.rest.connection import Connection, RestApiConnection, connect, OpenEoApiError
API_URL = "https://oeo.net/"
@pytest.mark.parametrize(
["base", "paths", "expected_path"],
[
# Simple
("https://oeo.net", ["foo", "/foo"], "https://oeo.net/foo"),
("https://oeo.net/", ["foo", "/foo"], "https://oeo.net/foo"),
# With trailing slash
("https://oeo.net", ["foo/", "/foo/"], "https://oeo.net/foo/"),
("https://oeo.net/", ["foo/", "/foo/"], "https://oeo.net/foo/"),
# Deeper
("https://oeo.net/api/v04", ["foo/bar", "/foo/bar"], "https://oeo.net/api/v04/foo/bar"),
("https://oeo.net/api/v04/", ["foo/bar", "/foo/bar"], "https://oeo.net/api/v04/foo/bar"),
("https://oeo.net/api/v04", ["foo/bar/", "/foo/bar/"], "https://oeo.net/api/v04/foo/bar/"),
("https://oeo.net/api/v04/", ["foo/bar/", "/foo/bar/"], "https://oeo.net/api/v04/foo/bar/"),
]
)
def test_rest_api_connection_url_handling(requests_mock, base, paths, expected_path):
"""Test connection __init__ and proper joining of root url and API path"""
conn = RestApiConnection(base)
requests_mock.get(expected_path, text="payload")
requests_mock.post(expected_path, text="payload")
for path in paths:
assert conn.get(path).text == "payload"
assert conn.post(path, {"foo": "bar"}).text == "payload"
def test_rest_api_headers():
conn = RestApiConnection(API_URL)
with requests_mock.Mocker() as m:
def text(request, context):
assert request.headers["User-Agent"].startswith("openeo-python-client")
assert request.headers["X-Openeo-Bar"] == "XY123"
m.get("/foo", text=text)
m.post("/foo", text=text)
conn.get("/foo", headers={"X-Openeo-Bar": "XY123"})
conn.post("/foo", {}, headers={"X-Openeo-Bar": "XY123"})
def test_connection_with_session():
session = mock.Mock()
response = session.request.return_value
response.status_code = 200
response.json.return_value = {"foo": "bar"}
conn = Connection("https://oeo.net/", session=session)
assert conn.capabilities().capabilities == {"foo": "bar"}
session.request.assert_any_call(
url="https://oeo.net/", method="get", headers=mock.ANY, stream=mock.ANY, auth=mock.ANY
)
def test_connect_with_session():
session = mock.Mock()
response = session.request.return_value
response.status_code = 200
response.json.return_value = {"foo": "bar"}
conn = connect("https://oeo.net/", session=session)
assert conn.capabilities().capabilities == {"foo": "bar"}
session.request.assert_any_call(
url="https://oeo.net/", method="get", headers=mock.ANY, stream=mock.ANY, auth=mock.ANY
)
def test_api_error(requests_mock):
conn = Connection(API_URL)
requests_mock.get('https://oeo.net/collections/foobar', status_code=404, json={
"code": "CollectionNotFound", "message": "No such things as a collection 'foobar'", "id": "54321"
})
with pytest.raises(OpenEoApiError) as exc_info:
conn.describe_collection("foobar")
exc = exc_info.value
assert exc.http_status_code == 404
assert exc.code == "CollectionNotFound"
assert exc.message == "No such things as a collection 'foobar'"
assert exc.id == "54321"
assert exc.url is None
def test_api_error_non_json(requests_mock):
conn = Connection(API_URL)
requests_mock.get('https://oeo.net/collections/foobar', status_code=500, text="olapola")
with pytest.raises(OpenEoApiError) as exc_info:
conn.describe_collection("foobar")
exc = exc_info.value
assert exc.http_status_code == 500
assert exc.code == "unknown"
assert exc.message == "olapola"
assert exc.id is None
assert exc.url is None
def test_authenticate_basic(requests_mock):
conn = Connection(API_URL)
def text_callback(request, context):
assert request.headers["Authorization"] == "Basic am9objpqMGhu"
return '{"access_token":"w3lc0m3"}'
requests_mock.get('https://oeo.net/credentials/basic', text=text_callback)
assert isinstance(conn.auth, NullAuth)
conn.authenticate_basic(username="john", password="j0hn")
assert isinstance(conn.auth, BearerAuth)
assert conn.auth.bearer == "w3lc0m3"
def test_authenticate_oidc(oidc_test_setup):
# see test/rest/conftest.py for `oidc_test_setup` fixture
client_id = "myclient"
oidc_discovery_url = "https://oeo.net/credentials/oidc"
state, webbrowser_open = oidc_test_setup(client_id=client_id, oidc_discovery_url=oidc_discovery_url)
| # With all this set up, kick off the openid connect flow
conn = Connection(API_URL)
assert isinstance(conn.auth, NullAuth)
conn.authenticate_OIDC(client_id=client_id, webbrowser_open=webbrowser_open)
assert isinstance(conn.auth, BearerAuth)
assert conn.auth.bearer == state["access_token"]
def test_load_collection_arguments(requests_mock):
conn = Connection(API_URL)
requests_mock.get(API_URL, json={"version": "0.4.0"})
requests_mock.get(API_URL + "collections/FOO", json={
"properties": {"eo:bands": [{"name": "red"}, {"name": "green"}, {"name": "blue"}]}
})
spatial_extent = {"west": 1, "south": 2, "east": 3, "north": 4}
temporal_extent = ["2019-01-01", "2019-01-22"]
im = conn.load_collection(
"FOO", spatial_extent=spatial_extent, temporal_extent=temporal_extent, bands=["red", "green"]
)
node = im.graph[im.node_id]
assert node["process_id"] == "load_collection"
assert node["arguments"] == {
"id": "FOO",
"spatial_extent": spatial_extent,
"temporal_extent": temporal_extent,
"bands": ["red", "green"]
} | |
field.py | from ._base import Base, _rule
from .fullname_json import FullnameJson
from .values.text_val import TextVal
from .values.null import NULL
from .values.holder import Holder
from .values.true import TRUE
from .values.false import FALSE
class Field(FullnameJson, TextVal, NULL, Holder, TRUE, FALSE):
reserved = {**Base.reserved, **TextVal.reserved,
**FullnameJson.reserved, **NULL.reserved, **Holder.reserved, **TRUE.reserved, **FALSE.reserved}
tokens = Base.tokens + TextVal.tokens + \
FullnameJson.tokens + NULL.tokens + Holder.tokens + TRUE.tokens + FALSE.tokens
precedence = FullnameJson.precedence
# Tokens
# rules
_start = 'field'
@_rule('''field : STAR
| NUMBER
| TEXTVAL
| NULL
| TRUE
| FALSE''')
def p_field_items(self, p):
p[0] = self.provider.new_record(p[1])
@_rule('field : HOLDER')
def p_field_param(self, p):
|
@_rule('field : fullname_json')
def p_field_name(self, p):
p[0] = p[1]
| p[0] = self.provider.new_param() |
main.js | /// <reference path="../../../../angular2/typings/node/node.d.ts" />
var { Cc, Ci, Cu } = require('chrome');
var os = Cc['@mozilla.org/observer-service;1'].getService(Ci.nsIObserverService);
var ParserUtil = require('./parser_util');
class Profiler {
constructor() {
this._profiler = Cc['@mozilla.org/tools/profiler;1'].getService(Ci.nsIProfiler);
}
start(entries, interval, features, timeStarted) {
this._profiler.StartProfiler(entries, interval, features, features.length);
this._profilerStartTime = timeStarted;
this._markerEvents = [];
}
stop() { this._profiler.StopProfiler(); }
getProfilePerfEvents() {
var profileData = this._profiler.getProfileData();
var perfEvents = ParserUtil.convertPerfProfileToEvents(profileData);
perfEvents = this._mergeMarkerEvents(perfEvents);
perfEvents.sort(function (event1, event2) { return event1.ts - event2.ts; }); // Sort by ts
return perfEvents;
}
_mergeMarkerEvents(perfEvents) {
this._markerEvents.forEach(function (markerEvent) { perfEvents.push(markerEvent); });
return perfEvents;
}
addStartEvent(name, timeStarted) {
this._markerEvents.push({ ph: 'b', ts: timeStarted - this._profilerStartTime, name: name });
}
addEndEvent(name, timeEnded) {
this._markerEvents.push({ ph: 'e', ts: timeEnded - this._profilerStartTime, name: name });
}
}
function forceGC() {
Cu.forceGC();
os.notifyObservers(null, 'child-gc-request', null);
}
; | var mod = require('sdk/page-mod');
var data = require('sdk/self').data;
var profiler = new Profiler();
mod.PageMod({
include: ['*'],
contentScriptFile: data.url('installed_script.js'),
onAttach: worker => {
worker.port.on('startProfiler', (timeStarted) => profiler.start(/* = profiler memory */ 3000000, 0.1, ['leaf', 'js', 'stackwalk', 'gc'], timeStarted));
worker.port.on('stopProfiler', () => profiler.stop());
worker.port.on('getProfile', () => worker.port.emit('perfProfile', profiler.getProfilePerfEvents()));
worker.port.on('forceGC', forceGC);
worker.port.on('markStart', (name, timeStarted) => profiler.addStartEvent(name, timeStarted));
worker.port.on('markEnd', (name, timeEnded) => profiler.addEndEvent(name, timeEnded));
}
});
//# sourceMappingURL=<inline base64 source map omitted> | | 
options.go | /*
Copyright The Voyager Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package e2e
import (
"flag"
"os"
"path/filepath"
"strings"
"github.com/appscode/go/crypto/rand"
"github.com/appscode/go/flags"
"github.com/appscode/go/log"
logs "github.com/appscode/go/log/golog"
"github.com/appscode/voyager/pkg/cmds/server"
utilruntime "k8s.io/apimachinery/pkg/util/runtime"
"k8s.io/client-go/util/homedir"
)
type E2EOptions struct {
*server.OperatorOptions
KubeContext string
KubeConfig string
TestNamespace string
Cleanup bool
TestCertificate bool
DumpLocation string
LBPersistIP string
OperatorOnly bool
SelfHostedOperator bool
}
var (
options = &E2EOptions{
OperatorOptions: server.NewOperatorOptions(),
KubeConfig: filepath.Join(homedir.HomeDir(), ".kube", "config"),
TestNamespace: rand.WithUniqSuffix("test-voyager"),
Cleanup: true,
TestCertificate: false,
DumpLocation: os.TempDir(),
OperatorOnly: false,
SelfHostedOperator: false,
}
)
func init() {
options.AddGoFlags(flag.CommandLine)
flag.StringVar(&options.KubeConfig, "kubeconfig", "", "Path to kubeconfig file with authorization information (the master location is set by the master flag).")
flag.StringVar(&options.KubeContext, "kube-context", "", "Name of kube context")
	flag.StringVar(&options.TestNamespace, "namespace", "test-"+rand.Characters(5), "Run tests in this namespace")
flag.BoolVar(&options.Cleanup, "cleanup", options.Cleanup, "")
flag.BoolVar(&options.TestCertificate, "cert", options.TestCertificate, "")
flag.StringVar(&options.DumpLocation, "dump", os.TempDir(), "")
flag.StringVar(&options.LBPersistIP, "lb-ip", options.LBPersistIP, "LoadBalancer persistent IP")
flag.BoolVar(&options.OperatorOnly, "operator-only", options.OperatorOnly, "run operator locally without running tests")
flag.BoolVar(&options.SelfHostedOperator, "selfhosted-operator", options.SelfHostedOperator, "If true, operator runs inside cluster")
enableLogging()
}
func enableLogging() |
func (c *E2EOptions) validate() {
if c.CloudProvider == "" {
log.Fatal("Provider name required, not provided")
}
if !strings.HasPrefix(c.TestNamespace, "test-") {
log.Fatal("Namespace is not a Test namespace")
}
}
| {
defer func() {
logs.InitLogs()
defer logs.FlushLogs()
}()
utilruntime.Must(flag.Set("logtostderr", "true"))
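	// Keep any -v log level given explicitly on the command line; otherwise fall back to level 2 below.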
logLevelFlag := flag.Lookup("v")
if logLevelFlag != nil {
if len(logLevelFlag.Value.String()) > 0 && logLevelFlag.Value.String() != "0" {
return
}
}
flags.SetLogLevel(2)
} |
project_controller.py | from core_engine.utils.aws.rekognition_helper import (
create_project,
delete_project,
version_description,
get_all_projects,
)
from core_engine import logger
logging = logger(__name__)
class ProjectController:
def __init__(self):
pass
def create_project_controller(self, project_name: str):
"""[Create a project in AWS]
Args:
project_name (str): [Project NAme]
Raises:
error: [Error]
Returns:
[type]: [description]
"""
try:
logging.info(f"Create Project Controller: {project_name}")
return create_project(project_name)
except Exception as error:
logging.error(f"{error=}")
raise error
def delete_project_controller(self, project_arn: str):
"""[Deletes a project in AWS]
Args:
project_arn (str): [description]
Raises:
error: [Error]
Returns:
[type]: [description]
"""
try:
logging.info(f"Create Project Controller: {project_arn}")
return delete_project(project_arn=project_arn)
except Exception as error:
logging.error(f"{error=}")
raise error
def get_all_projects_controller(self):
"""[Lists all the project in AWS]
Raises:
error: [Error]
Returns:
[type]: [description]
"""
try:
logging.info(f"Create Project Controller")
return get_all_projects()
except Exception as error:
logging.error(f"{error=}")
raise error
def version_description_controller(self, project_arn: str, version_name: str):
"""[Describws a project in AWS]
Args:
project_arn (str): [Unique Identifier for your Project in AWS]
version_name (str): [Version name on AWS console]
Raises:
error: [Error] | Returns:
[type]: [description]
"""
try:
logging.info(f"Create Project Controller: {project_arn}")
return version_description(
project_arn=project_arn, version_name=version_name
)
except Exception as error:
logging.error(f"{error=}")
raise error | |
re_unicode.rs | // Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::borrow::Cow;
use std::collections::HashMap;
use std::fmt;
use std::ops::Index;
use std::str::FromStr;
use std::sync::Arc;
use memchr::memchr;
use syntax;
use error::Error;
use exec::{Exec, ExecNoSyncStr};
use expand::expand_str;
use re_builder::unicode::RegexBuilder;
use re_trait::{self, RegularExpression, SubCapturesPosIter};
/// Escapes all regular expression meta characters in `text`.
///
/// The string returned may be safely used as a literal in a regular
/// expression.
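///
/// # Example
///
/// An illustrative check (not from the original docs):
///
/// ```rust
/// # use regex::escape;
/// assert_eq!(escape("a.b*c"), r"a\.b\*c");
/// ```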
pub fn escape(text: &str) -> String {
syntax::escape(text)
}
/// Match represents a single match of a regex in a haystack.
///
/// The lifetime parameter `'t` refers to the lifetime of the matched text.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Match<'t> {
text: &'t str,
start: usize,
end: usize,
}
impl<'t> Match<'t> {
/// Returns the starting byte offset of the match in the haystack.
#[inline]
pub fn start(&self) -> usize {
self.start
}
/// Returns the ending byte offset of the match in the haystack.
#[inline]
pub fn end(&self) -> usize {
self.end
}
/// Returns the matched text.
#[inline]
pub fn as_str(&self) -> &'t str {
&self.text[self.start..self.end]
}
/// Creates a new match from the given haystack and byte offsets.
#[inline]
fn new(haystack: &'t str, start: usize, end: usize) -> Match<'t> {
Match {
text: haystack,
start: start,
end: end,
}
}
}
impl<'t> From<Match<'t>> for &'t str {
fn from(m: Match<'t>) -> &'t str {
m.as_str()
}
}
/// A compiled regular expression for matching Unicode strings.
///
/// It is represented as either a sequence of bytecode instructions (dynamic)
/// or as a specialized Rust function (native). It can be used to search, split
/// or replace text. All searching is done with an implicit `.*?` at the
/// beginning and end of an expression. To force an expression to match the
/// whole string (or a prefix or a suffix), you must use an anchor like `^` or
/// `$` (or `\A` and `\z`).
///
/// While this crate will handle Unicode strings (whether in the regular
/// expression or in the search text), all positions returned are **byte
/// indices**. Every byte index is guaranteed to be at a Unicode code point
/// boundary.
///
/// The lifetimes `'r` and `'t` in this crate correspond to the lifetime of a
/// compiled regular expression and text to search, respectively.
///
/// The only methods that allocate new strings are the string replacement
/// methods. All other methods (searching and splitting) return borrowed
/// pointers into the string given.
///
/// # Examples
///
/// Find the location of a US phone number:
///
/// ```rust
/// # use regex::Regex;
/// let re = Regex::new("[0-9]{3}-[0-9]{3}-[0-9]{4}").unwrap();
/// let mat = re.find("phone: 111-222-3333").unwrap();
/// assert_eq!((mat.start(), mat.end()), (7, 19));
/// ```
///
/// # Using the `std::str::pattern` methods with `Regex`
///
/// > **Note**: This section requires that this crate is compiled with the
/// > `pattern` Cargo feature enabled, which **requires nightly Rust**.
///
/// Since `Regex` implements `Pattern`, you can use regexes with methods
/// defined on `&str`. For example, `is_match`, `find`, `find_iter`
/// and `split` can be replaced with `str::contains`, `str::find`,
/// `str::match_indices` and `str::split`.
///
/// Here are some examples:
///
/// ```rust,ignore
/// # use regex::Regex;
/// let re = Regex::new(r"\d+").unwrap();
/// let haystack = "a111b222c";
///
/// assert!(haystack.contains(&re));
/// assert_eq!(haystack.find(&re), Some(1));
/// assert_eq!(haystack.match_indices(&re).collect::<Vec<_>>(),
/// vec![(1, 4), (5, 8)]);
/// assert_eq!(haystack.split(&re).collect::<Vec<_>>(), vec!["a", "b", "c"]);
/// ```
#[derive(Clone)]
pub struct Regex(Exec);
impl fmt::Display for Regex {
/// Shows the original regular expression.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.as_str())
}
}
impl fmt::Debug for Regex {
/// Shows the original regular expression.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(self, f)
}
}
#[doc(hidden)]
impl From<Exec> for Regex {
fn from(exec: Exec) -> Regex {
Regex(exec)
}
}
impl FromStr for Regex {
type Err = Error;
/// Attempts to parse a string into a regular expression
fn from_str(s: &str) -> Result<Regex, Error> {
Regex::new(s)
}
}
/// Core regular expression methods.
impl Regex {
/// Compiles a regular expression. Once compiled, it can be used repeatedly
/// to search, split or replace text in a string.
///
/// If an invalid expression is given, then an error is returned.
pub fn new(re: &str) -> Result<Regex, Error> {
RegexBuilder::new(re).build()
}
/// Returns true if and only if the regex matches the string given.
///
/// It is recommended to use this method if all you need to do is test
/// a match, since the underlying matching engine may be able to do less
/// work.
///
/// # Example
///
/// Test if some text contains at least one word with exactly 13
/// Unicode word characters:
///
/// ```rust
/// # extern crate regex; use regex::Regex;
/// # fn main() {
/// let text = "I categorically deny having triskaidekaphobia.";
/// assert!(Regex::new(r"\b\w{13}\b").unwrap().is_match(text));
/// # }
/// ```
pub fn is_match(&self, text: &str) -> bool {
self.is_match_at(text, 0)
}
/// Returns the start and end byte range of the leftmost-first match in
/// `text`. If no match exists, then `None` is returned.
///
/// Note that this should only be used if you want to discover the position
/// of the match. Testing the existence of a match is faster if you use
/// `is_match`.
///
/// # Example
///
/// Find the start and end location of the first word with exactly 13
/// Unicode word characters:
///
/// ```rust
/// # extern crate regex; use regex::Regex;
/// # fn main() {
/// let text = "I categorically deny having triskaidekaphobia.";
/// let mat = Regex::new(r"\b\w{13}\b").unwrap().find(text).unwrap();
/// assert_eq!(mat.start(), 2);
/// assert_eq!(mat.end(), 15);
/// # }
/// ```
pub fn find<'t>(&self, text: &'t str) -> Option<Match<'t>> {
self.find_at(text, 0)
}
/// Returns an iterator for each successive non-overlapping match in
/// `text`, returning the start and end byte indices with respect to
/// `text`.
///
/// # Example
///
/// Find the start and end location of every word with exactly 13 Unicode
/// word characters:
///
/// ```rust
/// # extern crate regex; use regex::Regex;
/// # fn main() {
/// let text = "Retroactively relinquishing remunerations is reprehensible.";
/// for mat in Regex::new(r"\b\w{13}\b").unwrap().find_iter(text) {
/// println!("{:?}", mat);
/// }
/// # }
/// ```
pub fn find_iter<'r, 't>(&'r self, text: &'t str) -> Matches<'r, 't> {
Matches(self.0.searcher_str().find_iter(text))
}
/// Returns the capture groups corresponding to the leftmost-first
/// match in `text`. Capture group `0` always corresponds to the entire
/// match. If no match is found, then `None` is returned.
///
/// You should only use `captures` if you need access to the location of
/// capturing group matches. Otherwise, `find` is faster for discovering
/// the location of the overall match.
///
/// # Examples
///
/// Say you have some text with movie names and their release years,
/// like "'Citizen Kane' (1941)". It'd be nice if we could search for text
/// looking like that, while also extracting the movie name and its release
/// year separately.
///
/// ```rust
/// # extern crate regex; use regex::Regex;
/// # fn main() {
/// let re = Regex::new(r"'([^']+)'\s+\((\d{4})\)").unwrap();
/// let text = "Not my favorite movie: 'Citizen Kane' (1941).";
/// let caps = re.captures(text).unwrap();
/// assert_eq!(caps.get(1).unwrap().as_str(), "Citizen Kane");
/// assert_eq!(caps.get(2).unwrap().as_str(), "1941");
/// assert_eq!(caps.get(0).unwrap().as_str(), "'Citizen Kane' (1941)");
/// // You can also access the groups by index using the Index notation.
/// // Note that this will panic on an invalid index.
/// assert_eq!(&caps[1], "Citizen Kane");
/// assert_eq!(&caps[2], "1941");
/// assert_eq!(&caps[0], "'Citizen Kane' (1941)");
/// # }
/// ```
///
/// Note that the full match is at capture group `0`. Each subsequent
/// capture group is indexed by the order of its opening `(`.
///
/// We can make this example a bit clearer by using *named* capture groups:
///
/// ```rust
/// # extern crate regex; use regex::Regex;
/// # fn main() {
/// let re = Regex::new(r"'(?P<title>[^']+)'\s+\((?P<year>\d{4})\)")
/// .unwrap();
/// let text = "Not my favorite movie: 'Citizen Kane' (1941).";
/// let caps = re.captures(text).unwrap();
/// assert_eq!(caps.name("title").unwrap().as_str(), "Citizen Kane");
/// assert_eq!(caps.name("year").unwrap().as_str(), "1941");
/// assert_eq!(caps.get(0).unwrap().as_str(), "'Citizen Kane' (1941)");
/// // You can also access the groups by name using the Index notation.
/// // Note that this will panic on an invalid group name.
/// assert_eq!(&caps["title"], "Citizen Kane");
/// assert_eq!(&caps["year"], "1941");
/// assert_eq!(&caps[0], "'Citizen Kane' (1941)");
///
/// # }
/// ```
///
/// Here we name the capture groups, which we can access with the `name`
/// method or the `Index` notation with a `&str`. Note that the named
/// capture groups are still accessible with `get` or the `Index` notation
/// with a `usize`.
///
/// The `0`th capture group is always unnamed, so it must always be
/// accessed with `get(0)` or `[0]`.
pub fn captures<'t>(&self, text: &'t str) -> Option<Captures<'t>> {
let mut locs = self.capture_locations();
self.captures_read_at(&mut locs, text, 0).map(move |_| Captures {
text: text,
locs: locs.0,
named_groups: self.0.capture_name_idx().clone(),
})
}
/// Returns an iterator over all the non-overlapping capture groups matched
/// in `text`. This is operationally the same as `find_iter`, except it
/// yields information about capturing group matches.
///
/// # Example
///
/// We can use this to find all movie titles and their release years in
/// some text, where the movie is formatted like "'Title' (xxxx)":
///
/// ```rust
/// # extern crate regex; use regex::Regex;
/// # fn main() {
/// let re = Regex::new(r"'(?P<title>[^']+)'\s+\((?P<year>\d{4})\)")
/// .unwrap();
/// let text = "'Citizen Kane' (1941), 'The Wizard of Oz' (1939), 'M' (1931).";
/// for caps in re.captures_iter(text) {
/// println!("Movie: {:?}, Released: {:?}",
/// &caps["title"], &caps["year"]);
/// }
/// // Output:
/// // Movie: Citizen Kane, Released: 1941
/// // Movie: The Wizard of Oz, Released: 1939
/// // Movie: M, Released: 1931
/// # }
/// ```
pub fn captures_iter<'r, 't>(
&'r self,
text: &'t str,
) -> CaptureMatches<'r, 't> {
CaptureMatches(self.0.searcher_str().captures_iter(text))
}
/// Returns an iterator of substrings of `text` delimited by a match of the
/// regular expression. Namely, each element of the iterator corresponds to
/// text that *isn't* matched by the regular expression.
///
/// This method will *not* copy the text given.
///
/// # Example
///
/// To split a string delimited by arbitrary amounts of spaces or tabs:
///
/// ```rust
/// # extern crate regex; use regex::Regex;
/// # fn main() {
/// let re = Regex::new(r"[ \t]+").unwrap();
/// let fields: Vec<&str> = re.split("a b \t c\td e").collect();
/// assert_eq!(fields, vec!["a", "b", "c", "d", "e"]);
/// # }
/// ```
pub fn split<'r, 't>(&'r self, text: &'t str) -> Split<'r, 't> {
Split {
finder: self.find_iter(text),
last: 0,
}
}
/// Returns an iterator of at most `limit` substrings of `text` delimited
/// by a match of the regular expression. (A `limit` of `0` will return no
/// substrings.) Namely, each element of the iterator corresponds to text
/// that *isn't* matched by the regular expression. The remainder of the
/// string that is not split will be the last element in the iterator.
///
/// This method will *not* copy the text given.
///
/// # Example
///
/// Get the first two words in some text:
///
/// ```rust
/// # extern crate regex; use regex::Regex;
/// # fn main() {
/// let re = Regex::new(r"\W+").unwrap();
/// let fields: Vec<&str> = re.splitn("Hey! How are you?", 3).collect();
/// assert_eq!(fields, vec!("Hey", "How", "are you?"));
/// # }
/// ```
pub fn splitn<'r, 't>(&'r self, text: &'t str, limit: usize)
-> SplitN<'r, 't> {
SplitN {
splits: self.split(text),
n: limit,
}
}
/// Replaces the leftmost-first match with the replacement provided.
/// The replacement can be a regular string (where `$N` and `$name` are
/// expanded to match capture groups) or a function that takes the matches'
/// `Captures` and returns the replaced string.
///
/// If no match is found, then a copy of the string is returned unchanged.
///
/// # Replacement string syntax
///
    /// All instances of `$name` in the replacement text are replaced with the
/// corresponding capture group `name`.
///
/// `name` may be an integer corresponding to the index of the
/// capture group (counted by order of opening parenthesis where `0` is the
/// entire match) or it can be a name (consisting of letters, digits or
/// underscores) corresponding to a named capture group.
///
/// If `name` isn't a valid capture group (whether the name doesn't exist
/// or isn't a valid index), then it is replaced with the empty string.
///
/// The longest possible name is used. e.g., `$1a` looks up the capture
/// group named `1a` and not the capture group at index `1`. To exert more
/// precise control over the name, use braces, e.g., `${1}a`.
///
/// To write a literal `$` use `$$`.
///
/// # Examples
///
/// Note that this function is polymorphic with respect to the replacement.
/// In typical usage, this can just be a normal string:
///
/// ```rust
/// # extern crate regex; use regex::Regex;
/// # fn main() {
/// let re = Regex::new("[^01]+").unwrap();
/// assert_eq!(re.replace("1078910", ""), "1010");
/// # }
/// ```
///
/// But anything satisfying the `Replacer` trait will work. For example,
/// a closure of type `|&Captures| -> String` provides direct access to the
/// captures corresponding to a match. This allows one to access
/// capturing group matches easily:
///
/// ```rust
/// # extern crate regex; use regex::Regex;
/// # use regex::Captures; fn main() {
/// let re = Regex::new(r"([^,\s]+),\s+(\S+)").unwrap();
/// let result = re.replace("Springsteen, Bruce", |caps: &Captures| {
/// format!("{} {}", &caps[2], &caps[1])
/// });
/// assert_eq!(result, "Bruce Springsteen");
/// # }
/// ```
///
/// But this is a bit cumbersome to use all the time. Instead, a simple
/// syntax is supported that expands `$name` into the corresponding capture
/// group. Here's the last example, but using this expansion technique
/// with named capture groups:
///
/// ```rust
/// # extern crate regex; use regex::Regex;
/// # fn main() {
/// let re = Regex::new(r"(?P<last>[^,\s]+),\s+(?P<first>\S+)").unwrap();
/// let result = re.replace("Springsteen, Bruce", "$first $last");
/// assert_eq!(result, "Bruce Springsteen");
/// # }
/// ```
///
/// Note that using `$2` instead of `$first` or `$1` instead of `$last`
/// would produce the same result. To write a literal `$` use `$$`.
///
/// Sometimes the replacement string requires use of curly braces to
/// delineate a capture group replacement and surrounding literal text.
/// For example, if we wanted to join two words together with an
/// underscore:
///
/// ```rust
/// # extern crate regex; use regex::Regex;
/// # fn main() {
/// let re = Regex::new(r"(?P<first>\w+)\s+(?P<second>\w+)").unwrap();
/// let result = re.replace("deep fried", "${first}_$second");
/// assert_eq!(result, "deep_fried");
/// # }
/// ```
///
/// Without the curly braces, the capture group name `first_` would be
/// used, and since it doesn't exist, it would be replaced with the empty
/// string.
///
/// Finally, sometimes you just want to replace a literal string with no
/// regard for capturing group expansion. This can be done by wrapping a
/// byte string with `NoExpand`:
///
/// ```rust
/// # extern crate regex; use regex::Regex;
/// # fn main() {
/// use regex::NoExpand;
///
/// let re = Regex::new(r"(?P<last>[^,\s]+),\s+(\S+)").unwrap();
/// let result = re.replace("Springsteen, Bruce", NoExpand("$2 $last"));
/// assert_eq!(result, "$2 $last");
/// # }
/// ```
pub fn replace<'t, R: Replacer>(
&self,
text: &'t str,
rep: R,
) -> Cow<'t, str> {
self.replacen(text, 1, rep)
}
/// Replaces all non-overlapping matches in `text` with the replacement
/// provided. This is the same as calling `replacen` with `limit` set to
/// `0`.
///
/// See the documentation for `replace` for details on how to access
/// capturing group matches in the replacement string.
pub fn replace_all<'t, R: Replacer>(
&self,
text: &'t str,
rep: R,
) -> Cow<'t, str> {
self.replacen(text, 0, rep)
}
/// Replaces at most `limit` non-overlapping matches in `text` with the
/// replacement provided. If `limit` is 0, then all non-overlapping matches
/// are replaced.
///
/// See the documentation for `replace` for details on how to access
/// capturing group matches in the replacement string.
pub fn | <'t, R: Replacer>(
&self,
text: &'t str,
limit: usize,
mut rep: R,
) -> Cow<'t, str> {
// If we know that the replacement doesn't have any capture expansions,
// then we can fast path. The fast path can make a tremendous
// difference:
//
// 1) We use `find_iter` instead of `captures_iter`. Not asking for
// captures generally makes the regex engines faster.
// 2) We don't need to look up all of the capture groups and do
// replacements inside the replacement string. We just push it
// at each match and be done with it.
if let Some(rep) = rep.no_expansion() {
let mut it = self.find_iter(text).enumerate().peekable();
if it.peek().is_none() {
return Cow::Borrowed(text);
}
let mut new = String::with_capacity(text.len());
let mut last_match = 0;
for (i, m) in it {
if limit > 0 && i >= limit {
break
}
new.push_str(&text[last_match..m.start()]);
new.push_str(&rep);
last_match = m.end();
}
new.push_str(&text[last_match..]);
return Cow::Owned(new);
}
// The slower path, which we use if the replacement needs access to
// capture groups.
let mut it = self.captures_iter(text).enumerate().peekable();
if it.peek().is_none() {
return Cow::Borrowed(text);
}
let mut new = String::with_capacity(text.len());
let mut last_match = 0;
for (i, cap) in it {
if limit > 0 && i >= limit {
break
}
// unwrap on 0 is OK because captures only reports matches
let m = cap.get(0).unwrap();
new.push_str(&text[last_match..m.start()]);
rep.replace_append(&cap, &mut new);
last_match = m.end();
}
new.push_str(&text[last_match..]);
Cow::Owned(new)
}
}
/// Advanced or "lower level" search methods.
impl Regex {
/// Returns the end location of a match in the text given.
///
/// This method may have the same performance characteristics as
/// `is_match`, except it provides an end location for a match. In
/// particular, the location returned *may be shorter* than the proper end
/// of the leftmost-first match.
///
/// # Example
///
/// Typically, `a+` would match the entire first sequence of `a` in some
/// text, but `shortest_match` can give up as soon as it sees the first
/// `a`.
///
/// ```rust
/// # extern crate regex; use regex::Regex;
/// # fn main() {
/// let text = "aaaaa";
/// let pos = Regex::new(r"a+").unwrap().shortest_match(text);
/// assert_eq!(pos, Some(1));
/// # }
/// ```
pub fn shortest_match(&self, text: &str) -> Option<usize> {
self.shortest_match_at(text, 0)
}
/// Returns the same as shortest_match, but starts the search at the given
/// offset.
///
/// The significance of the starting point is that it takes the surrounding
/// context into consideration. For example, the `\A` anchor can only
/// match when `start == 0`.
pub fn shortest_match_at(
&self,
text: &str,
start: usize,
) -> Option<usize> {
self.0.searcher_str().shortest_match_at(text, start)
}
/// Returns the same as is_match, but starts the search at the given
/// offset.
///
/// The significance of the starting point is that it takes the surrounding
/// context into consideration. For example, the `\A` anchor can only
/// match when `start == 0`.
pub fn is_match_at(&self, text: &str, start: usize) -> bool {
self.shortest_match_at(text, start).is_some()
}
/// Returns the same as find, but starts the search at the given
/// offset.
///
/// The significance of the starting point is that it takes the surrounding
/// context into consideration. For example, the `\A` anchor can only
/// match when `start == 0`.
pub fn find_at<'t>(
&self,
text: &'t str,
start: usize,
) -> Option<Match<'t>> {
self.0.searcher_str().find_at(text, start).map(|(s, e)| {
Match::new(text, s, e)
})
}
/// This is like `captures`, but uses
/// [`CaptureLocations`](struct.CaptureLocations.html)
/// instead of
/// [`Captures`](struct.Captures.html) in order to amortize allocations.
///
/// To create a `CaptureLocations` value, use the
/// `Regex::capture_locations` method.
///
/// This returns the overall match if this was successful, which is always
    /// equivalent to the `0`th capture group.
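    ///
    /// A minimal usage sketch (illustrative, not from the original docs):
    ///
    /// ```rust
    /// # use regex::Regex;
    /// let re = Regex::new(r"(\d+)").unwrap();
    /// let mut locs = re.capture_locations();
    /// if re.captures_read(&mut locs, "id 42").is_some() {
    ///     // "42" occupies bytes 3..5 of the haystack.
    ///     assert_eq!(locs.get(1), Some((3, 5)));
    /// }
    /// ```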
pub fn captures_read<'t>(
&self,
locs: &mut CaptureLocations,
text: &'t str,
) -> Option<Match<'t>> {
self.captures_read_at(locs, text, 0)
}
/// Returns the same as captures, but starts the search at the given
/// offset and populates the capture locations given.
///
/// The significance of the starting point is that it takes the surrounding
/// context into consideration. For example, the `\A` anchor can only
/// match when `start == 0`.
pub fn captures_read_at<'t>(
&self,
locs: &mut CaptureLocations,
text: &'t str,
start: usize,
) -> Option<Match<'t>> {
self.0
.searcher_str()
.captures_read_at(&mut locs.0, text, start)
.map(|(s, e)| Match::new(text, s, e))
}
/// An undocumented alias for `captures_read_at`.
///
/// The `regex-capi` crate previously used this routine, so to avoid
/// breaking that crate, we continue to provide the name as an undocumented
/// alias.
#[doc(hidden)]
pub fn read_captures_at<'t>(
&self,
locs: &mut CaptureLocations,
text: &'t str,
start: usize,
) -> Option<Match<'t>> {
self.captures_read_at(locs, text, start)
}
}
/// Auxiliary methods.
impl Regex {
/// Returns the original string of this regex.
pub fn as_str(&self) -> &str {
&self.0.regex_strings()[0]
}
/// Returns an iterator over the capture names.
pub fn capture_names(&self) -> CaptureNames {
CaptureNames(self.0.capture_names().iter())
}
/// Returns the number of captures.
pub fn captures_len(&self) -> usize {
self.0.capture_names().len()
}
/// Returns an empty set of capture locations that can be reused in
/// multiple calls to `captures_read` or `captures_read_at`.
pub fn capture_locations(&self) -> CaptureLocations {
CaptureLocations(self.0.searcher_str().locations())
}
/// An alias for `capture_locations` to preserve backward compatibility.
///
/// The `regex-capi` crate uses this method, so to avoid breaking that
/// crate, we continue to export it as an undocumented API.
#[doc(hidden)]
pub fn locations(&self) -> CaptureLocations {
CaptureLocations(self.0.searcher_str().locations())
}
}
/// An iterator over the names of all possible captures.
///
/// `None` indicates an unnamed capture; the first element (capture 0, the
/// whole matched region) is always unnamed.
///
/// `'r` is the lifetime of the compiled regular expression.
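///
/// A quick illustration (not from the original docs):
///
/// ```rust
/// # use regex::Regex;
/// let re = Regex::new(r"(a)(?P<b>b)").unwrap();
/// // Capture 0 and the unnamed group yield `None`; the named group yields its name.
/// let names: Vec<_> = re.capture_names().collect();
/// assert_eq!(names, vec![None, None, Some("b")]);
/// ```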
pub struct CaptureNames<'r>(::std::slice::Iter<'r, Option<String>>);
impl<'r> Iterator for CaptureNames<'r> {
type Item = Option<&'r str>;
fn next(&mut self) -> Option<Option<&'r str>> {
self.0
.next()
.as_ref()
.map(|slot| slot.as_ref().map(|name| name.as_ref()))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
}
/// Yields all substrings delimited by a regular expression match.
///
/// `'r` is the lifetime of the compiled regular expression and `'t` is the
/// lifetime of the string being split.
pub struct Split<'r, 't> {
finder: Matches<'r, 't>,
last: usize,
}
impl<'r, 't> Iterator for Split<'r, 't> {
type Item = &'t str;
fn next(&mut self) -> Option<&'t str> {
let text = self.finder.0.text();
match self.finder.next() {
None => {
if self.last >= text.len() {
None
} else {
let s = &text[self.last..];
self.last = text.len();
Some(s)
}
}
Some(m) => {
let matched = &text[self.last..m.start()];
self.last = m.end();
Some(matched)
}
}
}
}
/// Yields at most `N` substrings delimited by a regular expression match.
///
/// The last substring will be whatever remains after splitting.
///
/// `'r` is the lifetime of the compiled regular expression and `'t` is the
/// lifetime of the string being split.
pub struct SplitN<'r, 't> {
splits: Split<'r, 't>,
n: usize,
}
impl<'r, 't> Iterator for SplitN<'r, 't> {
type Item = &'t str;
fn next(&mut self) -> Option<&'t str> {
if self.n == 0 {
return None
}
self.n -= 1;
if self.n == 0 {
let text = self.splits.finder.0.text();
Some(&text[self.splits.last..])
} else {
self.splits.next()
}
}
}
/// CaptureLocations is a low level representation of the raw offsets of each
/// submatch.
///
/// You can think of this as a lower level
/// [`Captures`](struct.Captures.html), where this type does not support
/// named capturing groups directly and it does not borrow the text that these
/// offsets were matched on.
///
/// Primarily, this type is useful when using the lower level `Regex` APIs
/// such as `captures_read`, which permits amortizing the allocation in which
/// capture match locations are stored.
///
/// In order to build a value of this type, you'll need to call the
/// `capture_locations` method on the `Regex` being used to execute the search.
/// The value returned can then be reused in subsequent searches.
#[derive(Clone, Debug)]
pub struct CaptureLocations(re_trait::Locations);
/// A type alias for `CaptureLocations` for backwards compatibility.
///
/// Previously, we exported `CaptureLocations` as `Locations` in an
/// undocumented API. To prevent breaking that code (e.g., in `regex-capi`),
/// we continue re-exporting the same undocumented API.
#[doc(hidden)]
pub type Locations = CaptureLocations;
impl CaptureLocations {
/// Returns the start and end positions of the Nth capture group. Returns
/// `None` if `i` is not a valid capture group or if the capture group did
/// not match anything. The positions returned are *always* byte indices
/// with respect to the original string matched.
#[inline]
pub fn get(&self, i: usize) -> Option<(usize, usize)> {
self.0.pos(i)
}
/// Returns the total number of capturing groups.
///
/// This is always at least `1` since every regex has at least `1`
/// capturing group that corresponds to the entire match.
#[inline]
pub fn len(&self) -> usize {
self.0.len()
}
/// An alias for the `get` method for backwards compatibility.
///
/// Previously, we exported `get` as `pos` in an undocumented API. To
/// prevent breaking that code (e.g., in `regex-capi`), we continue
/// re-exporting the same undocumented API.
#[doc(hidden)]
#[inline]
pub fn pos(&self, i: usize) -> Option<(usize, usize)> {
self.get(i)
}
}
/// Captures represents a group of captured strings for a single match.
///
/// The 0th capture always corresponds to the entire match. Each subsequent
/// index corresponds to the next capture group in the regex. If a capture
/// group is named, then the matched string is *also* available via the `name`
/// method. (Note that the 0th capture is always unnamed and so must be
/// accessed with the `get` method.)
///
/// Positions returned from a capture group are always byte indices.
///
/// `'t` is the lifetime of the matched text.
pub struct Captures<'t> {
text: &'t str,
locs: re_trait::Locations,
named_groups: Arc<HashMap<String, usize>>,
}
impl<'t> Captures<'t> {
/// Returns the match associated with the capture group at index `i`. If
/// `i` does not correspond to a capture group, or if the capture group
/// did not participate in the match, then `None` is returned.
///
/// # Examples
///
/// Get the text of the match with a default of an empty string if this
/// group didn't participate in the match:
///
/// ```rust
/// # use regex::Regex;
/// let re = Regex::new(r"[a-z]+(?:([0-9]+)|([A-Z]+))").unwrap();
/// let caps = re.captures("abc123").unwrap();
///
/// let text1 = caps.get(1).map_or("", |m| m.as_str());
/// let text2 = caps.get(2).map_or("", |m| m.as_str());
/// assert_eq!(text1, "123");
/// assert_eq!(text2, "");
/// ```
pub fn get(&self, i: usize) -> Option<Match<'t>> {
self.locs.pos(i).map(|(s, e)| Match::new(self.text, s, e))
}
/// Returns the match for the capture group named `name`. If `name` isn't a
/// valid capture group or didn't match anything, then `None` is returned.
pub fn name(&self, name: &str) -> Option<Match<'t>> {
self.named_groups.get(name).and_then(|&i| self.get(i))
}
/// An iterator that yields all capturing matches in the order in which
/// they appear in the regex. If a particular capture group didn't
/// participate in the match, then `None` is yielded for that capture.
///
/// The first match always corresponds to the overall match of the regex.
pub fn iter<'c>(&'c self) -> SubCaptureMatches<'c, 't> {
SubCaptureMatches {
caps: self,
it: self.locs.iter(),
}
}
/// Expands all instances of `$name` in `replacement` to the corresponding
/// capture group `name`, and writes them to the `dst` buffer given.
///
/// `name` may be an integer corresponding to the index of the
/// capture group (counted by order of opening parenthesis where `0` is the
/// entire match) or it can be a name (consisting of letters, digits or
/// underscores) corresponding to a named capture group.
///
/// If `name` isn't a valid capture group (whether the name doesn't exist
/// or isn't a valid index), then it is replaced with the empty string.
///
/// The longest possible name is used. e.g., `$1a` looks up the capture
/// group named `1a` and not the capture group at index `1`. To exert more
/// precise control over the name, use braces, e.g., `${1}a`.
///
/// To write a literal `$` use `$$`.
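    ///
    /// A short sketch (illustrative, not from the original docs):
    ///
    /// ```rust
    /// # use regex::Regex;
    /// let re = Regex::new(r"(?P<last>\w+), (?P<first>\w+)").unwrap();
    /// let caps = re.captures("Doe, Jane").unwrap();
    /// let mut dst = String::new();
    /// caps.expand("$first $last", &mut dst);
    /// assert_eq!(dst, "Jane Doe");
    /// ```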
pub fn expand(&self, replacement: &str, dst: &mut String) {
expand_str(self, replacement, dst)
}
/// Returns the number of captured groups.
///
/// This is always at least `1`, since every regex has at least one capture
/// group that corresponds to the full match.
#[inline]
pub fn len(&self) -> usize {
self.locs.len()
}
}
impl<'t> fmt::Debug for Captures<'t> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("Captures").field(&CapturesDebug(self)).finish()
}
}
struct CapturesDebug<'c, 't: 'c>(&'c Captures<'t>);
impl<'c, 't> fmt::Debug for CapturesDebug<'c, 't> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// We'd like to show something nice here, even if it means an
// allocation to build a reverse index.
let slot_to_name: HashMap<&usize, &String> =
self.0.named_groups.iter().map(|(a, b)| (b, a)).collect();
let mut map = f.debug_map();
for (slot, m) in self.0.locs.iter().enumerate() {
let m = m.map(|(s, e)| &self.0.text[s..e]);
if let Some(name) = slot_to_name.get(&slot) {
map.entry(&name, &m);
} else {
map.entry(&slot, &m);
}
}
map.finish()
}
}
/// Get a group by index.
///
/// `'t` is the lifetime of the matched text.
///
/// The text can't outlive the `Captures` object if this method is
/// used, because of how `Index` is defined (normally `a[i]` is part
/// of `a` and can't outlive it); to do that, use `get()` instead.
///
/// # Panics
///
/// If there is no group at the given index.
impl<'t> Index<usize> for Captures<'t> {
type Output = str;
fn index(&self, i: usize) -> &str {
self.get(i).map(|m| m.as_str())
.unwrap_or_else(|| panic!("no group at index '{}'", i))
}
}
/// Get a group by name.
///
/// `'t` is the lifetime of the matched text and `'i` is the lifetime
/// of the group name (the index).
///
/// The text can't outlive the `Captures` object if this method is
/// used, because of how `Index` is defined (normally `a[i]` is part
/// of `a` and can't outlive it); to do that, use `name` instead.
///
/// # Panics
///
/// If there is no group named by the given value.
impl<'t, 'i> Index<&'i str> for Captures<'t> {
type Output = str;
fn index<'a>(&'a self, name: &'i str) -> &'a str {
self.name(name).map(|m| m.as_str())
.unwrap_or_else(|| panic!("no group named '{}'", name))
}
}
/// An iterator that yields all capturing matches in the order in which they
/// appear in the regex.
///
/// If a particular capture group didn't participate in the match, then `None`
/// is yielded for that capture. The first match always corresponds to the
/// overall match of the regex.
///
/// The lifetime `'c` corresponds to the lifetime of the `Captures` value, and
/// the lifetime `'t` corresponds to the originally matched text.
pub struct SubCaptureMatches<'c, 't: 'c> {
caps: &'c Captures<'t>,
it: SubCapturesPosIter<'c>,
}
impl<'c, 't> Iterator for SubCaptureMatches<'c, 't> {
type Item = Option<Match<'t>>;
fn next(&mut self) -> Option<Option<Match<'t>>> {
self.it.next()
.map(|cap| cap.map(|(s, e)| Match::new(self.caps.text, s, e)))
}
}
/// An iterator that yields all non-overlapping capture groups matching a
/// particular regular expression.
///
/// The iterator stops when no more matches can be found.
///
/// `'r` is the lifetime of the compiled regular expression and `'t` is the
/// lifetime of the matched string.
pub struct CaptureMatches<'r, 't>(re_trait::CaptureMatches<'t, ExecNoSyncStr<'r>>);
impl<'r, 't> Iterator for CaptureMatches<'r, 't> {
type Item = Captures<'t>;
fn next(&mut self) -> Option<Captures<'t>> {
self.0.next().map(|locs| Captures {
text: self.0.text(),
locs: locs,
named_groups: self.0.regex().capture_name_idx().clone(),
})
}
}
/// An iterator over all non-overlapping matches for a particular string.
///
/// The iterator yields a `Match` value. The iterator stops when no more
/// matches can be found.
///
/// `'r` is the lifetime of the compiled regular expression and `'t` is the
/// lifetime of the matched string.
pub struct Matches<'r, 't>(re_trait::Matches<'t, ExecNoSyncStr<'r>>);
impl<'r, 't> Iterator for Matches<'r, 't> {
type Item = Match<'t>;
fn next(&mut self) -> Option<Match<'t>> {
let text = self.0.text();
self.0.next().map(|(s, e)| Match::new(text, s, e))
}
}
/// Replacer describes types that can be used to replace matches in a string.
///
/// In general, users of this crate shouldn't need to implement this trait,
/// since implementations are already provided for `&str` and
/// `FnMut(&Captures) -> String` (or any `FnMut(&Captures) -> T`
/// where `T: AsRef<str>`), which covers most use cases.
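///
/// A sketch of a hand-rolled implementation (illustrative; the type `Upper`
/// is hypothetical, not part of this crate):
///
/// ```rust
/// use regex::{Captures, Regex, Replacer};
///
/// struct Upper;
///
/// impl Replacer for Upper {
///     fn replace_append(&mut self, caps: &Captures, dst: &mut String) {
///         // Append the whole match, upper-cased.
///         dst.push_str(&caps[0].to_uppercase());
///     }
/// }
///
/// let re = Regex::new(r"\bcat\b").unwrap();
/// assert_eq!(re.replace_all("cat and dog", Upper), "CAT and dog");
/// ```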
pub trait Replacer {
/// Appends text to `dst` to replace the current match.
///
/// The current match is represented by `caps`, which is guaranteed to
/// have a match at capture group `0`.
///
/// For example, a no-op replacement would be
    /// `dst.push_str(caps.get(0).unwrap().as_str())`.
fn replace_append(&mut self, caps: &Captures, dst: &mut String);
/// Return a fixed unchanging replacement string.
///
/// When doing replacements, if access to `Captures` is not needed (e.g.,
/// the replacement byte string does not need `$` expansion), then it can
/// be beneficial to avoid finding sub-captures.
///
/// In general, this is called once for every call to `replacen`.
fn no_expansion<'r>(&'r mut self) -> Option<Cow<'r, str>> {
None
}
/// Return a `Replacer` that borrows and wraps this `Replacer`.
///
/// This is useful when you want to take a generic `Replacer` (which might
/// not be cloneable) and use it without consuming it, so it can be used
/// more than once.
///
/// # Example
///
/// ```
/// use regex::{Regex, Replacer};
///
/// fn replace_all_twice<R: Replacer>(
/// re: Regex,
/// src: &str,
/// mut rep: R,
/// ) -> String {
/// let dst = re.replace_all(src, rep.by_ref());
/// let dst = re.replace_all(&dst, rep.by_ref());
/// dst.into_owned()
/// }
/// ```
fn by_ref<'r>(&'r mut self) -> ReplacerRef<'r, Self> {
ReplacerRef(self)
}
}
/// By-reference adaptor for a `Replacer`
///
/// Returned by [`Replacer::by_ref`](trait.Replacer.html#method.by_ref).
#[derive(Debug)]
pub struct ReplacerRef<'a, R: ?Sized + 'a>(&'a mut R);
impl<'a, R: Replacer + ?Sized + 'a> Replacer for ReplacerRef<'a, R> {
fn replace_append(&mut self, caps: &Captures, dst: &mut String) {
self.0.replace_append(caps, dst)
}
fn no_expansion(&mut self) -> Option<Cow<str>> {
self.0.no_expansion()
}
}
impl<'a> Replacer for &'a str {
fn replace_append(&mut self, caps: &Captures, dst: &mut String) {
caps.expand(*self, dst);
}
fn no_expansion(&mut self) -> Option<Cow<str>> {
match memchr(b'$', self.as_bytes()) {
Some(_) => None,
None => Some(Cow::Borrowed(*self)),
}
}
}
impl<F, T> Replacer for F where F: FnMut(&Captures) -> T, T: AsRef<str> {
fn replace_append(&mut self, caps: &Captures, dst: &mut String) {
dst.push_str((*self)(caps).as_ref());
}
}
/// `NoExpand` indicates literal string replacement.
///
/// It can be used with `replace` and `replace_all` to do a literal string
/// replacement without expanding `$name` to their corresponding capture
/// groups. This can be both convenient (to avoid escaping `$`, for example)
/// and performant (since capture groups don't need to be found).
///
/// `'t` is the lifetime of the literal text.
pub struct NoExpand<'t>(pub &'t str);
impl<'t> Replacer for NoExpand<'t> {
fn replace_append(&mut self, _: &Captures, dst: &mut String) {
dst.push_str(self.0);
}
fn no_expansion(&mut self) -> Option<Cow<str>> {
Some(Cow::Borrowed(self.0))
}
}
| replacen |
virtual_connection.rs | use std::fmt;
use std::net::SocketAddr;
use std::time::{Duration, Instant};
use crate::{
config::Config,
error::{ErrorKind, PacketErrorKind, Result},
infrastructure::{
arranging::{Arranging, ArrangingSystem, OrderingSystem, SequencingSystem},
AcknowledgmentHandler, CongestionHandler, Fragmentation, SentPacket,
},
net::constants::{
ACKED_PACKET_HEADER, DEFAULT_ORDERING_STREAM, DEFAULT_SEQUENCING_STREAM,
STANDARD_HEADER_SIZE,
},
packet::{
DeliveryGuarantee, IncomingPackets, OrderingGuarantee, OutgoingPacketBuilder,
OutgoingPackets, Packet, PacketInfo, PacketReader, PacketType, SequenceNumber,
},
};
/// Contains the information about a certain 'virtual connection' over UDP.
/// This connection also keeps track of network quality, processes packets, and buffers data related to the connection.
pub struct VirtualConnection {
/// Last time we received a packet from this client
pub last_heard: Instant,
/// Last time we sent a packet to this client
pub last_sent: Instant,
/// The address of the remote endpoint
pub remote_address: SocketAddr,
ever_sent: bool,
ever_recv: bool,
ordering_system: OrderingSystem<(Box<[u8]>, PacketType)>,
sequencing_system: SequencingSystem<Box<[u8]>>,
acknowledge_handler: AcknowledgmentHandler,
congestion_handler: CongestionHandler,
config: Config,
fragmentation: Fragmentation,
}
impl VirtualConnection {
/// Creates and returns a new Connection that wraps the provided socket address
pub fn new(addr: SocketAddr, config: &Config, time: Instant) -> VirtualConnection {
VirtualConnection {
last_heard: time,
last_sent: time,
remote_address: addr,
ever_sent: false,
ever_recv: false,
ordering_system: OrderingSystem::new(),
sequencing_system: SequencingSystem::new(),
acknowledge_handler: AcknowledgmentHandler::new(),
congestion_handler: CongestionHandler::new(config),
fragmentation: Fragmentation::new(config),
config: config.to_owned(),
}
}
/// Records that this connection has sent a packet. Returns whether the connection has
/// become acknowledged because of this send.
pub fn record_send(&mut self) -> bool {
let was_est = self.is_established();
self.ever_sent = true;
!was_est && self.is_established()
}
/// Records that this connection has received a packet. Returns whether the connection has
/// become acknowledged because of this receive.
pub fn record_recv(&mut self) -> bool {
let was_est = self.is_established();
self.ever_recv = true;
!was_est && self.is_established()
}
pub fn is_established(&self) -> bool {
self.ever_sent && self.ever_recv
}
pub fn packets_in_flight(&self) -> u16 {
self.acknowledge_handler.packets_in_flight()
}
/// Returns a [Duration] representing the interval since we last heard from the client
pub fn last_heard(&self, time: Instant) -> Duration {
// TODO: Replace with `saturating_duration_since` once it becomes stable.
// this function panics if the user supplies a time instant earlier than last_heard
time.duration_since(self.last_heard)
}
/// Returns a [Duration] representing the interval since we last sent to the client
pub fn last_sent(&self, time: Instant) -> Duration {
// TODO: Replace with `saturating_duration_since` once it becomes stable.
// this function panics if the user supplies a time instant earlier than last_sent
time.duration_since(self.last_sent)
}
/// Pre-processes the given buffer to be sent over the network.
pub fn process_outgoing<'a>(
&mut self,
packet: PacketInfo<'a>,
last_item_identifier: Option<SequenceNumber>,
time: Instant,
) -> Result<OutgoingPackets<'a>> {
self.last_sent = time;
match packet.delivery {
DeliveryGuarantee::Unreliable => {
if packet.payload.len() <= self.config.receive_buffer_max_size {
if packet.packet_type == PacketType::Heartbeat {
self.congestion_handler
.process_outgoing(self.acknowledge_handler.local_sequence_num(), time);
}
let mut builder = OutgoingPacketBuilder::new(packet.payload)
.with_default_header(packet.packet_type, packet.delivery, packet.ordering);
if let OrderingGuarantee::Sequenced(stream_id) = packet.ordering {
let item_identifier = self
.sequencing_system
.get_or_create_stream(stream_id.unwrap_or(DEFAULT_SEQUENCING_STREAM))
.new_item_identifier();
builder = builder.with_sequencing_header(item_identifier as u16, stream_id);
};
Ok(OutgoingPackets::one(builder.build()))
} else {
Err(PacketErrorKind::ExceededMaxPacketSize.into())
}
}
DeliveryGuarantee::Reliable => {
let payload_length = packet.payload.len() as u16;
let mut item_identifier_value = None;
let outgoing = {
// split the packet if the payload length is greater than the allowed fragment size.
if payload_length <= self.config.fragment_size {
let mut builder = OutgoingPacketBuilder::new(packet.payload)
.with_default_header(
packet.packet_type,
packet.delivery,
packet.ordering,
);
builder = builder.with_acknowledgment_header(
self.acknowledge_handler.local_sequence_num(),
self.acknowledge_handler.remote_sequence_num(),
self.acknowledge_handler.ack_bitfield(),
);
if let OrderingGuarantee::Ordered(stream_id) = packet.ordering {
let item_identifier =
if let Some(item_identifier) = last_item_identifier {
item_identifier
} else {
self.ordering_system
.get_or_create_stream(
stream_id.unwrap_or(DEFAULT_ORDERING_STREAM),
)
.new_item_identifier()
};
item_identifier_value = Some(item_identifier);
builder = builder.with_ordering_header(item_identifier, stream_id);
};
if let OrderingGuarantee::Sequenced(stream_id) = packet.ordering {
let item_identifier =
if let Some(item_identifier) = last_item_identifier {
item_identifier
} else {
self.sequencing_system
.get_or_create_stream(
stream_id.unwrap_or(DEFAULT_SEQUENCING_STREAM),
)
.new_item_identifier()
};
item_identifier_value = Some(item_identifier);
builder = builder.with_sequencing_header(item_identifier, stream_id);
};
OutgoingPackets::one(builder.build())
} else {
if packet.packet_type != PacketType::Packet {
return Err(PacketErrorKind::PacketCannotBeFragmented.into());
}
OutgoingPackets::many(
Fragmentation::spit_into_fragments(packet.payload, &self.config)?
.into_iter()
.enumerate()
.map(|(fragment_id, fragment)| {
let fragments_needed = Fragmentation::fragments_needed(
payload_length,
self.config.fragment_size,
)
as u8;
let mut builder = OutgoingPacketBuilder::new(fragment)
.with_default_header(
PacketType::Fragment, // change from Packet to Fragment type; it only matters when assembling/disassembling the packet header.
packet.delivery,
packet.ordering,
);
builder = builder.with_fragment_header(
self.acknowledge_handler.local_sequence_num(),
fragment_id as u8,
fragments_needed,
);
if fragment_id == 0 {
builder = builder.with_acknowledgment_header(
self.acknowledge_handler.local_sequence_num(),
self.acknowledge_handler.remote_sequence_num(),
self.acknowledge_handler.ack_bitfield(),
);
}
builder.build()
})
.collect(),
)
}
};
self.congestion_handler
.process_outgoing(self.acknowledge_handler.local_sequence_num(), time);
self.acknowledge_handler.process_outgoing(
packet.packet_type,
packet.payload,
packet.ordering,
item_identifier_value,
);
Ok(outgoing)
}
}
}
/// Processes the incoming data and returns a packet once the data is complete.
pub fn process_incoming(
&mut self,
received_data: &[u8],
time: Instant,
) -> Result<IncomingPackets> {
self.last_heard = time;
let mut packet_reader = PacketReader::new(received_data);
let header = packet_reader.read_standard_header()?;
if !header.is_current_protocol() {
return Err(ErrorKind::ProtocolVersionMismatch);
}
if header.is_heartbeat() {
// heartbeat packets are unreliable, unordered and empty.
// we already updated our `self.last_heard` time, nothing else to be done.
return Ok(IncomingPackets::zero());
}
match header.delivery_guarantee() {
DeliveryGuarantee::Unreliable => {
if let OrderingGuarantee::Sequenced(_id) = header.ordering_guarantee() {
let arranging_header =
packet_reader.read_arranging_header(u16::from(STANDARD_HEADER_SIZE))?;
let payload = packet_reader.read_payload();
let stream = self
.sequencing_system
.get_or_create_stream(arranging_header.stream_id());
if let Some(packet) = stream.arrange(arranging_header.arranging_id(), payload) {
return Ok(IncomingPackets::one(
Packet::new(
self.remote_address,
packet,
header.delivery_guarantee(),
OrderingGuarantee::Sequenced(Some(arranging_header.stream_id())),
),
header.packet_type(),
));
}
return Ok(IncomingPackets::zero());
}
return Ok(IncomingPackets::one(
Packet::new(
self.remote_address,
packet_reader.read_payload(),
header.delivery_guarantee(),
header.ordering_guarantee(),
),
header.packet_type(),
));
}
DeliveryGuarantee::Reliable => {
if header.is_fragment() {
if let Ok((fragment_header, acked_header)) = packet_reader.read_fragment() {
let payload = packet_reader.read_payload();
match self.fragmentation.handle_fragment(
fragment_header,
&payload,
acked_header,
) {
Ok(Some((payload, acked_header))) => {
self.congestion_handler
.process_incoming(acked_header.sequence());
self.acknowledge_handler.process_incoming(
acked_header.sequence(),
acked_header.ack_seq(),
acked_header.ack_field(),
);
return Ok(IncomingPackets::one(
Packet::new(
self.remote_address,
payload.into_boxed_slice(),
header.delivery_guarantee(),
header.ordering_guarantee(),
),
PacketType::Packet, // change from Fragment to Packet type; it only matters when assembling/disassembling the packet header.
));
}
Ok(None) => return Ok(IncomingPackets::zero()),
Err(e) => return Err(e),
};
}
} else {
let acked_header = packet_reader.read_acknowledge_header()?;
self.congestion_handler
.process_incoming(acked_header.sequence());
self.acknowledge_handler.process_incoming(
acked_header.sequence(),
acked_header.ack_seq(),
acked_header.ack_field(),
);
if let OrderingGuarantee::Sequenced(_) = header.ordering_guarantee() {
let arranging_header = packet_reader.read_arranging_header(u16::from(
STANDARD_HEADER_SIZE + ACKED_PACKET_HEADER,
))?;
let payload = packet_reader.read_payload();
let stream = self
.sequencing_system
.get_or_create_stream(arranging_header.stream_id());
if let Some(packet) =
stream.arrange(arranging_header.arranging_id(), payload)
{
return Ok(IncomingPackets::one(
Packet::new(
self.remote_address,
packet,
header.delivery_guarantee(),
OrderingGuarantee::Sequenced(Some(
arranging_header.stream_id(),
)),
),
header.packet_type(),
));
}
} else if let OrderingGuarantee::Ordered(_id) = header.ordering_guarantee() {
let arranging_header = packet_reader.read_arranging_header(u16::from(
STANDARD_HEADER_SIZE + ACKED_PACKET_HEADER,
))?;
let payload = packet_reader.read_payload();
let stream = self
.ordering_system
.get_or_create_stream(arranging_header.stream_id());
let address = self.remote_address;
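// `arrange` either releases this packet immediately or buffers it; chaining
// `stream.iter_mut()` also drains any earlier packets that became deliverable
// once this one arrived, so a single datagram can yield several ordered packets.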
return Ok(IncomingPackets::many(
stream
.arrange(
arranging_header.arranging_id(),
(payload, header.packet_type()),
)
.into_iter()
.chain(stream.iter_mut())
.map(|(packet, packet_type)| {
(
Packet::new(
address,
packet,
header.delivery_guarantee(),
OrderingGuarantee::Ordered(Some(
arranging_header.stream_id(),
)),
),
packet_type,
)
})
.collect(),
));
} else {
let payload = packet_reader.read_payload();
return Ok(IncomingPackets::one(
Packet::new(
self.remote_address,
payload,
header.delivery_guarantee(),
header.ordering_guarantee(),
),
header.packet_type(),
));
}
}
}
}
Ok(IncomingPackets::zero())
}
/// Gathers dropped packets from the acknowledgment handler.
///
/// Note that requesting the dropped packets also removes them from this connection.
pub fn gather_dropped_packets(&mut self) -> Vec<SentPacket> {
self.acknowledge_handler.dropped_packets()
}
}
impl fmt::Debug for VirtualConnection {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"{}:{}",
self.remote_address.ip(),
self.remote_address.port()
)
}
}
#[cfg(test)]
mod tests {
use std::io::Write;
use std::time::{Duration, Instant};
use byteorder::{BigEndian, WriteBytesExt};
use crate::config::Config;
use crate::net::constants;
use crate::packet::header::{AckedPacketHeader, ArrangingHeader, HeaderWriter, StandardHeader};
use crate::packet::{DeliveryGuarantee, OrderingGuarantee, Packet, PacketInfo, PacketType};
use crate::protocol_version::ProtocolVersion;
use super::VirtualConnection;
const PAYLOAD: [u8; 4] = [1, 2, 3, 4];
#[test]
fn set_last_sent_and_last_heard_when_processing() {
let mut connection = create_virtual_connection();
let curr_sent = connection.last_sent;
let curr_heard = connection.last_heard;
let out_packet = connection
.process_outgoing(
PacketInfo::heartbeat_packet(&[]),
None,
curr_sent + Duration::from_secs(1),
)
.unwrap()
.into_iter()
.next()
.unwrap();
let in_packet = connection
.process_incoming(&out_packet.contents(), curr_heard + Duration::from_secs(2))
.unwrap()
.into_iter()
.next();
assert_eq!(
connection.last_sent.duration_since(curr_sent),
Duration::from_secs(1)
);
assert_eq!(
connection.last_heard.duration_since(curr_heard),
Duration::from_secs(2)
);
assert!(in_packet.is_none());
}
#[test]
fn assure_right_fragmentation() {
let mut protocol_version = Vec::new();
protocol_version
.write_u16::<BigEndian>(ProtocolVersion::get_crc16())
.unwrap();
let standard_header = [protocol_version, vec![1, 1, 2]].concat();
let acked_header = vec![0, 0, 0, 4, 0, 0, 255, 255, 0, 0, 0, 0];
let first_fragment = vec![0, 0, 1, 4];
let second_fragment = vec![0, 0, 2, 4];
let third_fragment = vec![0, 0, 3, 4];
let mut connection = create_virtual_connection();
let packet = connection
.process_incoming(
[standard_header.as_slice(), acked_header.as_slice()]
.concat()
.as_slice(),
Instant::now(),
)
.unwrap()
.into_iter()
.next();
assert!(packet.is_none());
let packet = connection
.process_incoming(
[
standard_header.as_slice(),
first_fragment.as_slice(),
&PAYLOAD,
]
.concat()
.as_slice(),
Instant::now(),
)
.unwrap()
.into_iter()
.next();
assert!(packet.is_none());
let packet = connection
.process_incoming(
[
standard_header.as_slice(),
second_fragment.as_slice(),
&PAYLOAD,
]
.concat()
.as_slice(),
Instant::now(),
)
.unwrap()
.into_iter()
.next();
assert!(packet.is_none());
let (packets, _) = connection
.process_incoming(
[
standard_header.as_slice(),
third_fragment.as_slice(),
&PAYLOAD,
]
.concat()
.as_slice(),
Instant::now(),
)
.unwrap()
.into_iter()
.next()
.unwrap();
assert_eq!(
packets.payload(),
&*[PAYLOAD, PAYLOAD, PAYLOAD].concat().into_boxed_slice()
);
}
#[test]
fn expect_fragmentation() {
let mut connection = create_virtual_connection();
let buffer = vec![1; 4000];
let packets: Vec<_> = connection
.process_outgoing(
PacketInfo::user_packet(
&buffer,
DeliveryGuarantee::Reliable,
OrderingGuarantee::Ordered(None),
),
None,
Instant::now(),
)
.unwrap()
.into_iter()
.collect();
assert_eq!(packets.len(), 4);
}
#[test]
fn assure_correct_outgoing_processing() {
let mut connection = create_virtual_connection();
let buffer = vec![1; 1000];
connection
.process_outgoing(
PacketInfo::user_packet(
&buffer,
DeliveryGuarantee::Unreliable,
OrderingGuarantee::None,
),
None,
Instant::now(),
)
.unwrap();
connection
.process_outgoing(
PacketInfo::user_packet(
&buffer,
DeliveryGuarantee::Unreliable,
OrderingGuarantee::Sequenced(None),
),
None,
Instant::now(),
)
.unwrap();
connection
.process_outgoing(
PacketInfo::user_packet(
&buffer,
DeliveryGuarantee::Reliable,
OrderingGuarantee::Ordered(None),
),
None,
Instant::now(),
)
.unwrap();
connection
.process_outgoing(
PacketInfo::user_packet(
&buffer,
DeliveryGuarantee::Reliable,
OrderingGuarantee::Sequenced(None),
),
None,
Instant::now(),
)
.unwrap();
}
#[test]
fn assure_right_sequencing() {
let mut connection = create_virtual_connection();
assert_incoming_with_order(
DeliveryGuarantee::Unreliable,
OrderingGuarantee::Sequenced(Some(1)),
&mut connection,
Some(Packet::unreliable_sequenced(
get_fake_addr(),
PAYLOAD.to_vec(),
Some(1),
)),
1,
);
assert_incoming_with_order(
DeliveryGuarantee::Unreliable,
OrderingGuarantee::Sequenced(Some(1)),
&mut connection,
Some(Packet::unreliable_sequenced(
get_fake_addr(),
PAYLOAD.to_vec(),
Some(1),
)),
3,
);
assert_incoming_with_order(
DeliveryGuarantee::Unreliable,
OrderingGuarantee::Sequenced(Some(1)),
&mut connection,
None,
2,
);
assert_incoming_with_order(
DeliveryGuarantee::Unreliable,
OrderingGuarantee::Sequenced(Some(1)),
&mut connection,
Some(Packet::unreliable_sequenced(
get_fake_addr(),
PAYLOAD.to_vec(),
Some(1),
)),
4,
);
assert_incoming_with_order(
DeliveryGuarantee::Reliable,
OrderingGuarantee::Sequenced(Some(1)),
&mut connection,
Some(Packet::reliable_sequenced(
get_fake_addr(),
PAYLOAD.to_vec(),
Some(1),
)),
5,
);
}
#[test]
fn assure_right_ordering() {
let mut connection = create_virtual_connection();
assert_incoming_with_order(
DeliveryGuarantee::Reliable,
OrderingGuarantee::Ordered(Some(1)),
&mut connection,
Some(Packet::reliable_ordered(
get_fake_addr(),
PAYLOAD.to_vec(),
Some(1),
)),
0,
);
assert_incoming_with_order(
DeliveryGuarantee::Reliable,
OrderingGuarantee::Ordered(Some(1)),
&mut connection,
None,
2,
);
assert_incoming_with_order(
DeliveryGuarantee::Reliable,
OrderingGuarantee::Ordered(Some(1)),
&mut connection,
None,
3,
);
assert_incoming_with_order(
DeliveryGuarantee::Reliable,
OrderingGuarantee::Ordered(Some(1)),
&mut connection,
Some(Packet::reliable_ordered(
get_fake_addr(),
PAYLOAD.to_vec(),
Some(1),
)),
1,
);
}
#[test]
fn assure_correct_processing_of_incoming() {
let mut connection = create_virtual_connection();
assert_incoming_without_order(
DeliveryGuarantee::Unreliable,
&mut connection,
Packet::unreliable(get_fake_addr(), PAYLOAD.to_vec()),
);
assert_incoming_without_order(
DeliveryGuarantee::Reliable,
&mut connection,
Packet::reliable_unordered(get_fake_addr(), PAYLOAD.to_vec()),
);
assert_incoming_with_order(
DeliveryGuarantee::Unreliable,
OrderingGuarantee::Sequenced(Some(1)),
&mut connection,
Some(Packet::unreliable_sequenced(
get_fake_addr(),
PAYLOAD.to_vec(),
Some(1),
)),
1,
);
assert_incoming_with_order(
DeliveryGuarantee::Reliable,
OrderingGuarantee::Ordered(Some(1)),
&mut connection,
Some(Packet::reliable_ordered(
get_fake_addr(),
PAYLOAD.to_vec(),
Some(1),
)),
0,
);
}
#[test]
fn assure_right_header_size() {
assert_right_header_size(
DeliveryGuarantee::Unreliable,
OrderingGuarantee::None,
(constants::STANDARD_HEADER_SIZE) as usize,
);
assert_right_header_size(
DeliveryGuarantee::Unreliable,
OrderingGuarantee::Sequenced(None),
(constants::STANDARD_HEADER_SIZE + constants::ARRANGING_PACKET_HEADER) as usize,
);
assert_right_header_size(
DeliveryGuarantee::Reliable,
OrderingGuarantee::None,
(constants::STANDARD_HEADER_SIZE + constants::ACKED_PACKET_HEADER) as usize,
);
assert_right_header_size(
DeliveryGuarantee::Reliable,
OrderingGuarantee::Ordered(None),
(constants::STANDARD_HEADER_SIZE
+ constants::ACKED_PACKET_HEADER
+ constants::ARRANGING_PACKET_HEADER) as usize,
);
}
#[test]
fn ensure_input_header_data_does_not_access_out_of_bounds() {
let mut protocol_version = Vec::new();
protocol_version
.write_u16::<BigEndian>(ProtocolVersion::get_crc16())
.unwrap();
let standard_header = [protocol_version, vec![1, 1, 2]].concat();
let acked_header = vec![0, 0, 255, 4, 0, 0, 255, 255, 0, 0, 0, 0];
use crate::error::{ErrorKind, FragmentErrorKind};
let mut connection = create_virtual_connection();
let result = connection.process_incoming(
[standard_header.as_slice(), acked_header.as_slice()]
.concat()
.as_slice(),
Instant::now(),
);
match result {
Err(ErrorKind::FragmentError(FragmentErrorKind::ExceededMaxFragments)) => {
// Ok
}
_ => {
panic!("Supposed to get a fragment error");
}
}
}
/// ======= helper functions =========
fn create_virtual_connection() -> VirtualConnection {
VirtualConnection::new(get_fake_addr(), &Config::default(), Instant::now())
}
fn get_fake_addr() -> std::net::SocketAddr {
"127.0.0.1:0".parse().unwrap()
}
// assert that processing packets with the given `DeliveryGuarantee` and `OrderingGuarantee` results in the given `result_packet`
fn assert_incoming_with_order(
delivery: DeliveryGuarantee,
ordering: OrderingGuarantee,
connection: &mut VirtualConnection,
result_packet: Option<Packet>,
order_id: u16,
) {
let mut packet = Vec::new();
// configure the right header based on specified guarantees.
let header = StandardHeader::new(delivery, ordering, PacketType::Packet);
header.parse(&mut packet).unwrap();
if let OrderingGuarantee::Sequenced(val) = ordering {
if delivery == DeliveryGuarantee::Reliable {
let ack_header = AckedPacketHeader::new(1, 2, 3);
ack_header.parse(&mut packet).unwrap();
}
let order_header = ArrangingHeader::new(order_id, val.unwrap());
order_header.parse(&mut packet).unwrap();
}
if let OrderingGuarantee::Ordered(val) = ordering {
if delivery == DeliveryGuarantee::Reliable {
let ack_header = AckedPacketHeader::new(1, 2, 3);
let order_header = ArrangingHeader::new(order_id, val.unwrap());
ack_header.parse(&mut packet).unwrap();
order_header.parse(&mut packet).unwrap();
}
}
if let OrderingGuarantee::None = ordering {
if delivery == DeliveryGuarantee::Reliable {
let ack_header = AckedPacketHeader::new(1, 2, 3);
ack_header.parse(&mut packet).unwrap();
}
}
packet.write_all(&PAYLOAD).unwrap();
let packets = connection
.process_incoming(packet.as_slice(), Instant::now())
.unwrap()
.into_iter()
.next()
.map(|(packet, _)| packet);
assert_eq!(packets, result_packet);
}
// assert that the given `DeliveryGuarantee` results in the given `Packet` after processing.
fn assert_incoming_without_order(
delivery: DeliveryGuarantee,
connection: &mut VirtualConnection,
result_packet: Packet,
) {
let mut packet = Vec::new();
// configure the right header based on specified guarantees.
let header = StandardHeader::new(delivery, OrderingGuarantee::None, PacketType::Packet);
header.parse(&mut packet).unwrap();
if delivery == DeliveryGuarantee::Reliable {
let ack_header = AckedPacketHeader::new(1, 2, 3);
ack_header.parse(&mut packet).unwrap();
}
packet.write_all(&PAYLOAD).unwrap();
let (packet, _) = connection
.process_incoming(packet.as_slice(), Instant::now())
.unwrap()
.into_iter()
.next()
.unwrap();
assert_eq!(packet, result_packet);
}
// assert that the size of the processed header is the same as the given one.
fn assert_right_header_size(
delivery: DeliveryGuarantee,
ordering: OrderingGuarantee,
expected_header_size: usize,
) {
let mut connection = create_virtual_connection();
let buffer = vec![1; 500];
let outgoing = connection
.process_outgoing(
PacketInfo::user_packet(&buffer, delivery, ordering),
None,
Instant::now(),
)
.unwrap();
let mut iter = outgoing.into_iter();
assert_eq!(
iter.next().unwrap().contents().len() - buffer.len(),
expected_header_size
);
if iter.next().is_some() {
panic!("Expected not fragmented packet")
}
}
#[test]
fn | () {
let mut connection = create_virtual_connection();
let buffer = vec![1; 5000];
let res = connection.process_outgoing(
PacketInfo::user_packet(
&buffer,
DeliveryGuarantee::Unreliable,
OrderingGuarantee::None,
),
None,
Instant::now(),
);
assert!(res.is_err());
}
#[test]
fn send_returns_right_size() {
let mut connection = create_virtual_connection();
let buffer = vec![1; 1024];
let mut packets = connection
.process_outgoing(
PacketInfo::user_packet(
&buffer,
DeliveryGuarantee::Unreliable,
OrderingGuarantee::None,
),
None,
Instant::now(),
)
.unwrap()
.into_iter();
let packet = packets.next().unwrap();
assert_eq!(
packet.contents().len(),
1024 + constants::STANDARD_HEADER_SIZE as usize
);
assert!(packets.next().is_none());
}
#[test]
fn fragmentation_send_returns_right_size() {
let fragment_packet_size =
constants::STANDARD_HEADER_SIZE + constants::FRAGMENT_HEADER_SIZE;
let mut connection = create_virtual_connection();
let buffer = vec![1; 4000];
let packets = connection
.process_outgoing(
PacketInfo::user_packet(
&buffer,
DeliveryGuarantee::Reliable,
OrderingGuarantee::None,
),
None,
Instant::now(),
)
.unwrap()
.into_iter();
// the first fragment of a sequence of fragments also contains the acknowledgment header.
assert_eq!(
packets.fold(0, |acc, p| acc + p.contents().len()),
4000 + (fragment_packet_size * 4 + constants::ACKED_PACKET_HEADER) as usize
);
}
#[test]
fn ordered_16_bit_overflow() {
let mut send_conn = create_virtual_connection();
let mut recv_conn = create_virtual_connection();
let time = Instant::now();
let mut last_recv_value = 0u32;
for idx in 1..100_000u32 {
let data_to_send = idx.to_ne_bytes();
let packet_sent = send_conn
.process_outgoing(
PacketInfo::user_packet(
&data_to_send,
DeliveryGuarantee::Reliable,
OrderingGuarantee::None,
),
None,
time,
)
.unwrap()
.into_iter()
.next()
.unwrap();
let packets = recv_conn
.process_incoming(&packet_sent.contents(), time)
.unwrap();
for (packet, _) in packets.into_iter() {
let mut recv_buff = [0; 4];
recv_buff.copy_from_slice(packet.payload());
let value = u32::from_ne_bytes(recv_buff);
assert_eq!(value, last_recv_value + 1);
last_recv_value = value;
}
}
assert_eq![last_recv_value, 99_999];
}
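// A minimal sketch (not part of the original suite) of how the
// `record_send`/`record_recv` pair drives `is_established`: the connection
// only counts as established once traffic has flowed in both directions,
// and the call that completes the pair reports the transition.
#[test]
fn connection_establishes_after_send_and_recv() {
let mut connection = create_virtual_connection();
assert!(!connection.is_established());
// Sending alone is not enough to establish the connection.
assert!(!connection.record_send());
assert!(!connection.is_established());
// The first receive completes the pair and reports the transition.
assert!(connection.record_recv());
assert!(connection.is_established());
}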
}
| sending_large_unreliable_packet_should_fail |
docker_test.go | package executables_test
import (
"bytes"
"context"
"fmt"
"reflect"
"testing"
"github.com/golang/mock/gomock"
. "github.com/onsi/gomega"
"github.com/aws/eks-anywhere/pkg/executables"
mockexecutables "github.com/aws/eks-anywhere/pkg/executables/mocks"
)
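// All tests below follow the same pattern: a gomock Executable stub asserts
// the exact docker argv the wrapper is expected to build and returns a canned
// buffer standing in for the CLI's stdout.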
func TestGetDockerLBPort(t *testing.T) {
clusterName := "clusterName"
wantPort := "test:port"
clusterLBName := fmt.Sprintf("%s-lb", clusterName)
ctx := context.Background()
mockCtrl := gomock.NewController(t)
executable := mockexecutables.NewMockExecutable(mockCtrl)
executable.EXPECT().Execute(ctx, []string{"port", clusterLBName, "6443/tcp"}).Return(*bytes.NewBufferString(wantPort), nil)
d := executables.NewDocker(executable)
_, err := d.GetDockerLBPort(ctx, clusterName)
if err != nil {
t.Fatalf("Docker.GetDockerLBPort() error = %v, want nil", err)
}
}
func TestDockerPullImage(t *testing.T) {
image := "test_image"
ctx := context.Background()
mockCtrl := gomock.NewController(t)
executable := mockexecutables.NewMockExecutable(mockCtrl)
executable.EXPECT().Execute(ctx, "pull", image).Return(bytes.Buffer{}, nil)
d := executables.NewDocker(executable)
err := d.PullImage(ctx, image)
if err != nil {
t.Fatalf("Docker.PullImage() error = %v, want nil", err)
}
}
func TestDockerVersion(t *testing.T) {
version := "1.234"
wantVersion := 1
ctx := context.Background()
mockCtrl := gomock.NewController(t)
executable := mockexecutables.NewMockExecutable(mockCtrl)
executable.EXPECT().Execute(ctx, "version", "--format", "{{.Client.Version}}").Return(*bytes.NewBufferString(version), nil)
d := executables.NewDocker(executable)
v, err := d.Version(ctx)
if err != nil {
t.Fatalf("Docker.Version() error = %v, want nil", err)
}
if !reflect.DeepEqual(v, wantVersion) {
t.Fatalf("Docker.Version() version = %v, want %v", v, wantVersion)
}
}
func TestDockerAllocatedMemory(t *testing.T) {
memory := "12345"
ctx := context.Background()
mockCtrl := gomock.NewController(t)
executable := mockexecutables.NewMockExecutable(mockCtrl)
executable.EXPECT().Execute(ctx, "info", "--format", "'{{json .MemTotal}}'").Return(*bytes.NewBufferString(memory), nil)
d := executables.NewDocker(executable)
mem, err := d.AllocatedMemory(ctx)
if err != nil {
t.Fatalf("Docker.AllocatedMemory() error = %v, want %v", err, mem)
}
}
func TestDockerCgroupVersion(t *testing.T) {
version := "'\"1\"'\n"
wantVersion := 1
ctx := context.Background()
mockCtrl := gomock.NewController(t)
executable := mockexecutables.NewMockExecutable(mockCtrl)
executable.EXPECT().Execute(ctx, "info", "--format", "'{{json .CgroupVersion}}'").Return(*bytes.NewBufferString(version), nil)
d := executables.NewDocker(executable)
cgroupVersion, err := d.CgroupVersion(ctx)
if err != nil {
t.Fatalf("Docker.AllocatedMemory() error = %v, want %v", err, cgroupVersion)
}
if !reflect.DeepEqual(cgroupVersion, wantVersion) {
t.Fatalf("Docker.Version() version = %v, want %v", cgroupVersion, wantVersion)
}
}
func TestDockerLoadFromFile(t *testing.T) {
file := "file"
g := NewWithT(t)
ctx := context.Background()
mockCtrl := gomock.NewController(t)
executable := mockexecutables.NewMockExecutable(mockCtrl)
executable.EXPECT().Execute(ctx, "load", "-i", file).Return(bytes.Buffer{}, nil)
d := executables.NewDocker(executable)
g.Expect(d.LoadFromFile(ctx, file)).To(Succeed())
}
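// A sketch of the single-image case, mirroring the variadic save tests below
// (not part of the original suite).
func TestDockerSaveToFileOneImage(t *testing.T) {
file := "file"
image := "image1:tag1"
g := NewWithT(t)
ctx := context.Background()
mockCtrl := gomock.NewController(t)
executable := mockexecutables.NewMockExecutable(mockCtrl)
executable.EXPECT().Execute(ctx, "save", "-o", file, image).Return(bytes.Buffer{}, nil)
d := executables.NewDocker(executable)
g.Expect(d.SaveToFile(ctx, file, image)).To(Succeed())
}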
func TestDockerSaveToFileMultipleImages(t *testing.T) {
file := "file"
image1 := "image1:tag1"
image2 := "image2:tag2"
image3 := "image3:tag3"
g := NewWithT(t)
ctx := context.Background()
mockCtrl := gomock.NewController(t)
executable := mockexecutables.NewMockExecutable(mockCtrl)
executable.EXPECT().Execute(ctx, "save", "-o", file, image1, image2, image3).Return(bytes.Buffer{}, nil)
d := executables.NewDocker(executable)
g.Expect(d.SaveToFile(ctx, file, image1, image2, image3)).To(Succeed())
} | g := NewWithT(t)
ctx := context.Background()
mockCtrl := gomock.NewController(t)
executable := mockexecutables.NewMockExecutable(mockCtrl)
executable.EXPECT().Execute(ctx, "save", "-o", file).Return(bytes.Buffer{}, nil)
d := executables.NewDocker(executable)
g.Expect(d.SaveToFile(ctx, file)).To(Succeed())
} |
func TestDockerSaveToFileNoImages(t *testing.T) {
file := "file"
|
mod.rs | // Copyright 2016-2018 Mateusz Sieczko and other GilRs Developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
#![cfg_attr(target_os = "windows", allow(dead_code))]
mod parser;
use crate::ev::{self, Axis, AxisOrBtn, Button};
use gilrs_core::native_ev_codes as nec;
use gilrs_core::EvCode;
use std::collections::HashMap;
use std::env;
use std::error::Error;
use std::fmt::{Display, Formatter, Result as FmtResult};
use fnv::FnvHashMap;
use uuid::Uuid;
use vec_map::VecMap;
use self::parser::{Error as ParserError, ErrorKind as ParserErrorKind, Parser, Token};
/// Platform name used by SDL mappings
#[cfg(target_os = "linux")]
const SDL_PLATFORM_NAME: &str = "Linux";
#[cfg(target_os = "macos")]
const SDL_PLATFORM_NAME: &'static str = "Mac OS X";
#[cfg(target_os = "windows")]
const SDL_PLATFORM_NAME: &'static str = "Windows";
#[cfg(all(
not(target_os = "linux"),
not(target_os = "macos"),
not(target_os = "windows")
))]
const SDL_PLATFORM_NAME: &'static str = "Unknown";
#[derive(Debug)]
#[cfg_attr(test, derive(PartialEq))]
/// Store mappings from one `EvCode` (`u16`) to another.
///
/// This struct is internal, `MappingData` is exported in public interface as `Mapping`.
pub struct Mapping {
mappings: FnvHashMap<EvCode, AxisOrBtn>,
name: String,
default: bool,
hats_mapped: u8,
}
impl Mapping {
pub fn new() -> Self {
Mapping {
mappings: FnvHashMap::default(),
name: String::new(),
default: false,
hats_mapped: 0,
}
}
pub fn default(gamepad: &gilrs_core::Gamepad) -> Self {
use self::Axis as Ax;
use self::AxisOrBtn::*;
macro_rules! fnv_map {
( $( $key:expr => $elem:expr ),* ) => {
{
let mut map = FnvHashMap::default();
$(
map.insert($key, $elem);
)*
map
}
};
}
let mut mappings = fnv_map![
nec::BTN_SOUTH => Btn(Button::South),
nec::BTN_EAST => Btn(Button::East),
nec::BTN_C => Btn(Button::C),
nec::BTN_NORTH => Btn(Button::North),
nec::BTN_WEST => Btn(Button::West),
nec::BTN_Z => Btn(Button::Z),
nec::BTN_LT => Btn(Button::LeftTrigger),
nec::BTN_RT => Btn(Button::RightTrigger),
nec::BTN_LT2 => Btn(Button::LeftTrigger2),
nec::BTN_RT2 => Btn(Button::RightTrigger2),
nec::BTN_SELECT => Btn(Button::Select),
nec::BTN_START => Btn(Button::Start),
nec::BTN_MODE => Btn(Button::Mode),
nec::BTN_LTHUMB => Btn(Button::LeftThumb),
nec::BTN_RTHUMB => Btn(Button::RightThumb),
nec::BTN_DPAD_UP => Btn(Button::DPadUp),
nec::BTN_DPAD_DOWN => Btn(Button::DPadDown),
nec::BTN_DPAD_LEFT => Btn(Button::DPadLeft),
nec::BTN_DPAD_RIGHT => Btn(Button::DPadRight),
nec::AXIS_LT => Btn(Button::LeftTrigger),
nec::AXIS_RT => Btn(Button::RightTrigger),
nec::AXIS_LT2 => Btn(Button::LeftTrigger2),
nec::AXIS_RT2 => Btn(Button::RightTrigger2),
nec::AXIS_LSTICKX => Axis(Ax::LeftStickX),
nec::AXIS_LSTICKY => Axis(Ax::LeftStickY),
nec::AXIS_LEFTZ => Axis(Ax::LeftZ),
nec::AXIS_RSTICKX => Axis(Ax::RightStickX),
nec::AXIS_RSTICKY => Axis(Ax::RightStickY),
nec::AXIS_RIGHTZ => Axis(Ax::RightZ),
nec::AXIS_DPADX => Axis(Ax::DPadX),
nec::AXIS_DPADY => Axis(Ax::DPadY)
];
// Remove all mappings that don't have corresponding element in gamepad. Partial fix to #83
let axes = [
nec::AXIS_DPADX,
nec::AXIS_DPADY,
nec::AXIS_LEFTZ,
nec::AXIS_LSTICKX,
nec::AXIS_LSTICKY,
nec::AXIS_RSTICKX,
nec::AXIS_RSTICKY,
nec::AXIS_LT,
nec::AXIS_LT2,
nec::AXIS_RT,
nec::AXIS_RT2,
nec::AXIS_RIGHTZ,
];
let btns = [
nec::BTN_SOUTH,
nec::BTN_NORTH,
nec::BTN_WEST,
nec::BTN_EAST,
nec::BTN_C,
nec::BTN_Z,
nec::BTN_LT,
nec::BTN_LT2,
nec::BTN_RT,
nec::BTN_RT2,
nec::BTN_SELECT,
nec::BTN_START,
nec::BTN_MODE,
nec::BTN_LTHUMB,
nec::BTN_RTHUMB,
nec::BTN_DPAD_DOWN,
nec::BTN_DPAD_LEFT,
nec::BTN_DPAD_RIGHT,
nec::BTN_DPAD_UP,
];
for axis in &axes {
if !gamepad.axes().contains(axis) {
mappings.remove(axis);
}
}
for btn in &btns {
if !gamepad.buttons().contains(btn) {
mappings.remove(btn);
}
}
Mapping {
mappings,
name: String::new(),
default: true,
hats_mapped: 0,
}
}
pub fn name(&self) -> &str {
&self.name
}
pub fn from_data(
data: &MappingData,
buttons: &[EvCode],
axes: &[EvCode],
name: &str,
uuid: Uuid,
) -> Result<(Self, String), MappingError> {
use crate::constants::*;
if !Self::is_name_valid(name) {
return Err(MappingError::InvalidName);
}
let mut mappings = FnvHashMap::default();
let mut sdl_mappings = format!("{},{},", uuid.to_simple(), name);
{
let mut add_button = |ident, ev_code, mapped_btn| {
Self::add_button(
ident,
ev_code,
mapped_btn,
buttons,
&mut sdl_mappings,
&mut mappings,
)
};
for (button, &ev_code) in &data.buttons {
match button as u16 {
BTN_SOUTH => add_button("a", ev_code, Button::South)?,
BTN_EAST => add_button("b", ev_code, Button::East)?,
BTN_WEST => add_button("x", ev_code, Button::West)?,
BTN_NORTH => add_button("y", ev_code, Button::North)?,
BTN_LT => add_button("leftshoulder", ev_code, Button::LeftTrigger)?,
BTN_RT => add_button("rightshoulder", ev_code, Button::RightTrigger)?,
BTN_LT2 => add_button("lefttrigger", ev_code, Button::LeftTrigger2)?,
BTN_RT2 => add_button("righttrigger", ev_code, Button::RightTrigger2)?,
BTN_SELECT => add_button("back", ev_code, Button::Select)?,
BTN_START => add_button("start", ev_code, Button::Start)?,
BTN_MODE => add_button("guide", ev_code, Button::Mode)?,
BTN_LTHUMB => add_button("leftstick", ev_code, Button::LeftThumb)?,
BTN_RTHUMB => add_button("rightstick", ev_code, Button::RightThumb)?,
BTN_DPAD_UP => add_button("dpup", ev_code, Button::DPadUp)?,
BTN_DPAD_DOWN => add_button("dpdown", ev_code, Button::DPadDown)?,
BTN_DPAD_LEFT => add_button("dpleft", ev_code, Button::DPadLeft)?,
BTN_DPAD_RIGHT => add_button("dpright", ev_code, Button::DPadRight)?,
BTN_C => add_button("c", ev_code, Button::C)?,
BTN_Z => add_button("z", ev_code, Button::Z)?,
BTN_UNKNOWN => return Err(MappingError::UnknownElement),
_ => unreachable!(),
}
}
}
{
let mut add_axis = |ident, ev_code, mapped_axis| {
Self::add_axis(
ident,
ev_code,
mapped_axis,
axes,
&mut sdl_mappings,
&mut mappings,
)
};
for (axis, &ev_code) in &data.axes {
match axis as u16 {
AXIS_LSTICKX => add_axis("leftx", ev_code, Axis::LeftStickX)?,
AXIS_LSTICKY => add_axis("lefty", ev_code, Axis::LeftStickY)?,
AXIS_RSTICKX => add_axis("rightx", ev_code, Axis::RightStickX)?,
AXIS_RSTICKY => add_axis("righty", ev_code, Axis::RightStickY)?,
AXIS_LEFTZ => add_axis("leftz", ev_code, Axis::LeftZ)?,
AXIS_RIGHTZ => add_axis("rightz", ev_code, Axis::RightZ)?,
AXIS_UNKNOWN => return Err(MappingError::UnknownElement),
_ => unreachable!(),
}
}
}
let mapping = Mapping {
mappings,
name: name.to_owned(),
default: false,
hats_mapped: 0,
};
Ok((mapping, sdl_mappings))
}
pub fn parse_sdl_mapping(
line: &str,
buttons: &[EvCode],
axes: &[EvCode],
) -> Result<Self, ParseSdlMappingError> {
let mut mapping = Mapping::new();
let mut parser = Parser::new(line);
while let Some(token) = parser.next_token() {
if let Err(ref e) = token {
if e.kind() == &ParserErrorKind::EmptyValue {
continue;
}
}
let token = token?;
match token {
Token::Platform(platform) => {
if platform != SDL_PLATFORM_NAME {
warn!("Mappings for different platform – {}", platform);
}
}
Token::Uuid(_) => (),
Token::Name(name) => mapping.name = name.to_owned(),
Token::AxisMapping { from, to, .. } => {
let axis = axes
.get(from as usize)
.cloned()
.ok_or(ParseSdlMappingError::InvalidAxis)?;
mapping.mappings.insert(axis, to);
}
Token::ButtonMapping { from, to } => {
let btn = buttons
.get(from as usize)
.cloned()
.ok_or(ParseSdlMappingError::InvalidButton)?;
mapping.mappings.insert(btn, AxisOrBtn::Btn(to));
}
Token::HatMapping { hat, direction, to } => {
if hat != 0 || !to.is_dpad() {
warn!(
"Hat mappings are only supported for dpads (requested to map hat \
{}.{} to {:?}",
hat, direction, to
);
} else {
// We don't have anything like "hat" in gilrs, so let's just assume that
// the user wants to map dpad axes.
//
// We have to add mappings for axes AND buttons, because axis_dpad_to_button
// filter may transform event to button event.
let (from_axis, from_btn) = match direction {
1 => (nec::AXIS_DPADY, nec::BTN_DPAD_UP),
4 => (nec::AXIS_DPADY, nec::BTN_DPAD_DOWN),
2 => (nec::AXIS_DPADX, nec::BTN_DPAD_RIGHT),
8 => (nec::AXIS_DPADX, nec::BTN_DPAD_LEFT),
0 => continue, // FIXME: I have no idea what 0 means here
_ => return Err(ParseSdlMappingError::UnknownHatDirection),
};
let to_axis = match to {
Button::DPadLeft | Button::DPadRight => Axis::DPadX,
Button::DPadUp | Button::DPadDown => Axis::DPadY,
_ => unreachable!(),
};
mapping.mappings.insert(from_axis, AxisOrBtn::Axis(to_axis));
mapping.mappings.insert(from_btn, AxisOrBtn::Btn(to));
mapping.hats_mapped |= direction as u8;
}
}
}
}
Ok(mapping)
}
fn add_button(
ident: &str,
ev_code: EvCode,
mapped_btn: Button,
buttons: &[EvCode],
sdl_mappings: &mut String,
mappings: &mut FnvHashMap<EvCode, AxisOrBtn>,
) -> Result<(), MappingError> {
let n_btn = buttons
.iter()
.position(|&x| x == ev_code)
.ok_or(MappingError::InvalidCode(ev::Code(ev_code)))?;
sdl_mappings.push_str(&format!("{}:b{},", ident, n_btn));
mappings.insert(ev_code, AxisOrBtn::Btn(mapped_btn));
Ok(())
}
fn add_axis(
ident: &str,
ev_code: EvCode,
mapped_axis: Axis,
axes: &[EvCode],
sdl_mappings: &mut String,
mappings: &mut FnvHashMap<EvCode, AxisOrBtn>,
) -> Result<(), MappingError> {
let n_axis = axes
.iter()
.position(|&x| x == ev_code)
.ok_or(MappingError::InvalidCode(ev::Code(ev_code)))?;
sdl_mappings.push_str(&format!("{}:a{},", ident, n_axis));
mappings.insert(ev_code, AxisOrBtn::Axis(mapped_axis));
Ok(())
}
fn is_name_valid(name: &str) -> bool {
!name.chars().any(|x| x == ',')
}
pub fn map(&self, code: &EvCode) -> Option<AxisOrBtn> {
self.mappings.get(code).cloned()
}
pub fn map_rev(&self, el: &AxisOrBtn) -> Option<EvCode> {
self.mappings.iter().find(|x| x.1 == el).map(|x| *x.0)
}
pub fn is_default(&self) -> bool {
| /// Returns a bit field of mapped hats. Only mappings created from the SDL
/// format can return a non-zero value here.
pub fn hats_mapped(&self) -> u8 {
self.hats_mapped
}
}
#[derive(Clone, PartialEq, Debug)]
pub enum ParseSdlMappingError {
InvalidButton,
InvalidAxis,
UnknownHatDirection,
ParseError(ParserError),
}
impl From<ParserError> for ParseSdlMappingError {
fn from(f: ParserError) -> Self {
ParseSdlMappingError::ParseError(f)
}
}
impl Error for ParseSdlMappingError {
fn source(&self) -> Option<&(dyn Error + 'static)> {
if let ParseSdlMappingError::ParseError(ref err) = self {
Some(err)
} else {
None
}
}
}
impl Display for ParseSdlMappingError {
fn fmt(&self, fmt: &mut Formatter<'_>) -> FmtResult {
let s = match self {
ParseSdlMappingError::InvalidButton => "gamepad doesn't have requested button",
ParseSdlMappingError::InvalidAxis => "gamepad doesn't have requested axis",
ParseSdlMappingError::UnknownHatDirection => "hat direction wasn't 1, 2, 4 or 8",
ParseSdlMappingError::ParseError(_) => "parsing error",
};
fmt.write_str(s)
}
}
#[derive(Debug)]
pub struct MappingDb {
mappings: HashMap<Uuid, String>,
}
impl MappingDb {
pub fn new() -> Self {
MappingDb {
mappings: HashMap::new(),
}
}
pub fn add_included_mappings(&mut self) {
self.insert(include_str!(
"../../SDL_GameControllerDB/gamecontrollerdb.txt"
));
}
pub fn add_env_mappings(&mut self) {
if let Ok(mapping) = env::var("SDL_GAMECONTROLLERCONFIG") {
self.insert(&mapping);
}
}
pub fn insert(&mut self, s: &str) {
for mapping in s.lines() {
let pat = "platform:";
if let Some(offset) = mapping.find(pat).map(|o| o + pat.len()) {
let s = &mapping[offset..];
let end = s.find(',').unwrap_or_else(|| s.len());
if &s[..end] != SDL_PLATFORM_NAME {
continue;
}
}
mapping
.split(',')
.next()
.and_then(|s| Uuid::parse_str(s).ok())
.and_then(|uuid| self.mappings.insert(uuid, mapping.to_owned()));
}
}
pub fn get(&self, uuid: Uuid) -> Option<&str> {
self.mappings.get(&uuid).map(String::as_ref)
}
pub fn len(&self) -> usize {
self.mappings.len()
}
}
/// Stores data used to map gamepad buttons and axes.
///
/// After you add all mappings, use
/// [`Gamepad::set_mapping(…)`](struct.Gamepad.html#method.set_mapping) to change the mapping of
/// an existing gamepad.
///
/// See `examples/mapping.rs` for more detailed example.
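///
/// A short sketch (the ev codes come from `gilrs_core::native_ev_codes` and
/// are used purely for illustration):
///
/// ```ignore
/// let mut data = MappingData::new();
/// data.insert_btn(ev::Code(nec::BTN_SOUTH), Button::South);
/// data.insert_axis(ev::Code(nec::AXIS_LSTICKX), Axis::LeftStickX);
/// ```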
#[derive(Debug, Clone, Default)]
// Re-exported as Mapping
pub struct MappingData {
buttons: VecMap<EvCode>,
axes: VecMap<EvCode>,
}
impl MappingData {
/// Creates new `Mapping`.
pub fn new() -> Self {
MappingData {
buttons: VecMap::with_capacity(18),
axes: VecMap::with_capacity(11),
}
}
/// Returns `EvCode` associated with button index.
pub fn button(&self, idx: Button) -> Option<ev::Code> {
self.buttons.get(idx as usize).cloned().map(ev::Code)
}
/// Returns `EvCode` associated with axis index.
pub fn axis(&self, idx: Axis) -> Option<ev::Code> {
self.axes.get(idx as usize).cloned().map(ev::Code)
}
/// Inserts new button mapping.
pub fn insert_btn(&mut self, from: ev::Code, to: Button) -> Option<ev::Code> {
self.buttons.insert(to as usize, from.0).map(ev::Code)
}
/// Inserts new axis mapping.
pub fn insert_axis(&mut self, from: ev::Code, to: Axis) -> Option<ev::Code> {
self.axes.insert(to as usize, from.0).map(ev::Code)
}
/// Removes a button mapping and returns the associated `ev::Code`.
pub fn remove_button(&mut self, idx: Button) -> Option<ev::Code> {
self.buttons.remove(idx as usize).map(ev::Code)
}
/// Removes an axis mapping and returns the associated `ev::Code`.
pub fn remove_axis(&mut self, idx: Axis) -> Option<ev::Code> {
self.axes.remove(idx as usize).map(ev::Code)
}
}
/// The error type for functions related to gamepad mapping.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum MappingError {
/// Gamepad does not have element referenced by `EvCode`.
InvalidCode(ev::Code),
/// Name contains comma (',').
InvalidName,
/// This function is not implemented for current platform.
NotImplemented,
/// Gamepad is not connected.
NotConnected,
/// Same gamepad element is referenced by axis and button.
DuplicatedEntry,
/// `Mapping` with `Button::Unknown` or `Axis::Unknown`.
UnknownElement,
/// `Mapping` have button or axis that are not present in SDL2.
NotSdl2Compatible,
}
impl Error for MappingError {}
impl Display for MappingError {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
let sbuf;
let s = match self {
MappingError::InvalidCode(code) => {
sbuf = format!("gamepad does not have element with {}", code);
sbuf.as_ref()
}
MappingError::InvalidName => "name can not contain comma",
MappingError::NotImplemented => {
"current platform does not implement setting custom mappings"
}
MappingError::NotConnected => "gamepad is not connected",
MappingError::DuplicatedEntry => {
"same gamepad element is referenced by axis and button"
}
MappingError::UnknownElement => "Button::Unknown and Axis::Unknown are not allowed",
MappingError::NotSdl2Compatible => "one of buttons or axes is not compatible with SDL2",
};
f.write_str(s)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::ev::{Axis, Button};
use gilrs_core::native_ev_codes as nec;
use gilrs_core::EvCode;
use uuid::Uuid;
// Platform field deliberately omitted; mapping (with UUID modified) from
// https://github.com/gabomdq/SDL_GameControllerDB/blob/master/gamecontrollerdb.txt
const TEST_STR: &str = "03000000260900008888000000010001,GameCube {WiseGroup USB \
box},a:b0,b:b2,y:b3,x:b1,start:b7,rightshoulder:b6,dpup:h0.1,dpleft:\
h0.8,dpdown:h0.4,dpright:h0.2,leftx:a0,lefty:a1,rightx:a2,righty:a3,\
lefttrigger:a4,righttrigger:a5,";
const BUTTONS: [EvCode; 15] = [
nec::BTN_SOUTH,
nec::BTN_EAST,
nec::BTN_C,
nec::BTN_NORTH,
nec::BTN_WEST,
nec::BTN_Z,
nec::BTN_LT,
nec::BTN_RT,
nec::BTN_LT2,
nec::BTN_RT2,
nec::BTN_SELECT,
nec::BTN_START,
nec::BTN_MODE,
nec::BTN_LTHUMB,
nec::BTN_RTHUMB,
];
const AXES: [EvCode; 12] = [
nec::AXIS_LSTICKX,
nec::AXIS_LSTICKY,
nec::AXIS_LEFTZ,
nec::AXIS_RSTICKX,
nec::AXIS_RSTICKY,
nec::AXIS_RIGHTZ,
nec::AXIS_DPADX,
nec::AXIS_DPADY,
nec::AXIS_RT,
nec::AXIS_LT,
nec::AXIS_RT2,
nec::AXIS_LT2,
];
#[test]
fn mapping() {
Mapping::parse_sdl_mapping(TEST_STR, &BUTTONS, &AXES).unwrap();
}
#[test]
fn from_data() {
let uuid = Uuid::nil();
let name = "Best Gamepad";
let buttons = BUTTONS.iter().cloned().map(ev::Code).collect::<Vec<_>>();
let axes = AXES.iter().cloned().map(ev::Code).collect::<Vec<_>>();
let mut data = MappingData::new();
data.insert_axis(axes[0], Axis::LeftStickX);
data.insert_axis(axes[1], Axis::LeftStickY);
data.insert_axis(axes[2], Axis::LeftZ);
data.insert_axis(axes[3], Axis::RightStickX);
data.insert_axis(axes[4], Axis::RightStickY);
data.insert_axis(axes[5], Axis::RightZ);
data.insert_btn(buttons[0], Button::South);
data.insert_btn(buttons[1], Button::East);
data.insert_btn(buttons[3], Button::North);
data.insert_btn(buttons[4], Button::West);
data.insert_btn(buttons[5], Button::Select);
data.insert_btn(buttons[6], Button::Start);
data.insert_btn(buttons[7], Button::DPadDown);
data.insert_btn(buttons[8], Button::DPadLeft);
data.insert_btn(buttons[9], Button::RightThumb);
let (mappings, sdl_mappings) =
Mapping::from_data(&data, &BUTTONS, &AXES, name, uuid).unwrap();
let sdl_mappings = Mapping::parse_sdl_mapping(&sdl_mappings, &BUTTONS, &AXES).unwrap();
assert_eq!(mappings, sdl_mappings);
let incorrect_mappings = Mapping::from_data(&data, &BUTTONS, &AXES, "Inval,id name", uuid);
assert_eq!(Err(MappingError::InvalidName), incorrect_mappings);
data.insert_btn(ev::Code(nec::BTN_DPAD_RIGHT), Button::DPadRight);
let incorrect_mappings = Mapping::from_data(&data, &BUTTONS, &AXES, name, uuid);
assert_eq!(
Err(MappingError::InvalidCode(ev::Code(nec::BTN_DPAD_RIGHT))),
incorrect_mappings
);
data.insert_btn(ev::Code(BUTTONS[3]), Button::Unknown);
let incorrect_mappings = Mapping::from_data(&data, &BUTTONS, &AXES, name, uuid);
assert_eq!(Err(MappingError::UnknownElement), incorrect_mappings);
}
#[test]
fn with_mappings() {
let mappings = format!(
"\nShould be ignored\nThis also should,be ignored\n\n{}",
TEST_STR
);
let mut db = MappingDb::new();
db.add_included_mappings();
db.insert(&mappings);
assert_eq!(
Some(TEST_STR),
db.get(Uuid::parse_str("03000000260900008888000000010001").unwrap())
);
}
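// A small sketch (not part of the original suite) of the `map`/`map_rev`
// round trip: `a:b0` in TEST_STR binds the first entry of BUTTONS to
// `Button::South`, and the reverse lookup recovers that ev code. The four
// dpup/dpdown/dpleft/dpright hat entries set all four direction bits.
#[test]
fn map_and_map_rev_round_trip() {
let mapping = Mapping::parse_sdl_mapping(TEST_STR, &BUTTONS, &AXES).unwrap();
assert_eq!(mapping.map(&BUTTONS[0]), Some(AxisOrBtn::Btn(Button::South)));
assert_eq!(mapping.map_rev(&AxisOrBtn::Btn(Button::South)), Some(BUTTONS[0]));
assert_eq!(mapping.hats_mapped(), 0b1111);
}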
}
| self.default
}
|
util.go | package eval
import (
"reflect"
"regexp"
"strconv"
"strings"
"go/ast"
"go/token"
)
// Equivalent of reflect.New, but unwraps internal Types into their original reflect.Type
func hackedNew(t reflect.Type) reflect.Value {
return reflect.New(unhackType(t))
}
// Get the underlying reflect.Type of a hacked type
func unhackType(t reflect.Type) reflect.Type {
switch tt := t.(type) {
case Rune:
return tt.Type
case Byte:
return tt.Type
default:
return t
}
}
// Determine if type from is assignable to type to. Neither from nor to may be a ConstType.
func typeAssignableTo(from, to reflect.Type) bool {
return from.AssignableTo(unhackType(to))
}
// exprAssignableTo(CheckExpr(expr), t), but errors are accumulated and a
// bool value is returned indicating if the expr is assignable to t.
// The bool value will be false if and only if the conversion check
// was reached and failed.
func checkExprAssignableTo(expr ast.Expr, t reflect.Type, env Env) (Expr, bool, []error) {
var errs []error
aexpr, moreErrs := CheckExpr(expr, env)
if moreErrs != nil | else if _, err := expectSingleType(aexpr); err != nil {
errs = append(errs, err)
}
if errs != nil {
return aexpr, true, errs
}
ok, convErrs := exprAssignableTo(aexpr, t)
if convErrs != nil {
errs = append(errs, convErrs...)
}
return aexpr, ok, errs
}
// Determine if the result of from expr is assignable to type to. to must be a vanilla reflect.Type.
// from must have a KnownType() of length 1. Const types that raise overflow and truncation
// errors will still return true, but the errors will be reflected in the []error slice.
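// For example, an untyped constant 300 checked against int8 still yields a
// valid (truncated) value, so the result is true while the overflow error is
// returned alongside it.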
func exprAssignableTo(from Expr, to reflect.Type) (bool, []error) {
if len(from.KnownType()) != 1 {
panic("go-eval: assignableTo called with from.KnownType() != 1")
}
fromType := from.KnownType()[0]
// Check that consts can be converted
if c, ok := fromType.(ConstType); ok && from.IsConst() {
// If cv is a valid value, then the types are assignable even if
// other conversion errors, such as overflows, are present.
cv, errs := promoteConstToTyped(c, constValue(from.Const()), to, from)
return reflect.Value(cv).IsValid(), errs
}
return typeAssignableTo(fromType, to), nil
}
func expectSingleType(expr Expr) (reflect.Type, error) {
types := expr.KnownType()
if len(types) == 0 {
return nil, ErrMissingValue{expr}
} else if multivalueOk(expr) {
return types[0], nil
} else if len(types) != 1 {
return nil, ErrMultiInSingleContext{expr}
} else {
return types[0], nil
}
}
// Is op a boolean operator that produces a const bool type.
// Notably absent are LAND(&&) and LOR(||), which result
// in a value of the same type as their operands.
func isBooleanOp(op token.Token) bool {
switch op {
case token.EQL, token.NEQ, token.LEQ, token.GEQ, token.LSS, token.GTR:
return true
default:
return false
}
}
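// isOpDefinedOn reports whether the binary operator op is defined for
// operands of type t. For instance, token.REM (%) is defined only for the
// integer kinds, matching the Go spec.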
func isOpDefinedOn(op token.Token, t reflect.Type) bool {
if _, ok := t.(ConstNilType); ok {
return false
}
switch t.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
switch op {
case token.ADD, token.SUB, token.MUL, token.QUO,
token.REM, token.AND, token.OR, token.XOR, token.AND_NOT,
token.EQL, token.NEQ,
token.LEQ, token.GEQ, token.LSS, token.GTR:
return true
}
case reflect.Float32, reflect.Float64:
switch op {
case token.ADD, token.SUB, token.MUL, token.QUO,
token.EQL, token.NEQ,
token.LEQ, token.GEQ, token.LSS, token.GTR:
return true
}
case reflect.Complex64, reflect.Complex128:
switch op {
case token.ADD, token.SUB, token.MUL, token.QUO,
token.EQL, token.NEQ:
return true
}
case reflect.Bool:
switch op {
case token.LAND, token.LOR, token.EQL, token.NEQ:
return true
}
case reflect.String:
switch op {
case token.ADD, token.EQL, token.NEQ, token.LEQ, token.GEQ, token.LSS, token.GTR:
return true
}
// This is slightly misleading: slices, funcs and maps are only
// comparable if their paired operand is nil
case reflect.Ptr, reflect.Array, reflect.Interface, reflect.Struct,
reflect.Slice, reflect.Map, reflect.Chan:
return op == token.EQL || op == token.NEQ
}
return false
}
func isUnaryOpDefinedOn(op token.Token, t reflect.Type) bool {
if _, ok := t.(ConstNilType); ok {
return false
}
switch t.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
switch op {
case token.ADD, token.SUB, token.XOR:
return true
}
case reflect.Float32, reflect.Float64, reflect.Complex64, reflect.Complex128:
switch op {
case token.ADD, token.SUB:
return true
}
case reflect.Bool:
switch op {
case token.NOT:
return true
}
}
return false
}
// FIXME: should also match and handle just a line and no column
var parseError = regexp.MustCompile(`^([0-9]+):([0-9]+): `)
// FormatErrorPos formats source to show the position that a (parse)
// error occurs. When this works, it returns a slice of one or two
// strings: the source line with the error and if it can find a column
// position under that, a line indicating the position where the error
// occurred.
//
// For example, if we have:
// source := `split(os.Args ", )")`
// errmsg := "1:15: expected ')'"
// then PrintErrPos(source, errmsg) returns:
// {
// `split(os.Args ", )")`,
// `-------------^`
// }
//
// If something is wrong parsing the error message or matching it with
// the source, an empty slice is returned.
func FormatErrorPos(source, errmsg string) (cursored []string) {
matches := parseError.FindStringSubmatch(errmsg)
if len(matches) == 3 {
var err error
var line, column int
if line, err = strconv.Atoi(matches[1]); err != nil {
return cursored
}
if column, err = strconv.Atoi(matches[2]); err != nil {
return cursored
}
sourceLines := strings.Split(source, "\n")
if line > len(sourceLines) {
return cursored
}
errLine := sourceLines[line-1]
cursored = append(cursored, errLine)
if column-1 > len(errLine) || column < 1 {
return cursored
} else if column == 1 {
cursored = append(cursored, "^")
} else {
cursored = append(cursored, strings.Repeat("-", column-1) + "^")
}
}
return cursored
}
// Walk the ast of expressions like (((x))) and return the inner *ParenExpr.
// Returns input Expr if it is not a *ParenExpr
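// For example, (((x + y))) reduces to (x + y), keeping the innermost
// ParenExpr so a BinaryExpr retains its explicit grouping, while (((f(x))))
// reduces all the way to f(x).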
func skipSuperfluousParens(expr Expr) Expr {
if p, ok := expr.(*ParenExpr); ok {
// Remove useless parens from (((x))) expressions
for tmp, ok := p.X.(*ParenExpr); ok; tmp, ok = p.X.(*ParenExpr) {
p = tmp
}
// Remove parens from all expressions where order of evaluation is irrelevant
switch p.X.(type) {
case *BinaryExpr:
return p
default:
return p.X
}
}
return expr
}
// Returns the float type that is half the width of the input complex type
func comprisingFloatType(complexType reflect.Type) reflect.Type {
if complexType == c128 {
return f64
} else {
return f32
}
}
// Evaluates an expression with a known result type. If the node is an
// untyped constant, it is converted to type t. This function assumes the
// input has been successfully type checked, and is therefore undefined for
// incorrectly typed inputs.
func evalTypedExpr(expr Expr, t knownType, env Env) (xs []reflect.Value, err error) {
if expr.IsConst() {
x := expr.Const()
if ct, ok := expr.KnownType()[0].(ConstType); ok {
cx, _ := promoteConstToTyped(ct, constValue(x), t[0], expr)
xs = []reflect.Value{reflect.Value(cx)}
} else {
xs = []reflect.Value{x}
}
} else {
xs, err = EvalExpr(expr, env)
}
return xs, err
}
// Type check an integral node. Returns the type checked node, the
// integer value if constant, ok if the node was indeed integral,
// and checkErrs which occur during the type check. It is possible
// that checkErrs will be non-nil yet ok is still true. In this case
// the errors are non-fatal, such as integer truncation.
func checkInteger(expr ast.Expr, env Env) (aexpr Expr, i int, ok bool, checkErrs []error) {
aexpr, checkErrs = CheckExpr(expr, env)
if checkErrs != nil && !aexpr.IsConst() {
return aexpr, 0, false, checkErrs
}
t, err := expectSingleType(aexpr)
if err != nil {
return aexpr, 0, false, append(checkErrs, err)
}
var ii int64
if ct, ok := t.(ConstType); ok {
c, moreErrs := promoteConstToTyped(ct, constValue(aexpr.Const()), intType, aexpr)
if moreErrs != nil {
checkErrs = append(checkErrs, moreErrs...)
}
v := reflect.Value(c)
if v.IsValid() {
ii = v.Int()
} else {
return aexpr, 0, false, checkErrs
}
} else {
switch t.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
if aexpr.IsConst() {
ii = aexpr.Const().Int()
}
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
if aexpr.IsConst() {
ii = int64(aexpr.Const().Uint())
}
default:
return aexpr, 0, false, checkErrs
}
}
return aexpr, int(ii), true, checkErrs
}
// Eval a node and cast it to an int. expr must be a *ConstNumber or have an integral type
func evalInteger(expr Expr, env Env) (int, error) {
if expr.IsConst() {
x := expr.Const()
if ct, ok := expr.KnownType()[0].(ConstType); ok {
cx, _ := promoteConstToTyped(ct, constValue(x), intType, expr)
return int(reflect.Value(cx).Int()), nil
} else {
panic(dytc("const bool or string evaluated as int"))
}
} else {
		xs, err := EvalExpr(expr, env)
if err != nil {
return 0, err
}
x := xs[0]
switch x.Type().Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return int(x.Int()), nil
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return int(x.Uint()), nil
default:
panic(dytc("non-integral type evaluated as int"))
}
}
}
func checkArrayIndex(expr ast.Expr, env Env) (aexpr Expr, i int, ok bool, checkErrs []error) {
aexpr, checkErrs = CheckExpr(expr, env)
if !aexpr.IsConst() {
return aexpr, 0, false, checkErrs
}
t := aexpr.KnownType()[0]
var ii int64
if ct, ok := t.(ConstType); ok {
c, moreErrs := promoteConstToTyped(ct, constValue(aexpr.Const()), intType, aexpr)
if moreErrs != nil {
checkErrs = append(checkErrs, moreErrs...)
}
v := reflect.Value(c)
if v.IsValid() {
ii = v.Int()
} else {
return aexpr, 0, false, checkErrs
}
} else {
switch t.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
ii = aexpr.Const().Int()
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
ii = int64(aexpr.Const().Uint())
default:
return aexpr, 0, false, checkErrs
}
}
// The limit of 2^31-1 is derived from the gc implementation,
// which seems to use this definition whilst type checking.
// The actual definition is the "largest value representable by an int"
return aexpr, int(ii), 0 <= ii && ii <= 0x7fffffff, checkErrs
}
// spec: addressable, that is, either a
// variable,
// pointer indirection,
// or slice indexing operation;
// or a field selector of an addressable struct operand;
// or an array indexing operation of an addressable array.
// As an exception to the addressability requirement, x may also be a (possibly parenthesized) composite literal
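//
// For example (illustrative): given var x [4]int, p *int, s []int and a
// map m, the expressions x, *p, s[0] and x[0] are addressable, while f()
// and m["k"] are not.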
func isAddressable(expr Expr) bool {
expr = skipSuperfluousParens(expr)
switch n := expr.(type) {
case *Ident:
return n.source == EnvVar
case *StarExpr:
return true
case *IndexExpr:
x := n.X
t := x.KnownType()[0]
switch t.Kind() {
case reflect.Slice:
return true
case reflect.Array:
return isAddressable(x)
case reflect.Ptr:
return true
}
case *SelectorExpr:
if n.pkgName != "" {
return isAddressable(n.Sel)
}
x := n.X
t := x.KnownType()[0]
switch t.Kind() {
case reflect.Struct:
return isAddressable(x)
case reflect.Ptr:
return true
}
}
return false
}
func isAddressableOrCompositeLit(expr Expr) bool {
expr = skipSuperfluousParens(expr)
if _, ok := expr.(*CompositeLit); ok {
return true
} else {
return isAddressable(expr)
}
}
func isStaticTypeComparable(t reflect.Type) bool {
switch t.Kind() {
case reflect.Slice, reflect.Map, reflect.Func:
return false
case reflect.Struct:
return isStructComparable(t)
default:
return true
}
}
func isStructComparable(structT reflect.Type) bool {
_, ok := nonComparableField(structT)
return !ok
}
func nonComparableField(structT reflect.Type) (reflect.StructField, bool) {
numField := structT.NumField()
	for i := 0; i < numField; i++ {
field := structT.Field(i)
if !isStaticTypeComparable(field.Type) {
return field, true
}
}
return reflect.StructField{}, false
}
func attemptBinaryOpConversion(to reflect.Type) bool {
switch to.Kind() {
case reflect.Invalid, reflect.Array, reflect.Chan, reflect.Func, reflect.Interface,
reflect.Map, reflect.Ptr, reflect.Slice, reflect.Struct:
return false
}
return true
}
func comparableToNilOnly(x reflect.Type) bool {
switch x.Kind() {
case reflect.Func, reflect.Map, reflect.Slice:
return true
}
return false
}
func isNillable(t reflect.Type) bool {
switch t.Kind() {
case reflect.Chan, reflect.Func, reflect.Interface,
reflect.Map, reflect.Ptr, reflect.Slice:
return true
}
return false
}
func isUnsignedInt(t reflect.Type) bool {
	// All const numeric types can be "truncated" to an unsigned int, and
	// are therefore valid for type checking purposes.
if ct, ok := t.(ConstType); ok {
return ct.IsNumeric()
}
switch t.Kind() {
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
return true
}
return false
}
func isShiftable(t reflect.Type) bool {
switch t.(type) {
case ConstNilType, ConstBoolType, ConstComplexType, ConstStringType:
return false
case ConstIntType, ConstFloatType, ConstRuneType:
return true
default:
switch t.Kind() {
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr,
reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return true
default:
return false
}
}
}
func isBlankIdentifier(blank ast.Expr) bool {
switch x := blank.(type) {
case *ast.ParenExpr:
return isBlankIdentifier(x.X)
case *ast.Ident:
return x.Name == "_"
}
return false
}
func multivalueOk(expr Expr) bool {
switch e := skipSuperfluousParens(expr).(type) {
case *TypeAssertExpr:
return true
case *IndexExpr:
return e.X.KnownType()[0].Kind() == reflect.Map
case *UnaryExpr:
return e.Op == token.ARROW
default:
return false
}
}
func inTopEnv(name string, env Env) bool {
if v := env.Var(name); v.IsValid() {
return true
} else if v := env.Const(name); v.IsValid() {
return true
} else if v := env.Func(name); v.IsValid() {
return true
} else {
return false
}
}
func equal(x, y reflect.Value) (bool, error) {
if t := areDynamicTypesComparable(x, y); t != nil {
return false, PanicUncomparableType{t}
} else {
return x.Interface() == y.Interface(), nil
}
}
| {
errs = append(errs, moreErrs...)
} |
index.js | const extract = require("extract-zip");
const fs = require("fs");
const path = require("path");
const createCsvWriter = require("csv-writer").createObjectCsvWriter;
const OUTPUT_DIR = process.argv[3] || process.cwd() + "/output";
const CSV_FILE = process.cwd() + "/file.csv";
const csvWriter = createCsvWriter({
path: CSV_FILE,
header: [
{ id: "folder", title: "FOLDER" },
{ id: "xml1", title: "XML1" },
{ id: "xml2", title: "XML2" },
{ id: "xml3", title: "XML3" },
{ id: "xml4", title: "XML4" },
{ id: "xml5", title: "XML5" },
],
});
async function main() {
const source = process.argv[2];
const target = OUTPUT_DIR;
await extractZip(source, target);
await unzipFiles(OUTPUT_DIR);
console.log("----------UNZIP SUCCESSFULL-----------");
const xmlZipFiles = getAllFiles(OUTPUT_DIR, []);
const records = createDataSet(xmlZipFiles);
  await writeToCSV(records);
}
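// Usage (illustrative): node index.js <source.zip> [outputDir]
// e.g. node index.js ./export.zip ./output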
async function | (source, target) {
try {
await extract(source, { dir: target });
console.log("Extraction complete");
} catch (err) {
console.log("Oops: extractZip failed", err);
}
}
const zippedFiles = [];
const unzipFiles = async function (dirPath) {
const files = fs.readdirSync(dirPath);
await Promise.all(
files.map(async (file) => {
if (fs.statSync(dirPath + "/" + file).isDirectory()) {
await unzipFiles(dirPath + "/" + file);
} else {
const fullFilePath = path.join(dirPath, "/", file);
const folderName = file.replace(".zip", "");
if (file.endsWith(".zip")) {
zippedFiles.push(folderName);
await extractZip(fullFilePath, path.join(dirPath, "/", folderName));
await unzipFiles(path.join(dirPath, "/", folderName));
}
}
})
);
};
const getAllFiles = function (dirPath, arrayOfFiles) {
const files = fs.readdirSync(dirPath);
arrayOfFiles = arrayOfFiles || [];
files.forEach(function (file) {
if (fs.statSync(dirPath + "/" + file).isDirectory()) {
arrayOfFiles = getAllFiles(dirPath + "/" + file, arrayOfFiles);
} else {
const fullFilePath = path.join(dirPath, "/", file);
if (file.endsWith(".xml")) {
        arrayOfFiles.push(fullFilePath);
}
if (file.endsWith(".zip")) {
arrayOfFiles.push(file);
}
}
});
return arrayOfFiles;
};
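// createDataSet groups each .zip entry with the (up to four) .xml paths
// that immediately precede it in the flattened file list. For example
// (illustrative): ["a.xml", "b.xml", "f_1.zip"] yields
// [{ folder: "f_1.zip", xmls: ["b.xml", "a.xml"] }].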
function createDataSet(filesArray) {
const records = [];
filesArray.forEach((file, index) => {
if (file.endsWith(".zip")) {
if (filesArray[index - 1] && filesArray[index - 1].endsWith(".xml")) {
let i = 1;
const xmls = [];
        while (i <= 4) {
          // Guard against reading before the start of the array.
          if (filesArray[index - i] && filesArray[index - i].endsWith(".xml")) {
            xmls.push(filesArray[index - i]);
          }
          i++;
        }
records.push({ folder: file, xmls });
}
}
});
return records;
}
async function writeToCSV(rawRecords) {
  const records = [];
  // Build all records first, then write the CSV once at the end.
  // Promise.all is needed because Array.prototype.map returns an array
  // of promises, which `await` alone would not wait for.
  await Promise.all(
    rawRecords.map(async (r) => {
      const xmlObj = {};
      await Promise.all(
        r.xmls.map(async (xml) => {
          let xmlNumber = 5; // no match
          if (xml.includes("PersonalDetailsFlowRuleset")) xmlNumber = 1;
          if (xml.includes("MyDocumentsFlowRuleset")) xmlNumber = 2;
          if (xml.includes("FamilyAndEmergencyContactsFlowRuleset"))
            xmlNumber = 3;
          if (xml.includes("ContactInformationFlowRuleset")) xmlNumber = 4;
          const data = await fs.promises.readFile(xml, "utf8");
          xmlObj["xml" + xmlNumber] = data;
        })
      );
      records.push({
        folder: r.folder.replace(".zip", "").split("_")[1],
        ...xmlObj,
      });
    })
  );
  await csvWriter.writeRecords(records);
}
main();
| extractZip |
lib.rs | //! Elliptic Curve Digital Signature Algorithm (ECDSA) as specified in
//! [FIPS 186-4] (Digital Signature Standard)
//!
//! ## About
//!
//! This crate provides generic ECDSA support which can be used in the
//! following ways:
//!
//! - Generic implementation of ECDSA usable with the following crates:
//! - [`k256`] (secp256k1)
//! - [`p256`] (NIST P-256)
//! - ECDSA signature types alone which can be used to provide interoperability
//! between other crates that provide an ECDSA implementation:
//! - [`p384`] (NIST P-384)
//!
//! Any crates which provide an implementation of ECDSA for a particular
//! elliptic curve can leverage the types from this crate, along with the
//! [`k256`], [`p256`], and/or [`p384`] crates to expose ECDSA functionality in
//! a generic, interoperable way by leveraging the [`Signature`] type in
//! conjunction with the [`signature::Signer`] and [`signature::Verifier`]
//! traits.
//!
//! For example, the [`ring-compat`] crate implements the [`signature::Signer`]
//! and [`signature::Verifier`] traits in conjunction with the
//! [`p256::ecdsa::Signature`] and [`p384::ecdsa::Signature`] types to
//! wrap the ECDSA implementations from [*ring*] in a generic, interoperable
//! API.
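//!
//! ## Usage (sketch)
//!
//! A minimal signing/verification sketch. Constructor names and re-export
//! paths vary across curve crate versions, so treat this as illustrative
//! rather than a pinned API:
//!
//! ```ignore
//! use signature::{Signer, Verifier};
//!
//! // Assumes a curve crate (e.g. `p256`) exposing ECDSA key types that
//! // implement the `Signer`/`Verifier` traits from the `signature` crate.
//! let signing_key = p256::ecdsa::SigningKey::random(&mut rand_core::OsRng);
//! let signature = signing_key.sign(b"example message");
//! let verifying_key = p256::ecdsa::VerifyingKey::from(&signing_key);
//! assert!(verifying_key.verify(b"example message", &signature).is_ok());
//! ```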
//!
//! ## Minimum Supported Rust Version
//!
//! This crate requires **Rust 1.46** or higher.
//!
//! Minimum supported Rust version may be changed in the future, but it will be
//! accompanied with a minor version bump.
//!
//! [FIPS 186-4]: https://csrc.nist.gov/publications/detail/fips/186/4/final
//! [`k256`]: https://docs.rs/k256
//! [`p256`]: https://docs.rs/p256
//! [`p256::ecdsa::Signature`]: https://docs.rs/p256/latest/p256/ecdsa/type.Signature.html
//! [`p384`]: https://docs.rs/p384
//! [`p384::ecdsa::Signature`]: https://docs.rs/p384/latest/p384/ecdsa/type.Signature.html
//! [`ring-compat`]: https://docs.rs/ring-compat
//! [*ring*]: https://docs.rs/ring
#![no_std]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![forbid(unsafe_code)]
#![warn(missing_docs, rust_2018_idioms)]
#![doc(
html_logo_url = "https://raw.githubusercontent.com/RustCrypto/meta/master/logo_small.png",
html_root_url = "https://docs.rs/ecdsa/0.10.2"
)]
#[cfg(feature = "alloc")]
extern crate alloc;
pub mod asn1;
#[cfg(feature = "dev")]
#[cfg_attr(docsrs, doc(cfg(feature = "dev")))]
pub mod dev;
#[cfg(feature = "hazmat")]
#[cfg_attr(docsrs, doc(cfg(feature = "hazmat")))]
pub mod hazmat;
#[cfg(feature = "sign")]
#[cfg_attr(docsrs, doc(cfg(feature = "sign")))]
pub mod rfc6979;
#[cfg(feature = "sign")]
#[cfg_attr(docsrs, doc(cfg(feature = "sign")))]
pub mod sign;
#[cfg(feature = "verify")]
#[cfg_attr(docsrs, doc(cfg(feature = "verify")))]
pub mod verify;
// Re-export the `elliptic-curve` crate (and select types)
pub use elliptic_curve::{self, generic_array, sec1::EncodedPoint, weierstrass::Curve};
// Re-export the `signature` crate (and select types)
pub use signature::{self, Error};
#[cfg(feature = "sign")]
#[cfg_attr(docsrs, doc(cfg(feature = "sign")))]
pub use sign::SigningKey;
#[cfg(feature = "verify")]
#[cfg_attr(docsrs, doc(cfg(feature = "verify")))]
pub use verify::VerifyingKey;
#[cfg(feature = "zeroize")]
#[cfg_attr(docsrs, doc(cfg(feature = "zeroize")))]
pub use elliptic_curve::SecretKey;
#[cfg(feature = "pkcs8")]
pub use elliptic_curve::pkcs8;
use core::{
convert::{TryFrom, TryInto},
fmt::{self, Debug},
ops::Add,
};
use elliptic_curve::FieldBytes;
use generic_array::{sequence::Concat, typenum::Unsigned, ArrayLength, GenericArray};
#[cfg(feature = "arithmetic")]
use elliptic_curve::{
ff::PrimeField,
scalar::{NonZeroScalar, Scalar},
ProjectiveArithmetic,
};
/// Size of a fixed sized signature for the given elliptic curve.
pub type SignatureSize<C> = <<C as elliptic_curve::Curve>::FieldSize as Add>::Output;
/// Fixed-size byte array containing an ECDSA signature
pub type SignatureBytes<C> = GenericArray<u8, SignatureSize<C>>;
/// ECDSA signatures (fixed-size).
///
/// Generic over elliptic curve types.
///
/// These signatures are serialized as fixed-sized big endian scalar values
/// with no additional framing:
///
/// - `r`: field element size for the given curve, big-endian
/// - `s`: field element size for the given curve, big-endian
///
/// For example, in a curve with a 256-bit modulus like NIST P-256 or
/// secp256k1, `r` and `s` will both be 32 bytes, resulting in a signature
/// with a total length of 64 bytes.
///
/// ASN.1 is also supported via the [`Signature::from_asn1`] and
/// [`Signature::to_asn1`] methods.
#[derive(Clone, Eq, PartialEq)]
pub struct Signature<C: Curve + CheckSignatureBytes>
where
SignatureSize<C>: ArrayLength<u8>,
{
bytes: SignatureBytes<C>,
}
impl<C> Signature<C>
where
C: Curve + CheckSignatureBytes,
SignatureSize<C>: ArrayLength<u8>,
{
/// Create a [`Signature`] from the serialized `r` and `s` scalar values
/// which comprise the signature.
pub fn from_scalars(
r: impl Into<FieldBytes<C>>,
s: impl Into<FieldBytes<C>>,
) -> Result<Self, Error> {
Self::try_from(r.into().concat(s.into()).as_slice())
}
/// Parse a signature from ASN.1 DER
pub fn from_asn1(bytes: &[u8]) -> Result<Self, Error>
where
C::FieldSize: Add + ArrayLength<u8>,
asn1::MaxSize<C>: ArrayLength<u8>,
<C::FieldSize as Add>::Output: Add<asn1::MaxOverhead> + ArrayLength<u8>,
{
asn1::Signature::<C>::try_from(bytes).and_then(TryInto::try_into)
}
/// Serialize this signature as ASN.1 DER
pub fn to_asn1(&self) -> asn1::Signature<C>
where
C::FieldSize: Add + ArrayLength<u8>,
asn1::MaxSize<C>: ArrayLength<u8>,
<C::FieldSize as Add>::Output: Add<asn1::MaxOverhead> + ArrayLength<u8>,
{
let (r, s) = self.bytes.split_at(C::FieldSize::to_usize());
asn1::Signature::from_scalar_bytes(r, s)
}
}
#[cfg(feature = "arithmetic")]
#[cfg_attr(docsrs, doc(cfg(feature = "arithmetic")))]
impl<C> Signature<C>
where
C: Curve + ProjectiveArithmetic,
Scalar<C>: PrimeField<Repr = FieldBytes<C>>,
<Scalar<C> as PrimeField>::Repr: From<Scalar<C>> + for<'a> From<&'a Scalar<C>>,
SignatureSize<C>: ArrayLength<u8>,
{
/// Get the `r` component of this signature
pub fn r(&self) -> NonZeroScalar<C> {
let r_bytes = GenericArray::clone_from_slice(&self.bytes[..C::FieldSize::to_usize()]);
NonZeroScalar::from_repr(r_bytes)
.unwrap_or_else(|| unreachable!("r-component ensured valid in constructor"))
}
/// Get the `s` component of this signature
pub fn s(&self) -> NonZeroScalar<C> {
let s_bytes = GenericArray::clone_from_slice(&self.bytes[C::FieldSize::to_usize()..]);
NonZeroScalar::from_repr(s_bytes)
            .unwrap_or_else(|| unreachable!("s-component ensured valid in constructor"))
}
/// Normalize signature into "low S" form as described in
/// [BIP 0062: Dealing with Malleability][1].
///
/// [1]: https://github.com/bitcoin/bips/blob/master/bip-0062.mediawiki
pub fn normalize_s(&mut self) -> Result<bool, Error>
where
Scalar<C>: NormalizeLow,
{
let s_bytes = GenericArray::from_mut_slice(&mut self.bytes[C::FieldSize::to_usize()..]);
Scalar::<C>::from_repr(s_bytes.clone())
.map(|s| {
let (s_low, was_high) = s.normalize_low();
if was_high {
s_bytes.copy_from_slice(&s_low.to_repr());
}
was_high
})
.ok_or_else(Error::new)
}
}
impl<C> signature::Signature for Signature<C>
where
C: Curve + CheckSignatureBytes,
SignatureSize<C>: ArrayLength<u8>,
{
fn from_bytes(bytes: &[u8]) -> Result<Self, Error> {
Self::try_from(bytes)
}
}
impl<C> AsRef<[u8]> for Signature<C>
where
C: Curve + CheckSignatureBytes,
SignatureSize<C>: ArrayLength<u8>,
{
fn as_ref(&self) -> &[u8] {
self.bytes.as_slice()
}
}
impl<C> Copy for Signature<C>
where
C: Curve + CheckSignatureBytes,
SignatureSize<C>: ArrayLength<u8>,
<SignatureSize<C> as ArrayLength<u8>>::ArrayType: Copy,
{
}
impl<C> Debug for Signature<C>
where
C: Curve + CheckSignatureBytes,
SignatureSize<C>: ArrayLength<u8>,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"ecdsa::Signature<{:?}>({:?})",
C::default(),
self.as_ref()
)
}
}
impl<C> TryFrom<&[u8]> for Signature<C>
where
C: Curve + CheckSignatureBytes,
SignatureSize<C>: ArrayLength<u8>,
{
type Error = Error;
fn try_from(bytes: &[u8]) -> Result<Self, Error> {
if bytes.len() != <SignatureSize<C>>::to_usize() {
return Err(Error::new());
}
let bytes = GenericArray::clone_from_slice(bytes);
C::check_signature_bytes(&bytes)?;
Ok(Self { bytes })
}
}
impl<C> TryFrom<asn1::Signature<C>> for Signature<C>
where
C: Curve + CheckSignatureBytes,
C::FieldSize: Add + ArrayLength<u8>,
asn1::MaxSize<C>: ArrayLength<u8>,
<C::FieldSize as Add>::Output: Add<asn1::MaxOverhead> + ArrayLength<u8>,
{
type Error = Error;
fn try_from(doc: asn1::Signature<C>) -> Result<Signature<C>, Error> |
}
/// Ensure a signature is well-formed.
pub trait CheckSignatureBytes: Curve
where
SignatureSize<Self>: ArrayLength<u8>,
{
/// Validate that the given signature is well-formed.
///
/// This trait is auto-impl'd for curves which impl the
/// `elliptic_curve::ProjectiveArithmetic` trait, which validates that the
/// `r` and `s` components of the signature are in range of the
/// scalar field.
///
/// Note that this trait is not for verifying a signature, but allows for
/// asserting properties of it which allow infallible conversions
/// (e.g. accessors for the `r` and `s` components)
fn check_signature_bytes(bytes: &SignatureBytes<Self>) -> Result<(), Error> {
// Ensure `r` and `s` are both non-zero
// TODO(tarcieri): check that `r` and `s` are in range of the curve's order
for scalar_bytes in bytes.chunks(Self::FieldSize::to_usize()) {
if scalar_bytes.iter().all(|&b| b == 0) {
return Err(Error::new());
}
}
Ok(())
}
}
#[cfg(feature = "arithmetic")]
#[cfg_attr(docsrs, doc(cfg(feature = "arithmetic")))]
impl<C> CheckSignatureBytes for C
where
C: Curve + ProjectiveArithmetic,
FieldBytes<C>: From<Scalar<C>> + for<'a> From<&'a Scalar<C>>,
Scalar<C>: PrimeField<Repr = FieldBytes<C>>,
SignatureSize<C>: ArrayLength<u8>,
{
/// When curve arithmetic is available, check that the scalar components
/// of the signature are in range.
fn check_signature_bytes(bytes: &SignatureBytes<C>) -> Result<(), Error> {
let (r, s) = bytes.split_at(C::FieldSize::to_usize());
let r_ok = NonZeroScalar::<C>::from_repr(GenericArray::clone_from_slice(r)).is_some();
let s_ok = NonZeroScalar::<C>::from_repr(GenericArray::clone_from_slice(s)).is_some();
if r_ok && s_ok {
Ok(())
} else {
Err(Error::new())
}
}
}
/// Normalize a scalar (i.e. ECDSA S) to the lower half the field, as described
/// in [BIP 0062: Dealing with Malleability][1].
///
/// [1]: https://github.com/bitcoin/bips/blob/master/bip-0062.mediawiki
pub trait NormalizeLow: Sized {
/// Normalize scalar to the lower half of the field (i.e. negate it if it's
/// larger than half the curve's order).
/// Returns a tuple with the new scalar and a boolean indicating whether the given scalar
/// was in the higher half.
///
/// May be implemented to work in variable time.
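    ///
    /// Concretely (illustrative), for a curve of order `n`: if `s > n/2`,
    /// return `(n - s, true)`; otherwise return `(s, false)`.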
fn normalize_low(&self) -> (Self, bool);
}
| {
let mut bytes = GenericArray::default();
let scalar_size = C::FieldSize::to_usize();
let r_begin = scalar_size.checked_sub(doc.r().len()).unwrap();
let s_begin = bytes.len().checked_sub(doc.s().len()).unwrap();
bytes[r_begin..scalar_size].copy_from_slice(doc.r());
bytes[s_begin..].copy_from_slice(doc.s());
C::check_signature_bytes(&bytes)?;
Ok(Signature { bytes })
} |
dropck_outlives.rs | use rustc::hir::def_id::DefId;
use rustc::infer::canonical::{Canonical, QueryResponse};
use rustc::traits::query::dropck_outlives::{DropckOutlivesResult, DtorckConstraint};
use rustc::traits::query::{CanonicalTyGoal, NoSolution};
use rustc::traits::{TraitEngine, Normalized, ObligationCause, TraitEngineExt};
use rustc::ty::query::Providers;
use rustc::ty::subst::{Subst, Substs};
use rustc::ty::{self, ParamEnvAnd, Ty, TyCtxt};
use rustc::util::nodemap::FxHashSet;
use rustc_data_structures::sync::Lrc;
use syntax::source_map::{Span, DUMMY_SP};
crate fn provide(p: &mut Providers) {
*p = Providers {
dropck_outlives,
adt_dtorck_constraint,
..*p
};
}
fn dropck_outlives<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
canonical_goal: CanonicalTyGoal<'tcx>,
) -> Result<Lrc<Canonical<'tcx, QueryResponse<'tcx, DropckOutlivesResult<'tcx>>>>, NoSolution> {
debug!("dropck_outlives(goal={:#?})", canonical_goal);
tcx.infer_ctxt().enter_with_canonical(
DUMMY_SP,
&canonical_goal,
|ref infcx, goal, canonical_inference_vars| {
let tcx = infcx.tcx;
let ParamEnvAnd {
param_env,
value: for_ty,
} = goal;
let mut result = DropckOutlivesResult {
kinds: vec![],
overflows: vec![],
};
// A stack of types left to process. Each round, we pop
// something from the stack and invoke
// `dtorck_constraint_for_ty`. This may produce new types that
// have to be pushed on the stack. This continues until we have explored
// all the reachable types from the type `for_ty`.
//
// Example: Imagine that we have the following code:
//
// ```rust
// struct A {
// value: B,
// children: Vec<A>,
// }
//
// struct B {
// value: u32
// }
//
// fn f() {
// let a: A = ...;
// ..
// } // here, `a` is dropped
// ```
//
// at the point where `a` is dropped, we need to figure out
// which types inside of `a` contain region data that may be
// accessed by any destructors in `a`. We begin by pushing `A`
// onto the stack, as that is the type of `a`. We will then
// invoke `dtorck_constraint_for_ty` which will expand `A`
// into the types of its fields `(B, Vec<A>)`. These will get
// pushed onto the stack. Eventually, expanding `Vec<A>` will
// lead to us trying to push `A` a second time -- to prevent
// infinite recursion, we notice that `A` was already pushed
// once and stop.
let mut ty_stack = vec![(for_ty, 0)];
// Set used to detect infinite recursion.
let mut ty_set = FxHashSet::default();
let mut fulfill_cx = TraitEngine::new(infcx.tcx);
let cause = ObligationCause::dummy();
while let Some((ty, depth)) = ty_stack.pop() {
let DtorckConstraint {
dtorck_types,
outlives,
overflows,
} = dtorck_constraint_for_ty(tcx, DUMMY_SP, for_ty, depth, ty)?;
// "outlives" represent types/regions that may be touched
// by a destructor.
result.kinds.extend(outlives);
result.overflows.extend(overflows);
// dtorck types are "types that will get dropped but which
// do not themselves define a destructor", more or less. We have
// to push them onto the stack to be expanded.
for ty in dtorck_types {
match infcx.at(&cause, param_env).normalize(&ty) {
Ok(Normalized {
value: ty,
obligations,
}) => {
fulfill_cx.register_predicate_obligations(infcx, obligations);
debug!("dropck_outlives: ty from dtorck_types = {:?}", ty);
match ty.sty {
// All parameters live for the duration of the
// function.
ty::Param(..) => {}
// A projection that we couldn't resolve - it
// might have a destructor.
ty::Projection(..) | ty::Opaque(..) => {
result.kinds.push(ty.into());
}
_ => {
if ty_set.insert(ty) {
ty_stack.push((ty, depth + 1));
}
}
}
}
// We don't actually expect to fail to normalize.
// That implies a WF error somewhere else.
Err(NoSolution) => {
return Err(NoSolution);
}
}
}
}
debug!("dropck_outlives: result = {:#?}", result);
infcx.make_canonicalized_query_response(
canonical_inference_vars,
result,
&mut *fulfill_cx
)
},
)
}
/// Returns the set of constraints that need to be satisfied in
/// order for `ty` to be valid for destruction.
fn dtorck_constraint_for_ty<'a, 'gcx, 'tcx>(
tcx: TyCtxt<'a, 'gcx, 'tcx>,
span: Span,
for_ty: Ty<'tcx>,
depth: usize,
ty: Ty<'tcx>,
) -> Result<DtorckConstraint<'tcx>, NoSolution> {
debug!(
"dtorck_constraint_for_ty({:?}, {:?}, {:?}, {:?})",
span, for_ty, depth, ty
);
if depth >= *tcx.sess.recursion_limit.get() {
return Ok(DtorckConstraint {
outlives: vec![],
dtorck_types: vec![],
overflows: vec![ty],
});
}
let result = match ty.sty {
ty::Bool
| ty::Char
| ty::Int(_)
| ty::Uint(_)
| ty::Float(_)
| ty::Str
| ty::Never
| ty::Foreign(..)
| ty::RawPtr(..)
| ty::Ref(..)
| ty::FnDef(..)
| ty::FnPtr(_)
| ty::GeneratorWitness(..) => {
// these types never have a destructor
Ok(DtorckConstraint::empty())
}
ty::Array(ety, _) | ty::Slice(ety) => {
// single-element containers, behave like their element
dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ety)
}
ty::Tuple(tys) => tys.iter()
.map(|ty| dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty))
.collect(),
ty::Closure(def_id, substs) => substs
.upvar_tys(def_id, tcx)
.map(|ty| dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty)) | // in the interior, and sit idle when generator yields
// (and is subsequently dropped).
//
// It would be nice to descend into interior of a
// generator to determine what effects dropping it might
// have (by looking at any drop effects associated with
// its interior).
//
// However, the interior's representation uses things like
// GeneratorWitness that explicitly assume they are not
// traversed in such a manner. So instead, we will
// simplify things for now by treating all generators as
// if they were like trait objects, where its upvars must
// all be alive for the generator's (potential)
// destructor.
//
// In particular, skipping over `_interior` is safe
// because any side-effects from dropping `_interior` can
// only take place through references with lifetimes
// derived from lifetimes attached to the upvars, and we
// *do* incorporate the upvars here.
let constraint = DtorckConstraint {
outlives: substs.upvar_tys(def_id, tcx).map(|t| t.into()).collect(),
dtorck_types: vec![],
overflows: vec![],
};
debug!(
"dtorck_constraint: generator {:?} => {:?}",
def_id, constraint
);
Ok(constraint)
}
ty::Adt(def, substs) => {
let DtorckConstraint {
dtorck_types,
outlives,
overflows,
} = tcx.at(span).adt_dtorck_constraint(def.did)?;
Ok(DtorckConstraint {
                // FIXME: we can try to recursively `dtorck_constraint_for_ty`
// there, but that needs some way to handle cycles.
dtorck_types: dtorck_types.subst(tcx, substs),
outlives: outlives.subst(tcx, substs),
overflows: overflows.subst(tcx, substs),
})
}
// Objects must be alive in order for their destructor
// to be called.
ty::Dynamic(..) => Ok(DtorckConstraint {
outlives: vec![ty.into()],
dtorck_types: vec![],
overflows: vec![],
}),
// Types that can't be resolved. Pass them forward.
ty::Projection(..) | ty::Opaque(..) | ty::Param(..) => Ok(DtorckConstraint {
outlives: vec![],
dtorck_types: vec![ty],
overflows: vec![],
}),
ty::UnnormalizedProjection(..) => bug!("only used with chalk-engine"),
ty::Placeholder(..) | ty::Bound(..) | ty::Infer(..) | ty::Error => {
// By the time this code runs, all type variables ought to
// be fully resolved.
Err(NoSolution)
}
};
debug!("dtorck_constraint_for_ty({:?}) = {:?}", ty, result);
result
}
/// Calculates the dtorck constraint for a type.
crate fn adt_dtorck_constraint<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
) -> Result<DtorckConstraint<'tcx>, NoSolution> {
let def = tcx.adt_def(def_id);
let span = tcx.def_span(def_id);
debug!("dtorck_constraint: {:?}", def);
if def.is_phantom_data() {
// The first generic parameter here is guaranteed to be a type because it's
// `PhantomData`.
let substs = Substs::identity_for_item(tcx, def_id);
assert_eq!(substs.len(), 1);
let result = DtorckConstraint {
outlives: vec![],
dtorck_types: vec![substs.type_at(0)],
overflows: vec![],
};
debug!("dtorck_constraint: {:?} => {:?}", def, result);
return Ok(result);
}
let mut result = def.all_fields()
.map(|field| tcx.type_of(field.did))
.map(|fty| dtorck_constraint_for_ty(tcx, span, fty, 0, fty))
.collect::<Result<DtorckConstraint, NoSolution>>()?;
result.outlives.extend(tcx.destructor_constraints(def));
dedup_dtorck_constraint(&mut result);
debug!("dtorck_constraint: {:?} => {:?}", def, result);
Ok(result)
}
fn dedup_dtorck_constraint<'tcx>(c: &mut DtorckConstraint<'tcx>) {
let mut outlives = FxHashSet::default();
let mut dtorck_types = FxHashSet::default();
c.outlives.retain(|&val| outlives.replace(val).is_none());
c.dtorck_types
.retain(|&val| dtorck_types.replace(val).is_none());
} | .collect(),
ty::Generator(def_id, substs, _movability) => {
// rust-lang/rust#49918: types can be constructed, stored |
config.go | // Copyright 2016 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
package params
import (
"encoding/binary"
"errors"
"fmt"
"math/big"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/crypto"
)
// Genesis hashes to enforce below configs on.
var (
MainnetGenesisHash = common.HexToHash("0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3")
RopstenGenesisHash = common.HexToHash("0x41941023680923e0fe4d74a34bdac8141f2540e3ae90623718e47d66d1ca4a2d")
RinkebyGenesisHash = common.HexToHash("0x6341fd3daf94b748c72ced5a5b26028f2474f5f00d824504e4fa37a75767e177")
GoerliGenesisHash = common.HexToHash("0xbf7e331f7f7c1dd2e05159666b3bf8bc7a8a3a9eb1d518969eab529dd9b88c1a")
// TODO: update with yolov2 values
YoloV2GenesisHash = common.HexToHash("0x498a7239036dd2cd09e2bb8a80922b78632017958c332b42044c250d603a8a3e")
)
// TrustedCheckpoints associates each known checkpoint with the genesis hash of
// the chain it belongs to.
var TrustedCheckpoints = map[common.Hash]*TrustedCheckpoint{
MainnetGenesisHash: MainnetTrustedCheckpoint,
RopstenGenesisHash: RopstenTrustedCheckpoint,
RinkebyGenesisHash: RinkebyTrustedCheckpoint,
GoerliGenesisHash: GoerliTrustedCheckpoint,
}
// CheckpointOracles associates each known checkpoint oracles with the genesis hash of
// the chain it belongs to.
var CheckpointOracles = map[common.Hash]*CheckpointOracleConfig{
MainnetGenesisHash: MainnetCheckpointOracle,
RopstenGenesisHash: RopstenCheckpointOracle,
RinkebyGenesisHash: RinkebyCheckpointOracle,
GoerliGenesisHash: GoerliCheckpointOracle,
}
var (
// MainnetChainConfig is the chain parameters to run a node on the main network.
MainnetChainConfig = &ChainConfig{
ChainID: big.NewInt(1),
HomesteadBlock: big.NewInt(1150000),
DAOForkBlock: big.NewInt(1920000),
DAOForkSupport: true,
EIP150Block: big.NewInt(2463000),
EIP150Hash: common.HexToHash("0x2086799aeebeae135c246c65021c82b4e15a2c451340993aacfd2751886514f0"),
EIP155Block: big.NewInt(2675000),
EIP158Block: big.NewInt(2675000),
ByzantiumBlock: big.NewInt(4370000),
ConstantinopleBlock: big.NewInt(7280000),
PetersburgBlock: big.NewInt(7280000),
IstanbulBlock: big.NewInt(9069000),
MuirGlacierBlock: big.NewInt(9200000),
Ethash: new(EthashConfig),
}
// MainnetTrustedCheckpoint contains the light client trusted checkpoint for the main network.
MainnetTrustedCheckpoint = &TrustedCheckpoint{
SectionIndex: 345,
SectionHead: common.HexToHash("0x5453bab878704adebc934b41fd214a07ea7a72b8572ff088dca7f7956cd0ef28"),
CHTRoot: common.HexToHash("0x7693d432595846c094f47cb37f5c868b0b7b1968fc6b0fc411ded1345fdaffab"), | MainnetCheckpointOracle = &CheckpointOracleConfig{
Address: common.HexToAddress("0x9a9070028361F7AAbeB3f2F2Dc07F82C4a98A02a"),
Signers: []common.Address{
common.HexToAddress("0x1b2C260efc720BE89101890E4Db589b44E950527"), // Peter
common.HexToAddress("0x78d1aD571A1A09D60D9BBf25894b44e4C8859595"), // Martin
common.HexToAddress("0x286834935f4A8Cfb4FF4C77D5770C2775aE2b0E7"), // Zsolt
common.HexToAddress("0xb86e2B0Ab5A4B1373e40c51A7C712c70Ba2f9f8E"), // Gary
common.HexToAddress("0x0DF8fa387C602AE62559cC4aFa4972A7045d6707"), // Guillaume
},
Threshold: 2,
}
// RopstenChainConfig contains the chain parameters to run a node on the Ropsten test network.
RopstenChainConfig = &ChainConfig{
ChainID: big.NewInt(3),
HomesteadBlock: big.NewInt(0),
DAOForkBlock: nil,
DAOForkSupport: true,
EIP150Block: big.NewInt(0),
EIP150Hash: common.HexToHash("0x41941023680923e0fe4d74a34bdac8141f2540e3ae90623718e47d66d1ca4a2d"),
EIP155Block: big.NewInt(10),
EIP158Block: big.NewInt(10),
ByzantiumBlock: big.NewInt(1700000),
ConstantinopleBlock: big.NewInt(4230000),
PetersburgBlock: big.NewInt(4939394),
IstanbulBlock: big.NewInt(6485846),
MuirGlacierBlock: big.NewInt(7117117),
Ethash: new(EthashConfig),
}
// RopstenTrustedCheckpoint contains the light client trusted checkpoint for the Ropsten test network.
RopstenTrustedCheckpoint = &TrustedCheckpoint{
SectionIndex: 279,
SectionHead: common.HexToHash("0x4a4912848d4c06090097073357c10015d11c6f4544a0f93cbdd584701c3b7d58"),
CHTRoot: common.HexToHash("0x9053b7867ae921e80a4e2f5a4b15212e4af3d691ca712fb33dc150e9c6ea221c"),
BloomRoot: common.HexToHash("0x3dc04cb1be7ddc271f3f83469b47b76184a79d7209ef51d85b1539ea6d25a645"),
}
// RopstenCheckpointOracle contains a set of configs for the Ropsten test network oracle.
RopstenCheckpointOracle = &CheckpointOracleConfig{
Address: common.HexToAddress("0xEF79475013f154E6A65b54cB2742867791bf0B84"),
Signers: []common.Address{
common.HexToAddress("0x32162F3581E88a5f62e8A61892B42C46E2c18f7b"), // Peter
common.HexToAddress("0x78d1aD571A1A09D60D9BBf25894b44e4C8859595"), // Martin
common.HexToAddress("0x286834935f4A8Cfb4FF4C77D5770C2775aE2b0E7"), // Zsolt
common.HexToAddress("0xb86e2B0Ab5A4B1373e40c51A7C712c70Ba2f9f8E"), // Gary
common.HexToAddress("0x0DF8fa387C602AE62559cC4aFa4972A7045d6707"), // Guillaume
},
Threshold: 2,
}
// RinkebyChainConfig contains the chain parameters to run a node on the Rinkeby test network.
RinkebyChainConfig = &ChainConfig{
ChainID: big.NewInt(4),
HomesteadBlock: big.NewInt(1),
DAOForkBlock: nil,
DAOForkSupport: true,
EIP150Block: big.NewInt(2),
EIP150Hash: common.HexToHash("0x9b095b36c15eaf13044373aef8ee0bd3a382a5abb92e402afa44b8249c3a90e9"),
EIP155Block: big.NewInt(3),
EIP158Block: big.NewInt(3),
ByzantiumBlock: big.NewInt(1035301),
ConstantinopleBlock: big.NewInt(3660663),
PetersburgBlock: big.NewInt(4321234),
IstanbulBlock: big.NewInt(5435345),
MuirGlacierBlock: nil,
Clique: &CliqueConfig{
Period: 15,
Epoch: 30000,
},
}
// RinkebyTrustedCheckpoint contains the light client trusted checkpoint for the Rinkeby test network.
RinkebyTrustedCheckpoint = &TrustedCheckpoint{
SectionIndex: 232,
SectionHead: common.HexToHash("0x8170fca4039b11a008c11f9996ff112151cbb17411437bb2f86288e11158b2f0"),
CHTRoot: common.HexToHash("0x4526560d92ae1b3a6d3ee780c3ad289ba2bbf1b5da58d9ea107f2f26412b631f"),
BloomRoot: common.HexToHash("0x82a889098a35d6a21ea8894d35a1db69b94bad61b988bbe5ae4601437320e331"),
}
// RinkebyCheckpointOracle contains a set of configs for the Rinkeby test network oracle.
RinkebyCheckpointOracle = &CheckpointOracleConfig{
Address: common.HexToAddress("0xebe8eFA441B9302A0d7eaECc277c09d20D684540"),
Signers: []common.Address{
common.HexToAddress("0xd9c9cd5f6779558b6e0ed4e6acf6b1947e7fa1f3"), // Peter
common.HexToAddress("0x78d1aD571A1A09D60D9BBf25894b44e4C8859595"), // Martin
common.HexToAddress("0x286834935f4A8Cfb4FF4C77D5770C2775aE2b0E7"), // Zsolt
common.HexToAddress("0xb86e2B0Ab5A4B1373e40c51A7C712c70Ba2f9f8E"), // Gary
},
Threshold: 2,
}
// GoerliChainConfig contains the chain parameters to run a node on the Görli test network.
GoerliChainConfig = &ChainConfig{
ChainID: big.NewInt(5),
HomesteadBlock: big.NewInt(0),
DAOForkBlock: nil,
DAOForkSupport: true,
EIP150Block: big.NewInt(0),
EIP155Block: big.NewInt(0),
EIP158Block: big.NewInt(0),
ByzantiumBlock: big.NewInt(0),
ConstantinopleBlock: big.NewInt(0),
PetersburgBlock: big.NewInt(0),
IstanbulBlock: big.NewInt(1561651),
MuirGlacierBlock: nil,
Clique: &CliqueConfig{
Period: 15,
Epoch: 30000,
},
}
// GoerliTrustedCheckpoint contains the light client trusted checkpoint for the Görli test network.
GoerliTrustedCheckpoint = &TrustedCheckpoint{
SectionIndex: 116,
SectionHead: common.HexToHash("0xf2d200f636f213c9c7bb4e747ff564813da7708253037103aef3d8be5203c5e1"),
CHTRoot: common.HexToHash("0xb0ac83e2ccf6c2776945e099c4e3df50fe6200499c8b2045c34cafdf57d15087"),
BloomRoot: common.HexToHash("0xfb580ad1c611230a4bfc56534f58bcb156d028bc6ce70e35403dc019c7c02d90"),
}
// GoerliCheckpointOracle contains a set of configs for the Goerli test network oracle.
GoerliCheckpointOracle = &CheckpointOracleConfig{
Address: common.HexToAddress("0x18CA0E045F0D772a851BC7e48357Bcaab0a0795D"),
Signers: []common.Address{
common.HexToAddress("0x4769bcaD07e3b938B7f43EB7D278Bc7Cb9efFb38"), // Peter
common.HexToAddress("0x78d1aD571A1A09D60D9BBf25894b44e4C8859595"), // Martin
common.HexToAddress("0x286834935f4A8Cfb4FF4C77D5770C2775aE2b0E7"), // Zsolt
common.HexToAddress("0xb86e2B0Ab5A4B1373e40c51A7C712c70Ba2f9f8E"), // Gary
common.HexToAddress("0x0DF8fa387C602AE62559cC4aFa4972A7045d6707"), // Guillaume
},
Threshold: 2,
}
// YoloV2ChainConfig contains the chain parameters to run a node on the YOLOv2 test network.
YoloV2ChainConfig = &ChainConfig{
ChainID: big.NewInt(133519467574834),
HomesteadBlock: big.NewInt(0),
DAOForkBlock: nil,
DAOForkSupport: true,
EIP150Block: big.NewInt(0),
EIP155Block: big.NewInt(0),
EIP158Block: big.NewInt(0),
ByzantiumBlock: big.NewInt(0),
ConstantinopleBlock: big.NewInt(0),
PetersburgBlock: big.NewInt(0),
IstanbulBlock: big.NewInt(0),
MuirGlacierBlock: nil,
YoloV2Block: big.NewInt(0),
Clique: &CliqueConfig{
Period: 15,
Epoch: 30000,
},
}
// AllEthashProtocolChanges contains every protocol change (EIPs) introduced
// and accepted by the Ethereum core developers into the Ethash consensus.
//
// This configuration is intentionally not using keyed fields to force anyone
// adding flags to the config to also have to set these fields.
AllEthashProtocolChanges = &ChainConfig{big.NewInt(1337), big.NewInt(0), nil, false, big.NewInt(0), common.Hash{}, big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), nil, nil, nil, new(EthashConfig), nil, nil, false, 32, 35, big.NewInt(0), big.NewInt(0), nil, nil, false, nil}
// AllCliqueProtocolChanges contains every protocol change (EIPs) introduced
// and accepted by the Ethereum core developers into the Clique consensus.
//
// This configuration is intentionally not using keyed fields to force anyone
// adding flags to the config to also have to set these fields.
AllCliqueProtocolChanges = &ChainConfig{big.NewInt(1337), big.NewInt(0), nil, false, big.NewInt(0), common.Hash{}, big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), nil, nil, nil, nil, &CliqueConfig{Period: 0, Epoch: 30000}, nil, false, 32, 32, big.NewInt(0), big.NewInt(0), nil, nil, false, nil}
TestChainConfig = &ChainConfig{big.NewInt(10), big.NewInt(0), nil, false, big.NewInt(0), common.Hash{}, big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), nil, nil, nil, new(EthashConfig), nil, nil, false, 32, 32, big.NewInt(0), big.NewInt(0), nil, nil, false, nil}
TestRules = TestChainConfig.Rules(new(big.Int))
QuorumTestChainConfig = &ChainConfig{big.NewInt(10), big.NewInt(0), nil, false, big.NewInt(0), common.Hash{}, big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), nil, nil, nil, new(EthashConfig), nil, nil, true, 64, 32, big.NewInt(0), big.NewInt(0), nil, big.NewInt(0), false, nil}
QuorumMPSTestChainConfig = &ChainConfig{big.NewInt(10), big.NewInt(0), nil, false, big.NewInt(0), common.Hash{}, big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), big.NewInt(0), nil, nil, nil, new(EthashConfig), nil, nil, true, 64, 32, big.NewInt(0), big.NewInt(0), nil, big.NewInt(0), true, nil}
)
// TrustedCheckpoint represents a set of post-processed trie roots (CHT and
// BloomTrie) associated with the appropriate section index and head hash. It is
// used to start light syncing from this checkpoint and avoid downloading the
// entire header chain while still being able to securely access old headers/logs.
type TrustedCheckpoint struct {
SectionIndex uint64 `json:"sectionIndex"`
SectionHead common.Hash `json:"sectionHead"`
CHTRoot common.Hash `json:"chtRoot"`
BloomRoot common.Hash `json:"bloomRoot"`
}
// HashEqual reports whether the checkpoint's own hash equals the given one.
func (c *TrustedCheckpoint) HashEqual(hash common.Hash) bool {
if c.Empty() {
return hash == common.Hash{}
}
return c.Hash() == hash
}
// Hash returns the hash of the checkpoint's four key fields (index, sectionHead, chtRoot and bloomRoot).
func (c *TrustedCheckpoint) Hash() common.Hash {
buf := make([]byte, 8+3*common.HashLength)
binary.BigEndian.PutUint64(buf, c.SectionIndex)
copy(buf[8:], c.SectionHead.Bytes())
copy(buf[8+common.HashLength:], c.CHTRoot.Bytes())
copy(buf[8+2*common.HashLength:], c.BloomRoot.Bytes())
return crypto.Keccak256Hash(buf)
}
// Empty reports whether the checkpoint is regarded as empty.
func (c *TrustedCheckpoint) Empty() bool {
return c.SectionHead == (common.Hash{}) || c.CHTRoot == (common.Hash{}) || c.BloomRoot == (common.Hash{})
}
// CheckpointOracleConfig represents the configuration of the checkpoint
// contract (which acts as an oracle), used for light client checkpoint syncing.
type CheckpointOracleConfig struct {
Address common.Address `json:"address"`
Signers []common.Address `json:"signers"`
Threshold uint64 `json:"threshold"`
}
type MaxCodeConfigStruct struct {
Block *big.Int `json:"block,omitempty"`
Size uint64 `json:"size,omitempty"`
}
// ChainConfig is the core config which determines the blockchain settings.
//
// ChainConfig is stored in the database on a per block basis. This means
// that any network, identified by its genesis block, can have its own
// set of configuration options.
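//
// For example (illustrative), the config section of a genesis file:
//
//	"config": {
//	  "chainId": 1337,
//	  "homesteadBlock": 0,
//	  "eip150Block": 0,
//	  "eip155Block": 0,
//	  "eip158Block": 0,
//	  "byzantiumBlock": 0,
//	  "clique": { "period": 15, "epoch": 30000 }
//	}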
type ChainConfig struct {
ChainID *big.Int `json:"chainId"` // chainId identifies the current chain and is used for replay protection
HomesteadBlock *big.Int `json:"homesteadBlock,omitempty"` // Homestead switch block (nil = no fork, 0 = already homestead)
DAOForkBlock *big.Int `json:"daoForkBlock,omitempty"` // TheDAO hard-fork switch block (nil = no fork)
DAOForkSupport bool `json:"daoForkSupport,omitempty"` // Whether the nodes supports or opposes the DAO hard-fork
// EIP150 implements the Gas price changes (https://github.com/ethereum/EIPs/issues/150)
EIP150Block *big.Int `json:"eip150Block,omitempty"` // EIP150 HF block (nil = no fork)
EIP150Hash common.Hash `json:"eip150Hash,omitempty"` // EIP150 HF hash (needed for header only clients as only gas pricing changed)
EIP155Block *big.Int `json:"eip155Block,omitempty"` // EIP155 HF block
EIP158Block *big.Int `json:"eip158Block,omitempty"` // EIP158 HF block
ByzantiumBlock *big.Int `json:"byzantiumBlock,omitempty"` // Byzantium switch block (nil = no fork, 0 = already on byzantium)
ConstantinopleBlock *big.Int `json:"constantinopleBlock,omitempty"` // Constantinople switch block (nil = no fork, 0 = already activated)
PetersburgBlock *big.Int `json:"petersburgBlock,omitempty"` // Petersburg switch block (nil = same as Constantinople)
IstanbulBlock *big.Int `json:"istanbulBlock,omitempty"` // Istanbul switch block (nil = no fork, 0 = already on istanbul)
MuirGlacierBlock *big.Int `json:"muirGlacierBlock,omitempty"` // Eip-2384 (bomb delay) switch block (nil = no fork, 0 = already activated)
YoloV2Block *big.Int `json:"yoloV2Block,omitempty"` // YOLO v2: Gas repricings TODO @holiman add EIP references
EWASMBlock *big.Int `json:"ewasmBlock,omitempty"` // EWASM switch block (nil = no fork, 0 = already activated)
// Various consensus engines
Ethash *EthashConfig `json:"ethash,omitempty"`
Clique *CliqueConfig `json:"clique,omitempty"`
Istanbul *IstanbulConfig `json:"istanbul,omitempty"` // Quorum
// Start of Quorum specific configs
IsQuorum bool `json:"isQuorum"` // Quorum flag
TransactionSizeLimit uint64 `json:"txnSizeLimit"` // Quorum - transaction size limit
MaxCodeSize uint64 `json:"maxCodeSize"` // Quorum - maximum CodeSize of contract
// QIP714Block implements the permissions related changes
QIP714Block *big.Int `json:"qip714Block,omitempty"`
MaxCodeSizeChangeBlock *big.Int `json:"maxCodeSizeChangeBlock,omitempty"`
// to track multiple changes to maxCodeSize
MaxCodeSizeConfig []MaxCodeConfigStruct `json:"maxCodeSizeConfig,omitempty"`
PrivacyEnhancementsBlock *big.Int `json:"privacyEnhancementsBlock,omitempty"`
IsMPS bool `json:"isMPS"` // multiple private states flag
PrivacyPrecompileBlock *big.Int `json:"privacyPrecompileBlock,omitempty"` // Switch block to enable privacy precompiled contract to process privacy marker transactions
// End of Quorum specific configs
}
// EthashConfig is the consensus engine configs for proof-of-work based sealing.
type EthashConfig struct{}
// String implements the stringer interface, returning the consensus engine details.
func (c *EthashConfig) String() string {
return "ethash"
}
// CliqueConfig is the consensus engine configs for proof-of-authority based sealing.
type CliqueConfig struct {
Period uint64 `json:"period"` // Number of seconds between blocks to enforce
Epoch uint64 `json:"epoch"` // Epoch length to reset votes and checkpoint
AllowedFutureBlockTime uint64 `json:"allowedFutureBlockTime"` // Max time (in seconds) from current time allowed for blocks, before they're considered future blocks
}
// String implements the stringer interface, returning the consensus engine details.
func (c *CliqueConfig) String() string {
return "clique"
}
// IstanbulConfig is the consensus engine configs for Istanbul based sealing.
type IstanbulConfig struct {
Epoch uint64 `json:"epoch"` // Epoch length to reset votes and checkpoint
ProposerPolicy uint64 `json:"policy"` // The policy for proposer selection
Ceil2Nby3Block *big.Int `json:"ceil2Nby3Block,omitempty"` // Number of confirmations required to move from one state to next [2F + 1 to Ceil(2N/3)]
TestQBFTBlock *big.Int `json:"testQBFTBlock,omitempty"` // Fork block at which block confirmations are done using qbft consensus instead of ibft
}
// String implements the stringer interface, returning the consensus engine details.
func (c *IstanbulConfig) String() string {
return "istanbul"
}
// String implements the fmt.Stringer interface.
func (c *ChainConfig) String() string {
var engine interface{}
switch {
case c.Ethash != nil:
engine = c.Ethash
case c.Clique != nil:
engine = c.Clique
case c.Istanbul != nil:
engine = c.Istanbul
default:
engine = "unknown"
}
return fmt.Sprintf("{ChainID: %v Homestead: %v DAO: %v DAOSupport: %v EIP150: %v EIP155: %v EIP158: %v Byzantium: %v IsQuorum: %v Constantinople: %v TransactionSizeLimit: %v MaxCodeSize: %v Petersburg: %v Istanbul: %v, Muir Glacier: %v YOLO v2: %v PrivacyEnhancements: %v PrivacyPrecompile: %v Engine: %v}",
c.ChainID,
c.HomesteadBlock,
c.DAOForkBlock,
c.DAOForkSupport,
c.EIP150Block,
c.EIP155Block,
c.EIP158Block,
c.ByzantiumBlock,
c.IsQuorum,
c.ConstantinopleBlock,
c.TransactionSizeLimit,
c.MaxCodeSize,
c.PetersburgBlock,
c.IstanbulBlock,
c.MuirGlacierBlock,
c.YoloV2Block,
c.PrivacyEnhancementsBlock,
c.PrivacyPrecompileBlock,
engine,
)
}
// Quorum - validate code size and transaction size limit
func (c *ChainConfig) IsValid() error {
if c.TransactionSizeLimit < 32 || c.TransactionSizeLimit > 128 {
return errors.New("Genesis transaction size limit must be between 32 and 128")
}
if c.MaxCodeSize != 0 && (c.MaxCodeSize < 24 || c.MaxCodeSize > 128) {
return errors.New("Genesis max code size must be between 24 and 128")
}
return nil
}
// IsHomestead returns whether num is either equal to the homestead block or greater.
func (c *ChainConfig) IsHomestead(num *big.Int) bool {
return isForked(c.HomesteadBlock, num)
}
// IsDAOFork returns whether num is either equal to the DAO fork block or greater.
func (c *ChainConfig) IsDAOFork(num *big.Int) bool {
return isForked(c.DAOForkBlock, num)
}
// IsEIP150 returns whether num is either equal to the EIP150 fork block or greater.
func (c *ChainConfig) IsEIP150(num *big.Int) bool {
return isForked(c.EIP150Block, num)
}
// IsEIP155 returns whether num is either equal to the EIP155 fork block or greater.
func (c *ChainConfig) IsEIP155(num *big.Int) bool {
return isForked(c.EIP155Block, num)
}
// IsEIP158 returns whether num is either equal to the EIP158 fork block or greater.
func (c *ChainConfig) IsEIP158(num *big.Int) bool {
return isForked(c.EIP158Block, num)
}
// IsByzantium returns whether num is either equal to the Byzantium fork block or greater.
func (c *ChainConfig) IsByzantium(num *big.Int) bool {
return isForked(c.ByzantiumBlock, num)
}
// IsConstantinople returns whether num is either equal to the Constantinople fork block or greater.
func (c *ChainConfig) IsConstantinople(num *big.Int) bool {
return isForked(c.ConstantinopleBlock, num)
}
// IsMuirGlacier returns whether num is either equal to the Muir Glacier (EIP-2384) fork block or greater.
func (c *ChainConfig) IsMuirGlacier(num *big.Int) bool {
return isForked(c.MuirGlacierBlock, num)
}
// IsPetersburg returns whether num is either
// - equal to or greater than the PetersburgBlock fork block,
// - OR is nil, and Constantinople is active
func (c *ChainConfig) IsPetersburg(num *big.Int) bool {
return isForked(c.PetersburgBlock, num) || c.PetersburgBlock == nil && isForked(c.ConstantinopleBlock, num)
}
// IsIstanbul returns whether num is either equal to the Istanbul fork block or greater.
func (c *ChainConfig) IsIstanbul(num *big.Int) bool {
return isForked(c.IstanbulBlock, num)
}
// IsYoloV2 returns whether num is either equal to the YoloV2 fork block or greater.
func (c *ChainConfig) IsYoloV2(num *big.Int) bool {
return isForked(c.YoloV2Block, num)
}
// IsEWASM returns whether num represents a block number after the EWASM fork
func (c *ChainConfig) IsEWASM(num *big.Int) bool {
return isForked(c.EWASMBlock, num)
}
// Quorum
//
// IsQIP714 returns whether num represents a block number where permissions is enabled
func (c *ChainConfig) IsQIP714(num *big.Int) bool {
return isForked(c.QIP714Block, num)
}
// IsMaxCodeSizeChangeBlock returns whether num represents a block number
// where maxCodeSize change was done
func (c *ChainConfig) IsMaxCodeSizeChangeBlock(num *big.Int) bool {
return isForked(c.MaxCodeSizeChangeBlock, num)
}
// Quorum
//
// GetMaxCodeSize returns maxCodeSize for the given block number
func (c *ChainConfig) GetMaxCodeSize(num *big.Int) int {
maxCodeSize := MaxCodeSize
if len(c.MaxCodeSizeConfig) > 0 {
for _, data := range c.MaxCodeSizeConfig {
if data.Block.Cmp(num) > 0 {
break
}
maxCodeSize = int(data.Size) * 1024
}
} else if c.MaxCodeSize > 0 {
if c.MaxCodeSizeChangeBlock != nil && c.MaxCodeSizeChangeBlock.Cmp(big.NewInt(0)) >= 0 {
if c.IsMaxCodeSizeChangeBlock(num) {
maxCodeSize = int(c.MaxCodeSize) * 1024
}
} else {
maxCodeSize = int(c.MaxCodeSize) * 1024
}
}
return maxCodeSize
}
// Quorum
//
// CheckMaxCodeConfigData validates the maxCodeSizeConfig data passed in the config.
func (c *ChainConfig) CheckMaxCodeConfigData() error {
if c.MaxCodeSize != 0 || (c.MaxCodeSizeChangeBlock != nil && c.MaxCodeSizeChangeBlock.Cmp(big.NewInt(0)) >= 0) {
return errors.New("maxCodeSize & maxCodeSizeChangeBlock deprecated. Consider using maxCodeSizeConfig")
}
// validate max code size data
// 1. Code size should not be less than 24 and greater than 128
// 2. block entries are in ascending order
prevBlock := big.NewInt(0)
for _, data := range c.MaxCodeSizeConfig {
if data.Size < 24 || data.Size > 128 {
return errors.New("Genesis max code size must be between 24 and 128")
}
if data.Block == nil {
return errors.New("Block number not given in maxCodeSizeConfig data")
}
if data.Block.Cmp(prevBlock) < 0 {
return errors.New("invalid maxCodeSize detail, block order has to be ascending")
}
prevBlock = data.Block
}
return nil
}
// Quorum
//
// isMaxCodeSizeConfigCompatible checks whether the proposed changes to
// maxCodeSizeConfig are compatible with the already existing genesis data.
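//
// For example (illustrative), a genesis config might specify:
//
//	"maxCodeSizeConfig": [
//	  { "block": 0, "size": 32 },
//	  { "block": 100, "size": 64 }
//	]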
func isMaxCodeSizeConfigCompatible(c1, c2 *ChainConfig, head *big.Int) (error, *big.Int, *big.Int) {
if len(c1.MaxCodeSizeConfig) == 0 && len(c2.MaxCodeSizeConfig) == 0 {
// maxCodeSizeConfig not used. return
return nil, big.NewInt(0), big.NewInt(0)
}
// existing config had maxCodeSizeConfig and new one does not have the same return error
if len(c1.MaxCodeSizeConfig) > 0 && len(c2.MaxCodeSizeConfig) == 0 {
return fmt.Errorf("genesis file missing max code size information"), head, head
}
if len(c2.MaxCodeSizeConfig) > 0 && len(c1.MaxCodeSizeConfig) == 0 {
return nil, big.NewInt(0), big.NewInt(0)
}
// check the number of records below current head in both configs
// if they do not match throw an error
c1RecsBelowHead := 0
for _, data := range c1.MaxCodeSizeConfig {
if data.Block.Cmp(head) <= 0 {
c1RecsBelowHead++
} else {
break
}
}
c2RecsBelowHead := 0
for _, data := range c2.MaxCodeSizeConfig {
if data.Block.Cmp(head) <= 0 {
c2RecsBelowHead++
} else {
break
}
}
// if the count of past records is not matching return error
if c1RecsBelowHead != c2RecsBelowHead {
return errors.New("maxCodeSizeConfig data incompatible. updating maxCodeSize for past"), head, head
}
// validate that each past record is matching exactly. if not return error
for i := 0; i < c1RecsBelowHead; i++ {
if c1.MaxCodeSizeConfig[i].Block.Cmp(c2.MaxCodeSizeConfig[i].Block) != 0 ||
c1.MaxCodeSizeConfig[i].Size != c2.MaxCodeSizeConfig[i].Size {
return errors.New("maxCodeSizeConfig data incompatible. maxCodeSize historical data does not match"), head, head
}
}
return nil, big.NewInt(0), big.NewInt(0)
}
// Quorum
//
// IsPrivacyEnhancementsEnabled returns whether num represents a block number after the PrivacyEnhancementsEnabled fork
func (c *ChainConfig) IsPrivacyEnhancementsEnabled(num *big.Int) bool {
return isForked(c.PrivacyEnhancementsBlock, num)
}
// Quorum
//
// Check whether num represents a block number after the PrivacyPrecompileBlock
func (c *ChainConfig) IsPrivacyPrecompile(num *big.Int) bool {
return isForked(c.PrivacyPrecompileBlock, num)
}
// CheckCompatible checks whether scheduled fork transitions have been imported
// with a mismatching chain configuration.
func (c *ChainConfig) CheckCompatible(newcfg *ChainConfig, height uint64, isQuorumEIP155Activated bool) *ConfigCompatError {
bhead := new(big.Int).SetUint64(height)
// check if the maxCodesize data passed is compatible 1st
// this is being handled separately as it can have breaks
// at multiple block heights and cannot be handled with in
// checkCompatible
// compare the maxCodeSize data between the old and new config
err, cBlock, newCfgBlock := isMaxCodeSizeConfigCompatible(c, newcfg, bhead)
if err != nil {
return newCompatError(err.Error(), cBlock, newCfgBlock)
}
// Iterate checkCompatible to find the lowest conflict.
var lasterr *ConfigCompatError
for {
err := c.checkCompatible(newcfg, bhead, isQuorumEIP155Activated)
if err == nil || (lasterr != nil && err.RewindTo == lasterr.RewindTo) {
break
}
lasterr = err
bhead.SetUint64(err.RewindTo)
}
return lasterr
}
// CheckConfigForkOrder checks that we don't "skip" any forks; geth isn't
// pluggable enough to guarantee that forks can be implemented in a different
// order than they are on the official networks.
func (c *ChainConfig) CheckConfigForkOrder() error {
type fork struct {
name string
block *big.Int
optional bool // if true, the fork may be nil and next fork is still allowed
}
var lastFork fork
for _, cur := range []fork{
{name: "homesteadBlock", block: c.HomesteadBlock},
{name: "daoForkBlock", block: c.DAOForkBlock, optional: true},
{name: "eip150Block", block: c.EIP150Block},
{name: "eip155Block", block: c.EIP155Block},
{name: "eip158Block", block: c.EIP158Block},
{name: "byzantiumBlock", block: c.ByzantiumBlock},
{name: "constantinopleBlock", block: c.ConstantinopleBlock},
{name: "petersburgBlock", block: c.PetersburgBlock},
{name: "istanbulBlock", block: c.IstanbulBlock},
{name: "muirGlacierBlock", block: c.MuirGlacierBlock, optional: true},
{name: "yoloV2Block", block: c.YoloV2Block},
} {
if lastFork.name != "" {
// Next one must be higher number
if lastFork.block == nil && cur.block != nil {
return fmt.Errorf("unsupported fork ordering: %v not enabled, but %v enabled at %v",
lastFork.name, cur.name, cur.block)
}
if lastFork.block != nil && cur.block != nil {
if lastFork.block.Cmp(cur.block) > 0 {
return fmt.Errorf("unsupported fork ordering: %v enabled at %v, but %v enabled at %v",
lastFork.name, lastFork.block, cur.name, cur.block)
}
}
}
// If it was optional and not set, then ignore it
if !cur.optional || cur.block != nil {
lastFork = cur
}
}
return nil
}
func (c *ChainConfig) checkCompatible(newcfg *ChainConfig, head *big.Int, isQuorumEIP155Activated bool) *ConfigCompatError {
if isForkIncompatible(c.HomesteadBlock, newcfg.HomesteadBlock, head) {
return newCompatError("Homestead fork block", c.HomesteadBlock, newcfg.HomesteadBlock)
}
if isForkIncompatible(c.DAOForkBlock, newcfg.DAOForkBlock, head) {
return newCompatError("DAO fork block", c.DAOForkBlock, newcfg.DAOForkBlock)
}
if c.IsDAOFork(head) && c.DAOForkSupport != newcfg.DAOForkSupport {
return newCompatError("DAO fork support flag", c.DAOForkBlock, newcfg.DAOForkBlock)
}
if isForkIncompatible(c.EIP150Block, newcfg.EIP150Block, head) {
return newCompatError("EIP150 fork block", c.EIP150Block, newcfg.EIP150Block)
}
if isQuorumEIP155Activated && c.ChainID != nil && isForkIncompatible(c.EIP155Block, newcfg.EIP155Block, head) {
return newCompatError("EIP155 fork block", c.EIP155Block, newcfg.EIP155Block)
}
if isQuorumEIP155Activated && c.ChainID != nil && c.IsEIP155(head) && !configNumEqual(c.ChainID, newcfg.ChainID) {
return newCompatError("EIP155 chain ID", c.ChainID, newcfg.ChainID)
}
if isForkIncompatible(c.EIP158Block, newcfg.EIP158Block, head) {
return newCompatError("EIP158 fork block", c.EIP158Block, newcfg.EIP158Block)
}
if c.IsEIP158(head) && !configNumEqual(c.ChainID, newcfg.ChainID) {
return newCompatError("EIP158 chain ID", c.EIP158Block, newcfg.EIP158Block)
}
if isForkIncompatible(c.ByzantiumBlock, newcfg.ByzantiumBlock, head) {
return newCompatError("Byzantium fork block", c.ByzantiumBlock, newcfg.ByzantiumBlock)
}
if isForkIncompatible(c.ConstantinopleBlock, newcfg.ConstantinopleBlock, head) {
return newCompatError("Constantinople fork block", c.ConstantinopleBlock, newcfg.ConstantinopleBlock)
}
if isForkIncompatible(c.PetersburgBlock, newcfg.PetersburgBlock, head) {
// the only case where we allow Petersburg to be set in the past is if it is equal to Constantinople
// mainly to satisfy fork ordering requirements which state that Petersburg fork be set if Constantinople fork is set
if isForkIncompatible(c.ConstantinopleBlock, newcfg.PetersburgBlock, head) {
return newCompatError("Petersburg fork block", c.PetersburgBlock, newcfg.PetersburgBlock)
}
}
if isForkIncompatible(c.IstanbulBlock, newcfg.IstanbulBlock, head) {
return newCompatError("Istanbul fork block", c.IstanbulBlock, newcfg.IstanbulBlock)
}
if isForkIncompatible(c.MuirGlacierBlock, newcfg.MuirGlacierBlock, head) {
return newCompatError("Muir Glacier fork block", c.MuirGlacierBlock, newcfg.MuirGlacierBlock)
}
if isForkIncompatible(c.YoloV2Block, newcfg.YoloV2Block, head) {
return newCompatError("YOLOv2 fork block", c.YoloV2Block, newcfg.YoloV2Block)
}
if isForkIncompatible(c.EWASMBlock, newcfg.EWASMBlock, head) {
return newCompatError("ewasm fork block", c.EWASMBlock, newcfg.EWASMBlock)
}
if c.Istanbul != nil && newcfg.Istanbul != nil && isForkIncompatible(c.Istanbul.Ceil2Nby3Block, newcfg.Istanbul.Ceil2Nby3Block, head) {
return newCompatError("Ceil 2N/3 fork block", c.Istanbul.Ceil2Nby3Block, newcfg.Istanbul.Ceil2Nby3Block)
}
if c.Istanbul != nil && newcfg.Istanbul != nil && isForkIncompatible(c.Istanbul.TestQBFTBlock, newcfg.Istanbul.TestQBFTBlock, head) {
return newCompatError("Test QBFT fork block", c.Istanbul.TestQBFTBlock, newcfg.Istanbul.TestQBFTBlock)
}
if isForkIncompatible(c.QIP714Block, newcfg.QIP714Block, head) {
return newCompatError("permissions fork block", c.QIP714Block, newcfg.QIP714Block)
}
if newcfg.MaxCodeSizeChangeBlock != nil && isForkIncompatible(c.MaxCodeSizeChangeBlock, newcfg.MaxCodeSizeChangeBlock, head) {
return newCompatError("max code size change fork block", c.MaxCodeSizeChangeBlock, newcfg.MaxCodeSizeChangeBlock)
}
if isForkIncompatible(c.PrivacyEnhancementsBlock, newcfg.PrivacyEnhancementsBlock, head) {
return newCompatError("Privacy Enhancements fork block", c.PrivacyEnhancementsBlock, newcfg.PrivacyEnhancementsBlock)
}
if isForkIncompatible(c.PrivacyPrecompileBlock, newcfg.PrivacyPrecompileBlock, head) {
return newCompatError("Privacy Precompile fork block", c.PrivacyPrecompileBlock, newcfg.PrivacyPrecompileBlock)
}
return nil
}
// isForkIncompatible returns true if a fork scheduled at s1 cannot be rescheduled to
// block s2 because head is already past the fork.
func isForkIncompatible(s1, s2, head *big.Int) bool {
return (isForked(s1, head) || isForked(s2, head)) && !configNumEqual(s1, s2)
}
// isForked returns whether a fork scheduled at block s is active at the given head block.
func isForked(s, head *big.Int) bool {
if s == nil || head == nil {
return false
}
return s.Cmp(head) <= 0
}
func configNumEqual(x, y *big.Int) bool {
if x == nil {
return y == nil
}
if y == nil {
return x == nil
}
return x.Cmp(y) == 0
}
// ConfigCompatError is raised if the locally-stored blockchain is initialised with a
// ChainConfig that would alter the past.
type ConfigCompatError struct {
What string
// block numbers of the stored and new configurations
StoredConfig, NewConfig *big.Int
// the block number to which the local chain must be rewound to correct the error
RewindTo uint64
}
func newCompatError(what string, storedblock, newblock *big.Int) *ConfigCompatError {
var rew *big.Int
switch {
case storedblock == nil:
rew = newblock
case newblock == nil || storedblock.Cmp(newblock) < 0:
rew = storedblock
default:
rew = newblock
}
err := &ConfigCompatError{what, storedblock, newblock, 0}
if rew != nil && rew.Sign() > 0 {
err.RewindTo = rew.Uint64() - 1
}
return err
}
func (err *ConfigCompatError) Error() string {
return fmt.Sprintf("mismatching %s in database (have %d, want %d, rewindto %d)", err.What, err.StoredConfig, err.NewConfig, err.RewindTo)
}
// Rules wraps ChainConfig and is merely syntactic sugar or can be used for functions
// that do not have or require information about the block.
//
// Rules is a one time interface meaning that it shouldn't be used in between transition
// phases.
type Rules struct {
ChainID *big.Int
IsHomestead, IsEIP150, IsEIP155, IsEIP158 bool
IsByzantium, IsConstantinople, IsPetersburg, IsIstanbul bool
IsYoloV2 bool
IsPrivacyEnhancementsEnabled bool // Quorum
IsPrivacyPrecompile bool // Quorum
}
// Rules ensures c's ChainID is not nil.
func (c *ChainConfig) Rules(num *big.Int) Rules {
chainID := c.ChainID
if chainID == nil {
chainID = new(big.Int)
}
return Rules{
ChainID: new(big.Int).Set(chainID),
IsHomestead: c.IsHomestead(num),
IsEIP150: c.IsEIP150(num),
IsEIP155: c.IsEIP155(num),
IsEIP158: c.IsEIP158(num),
IsByzantium: c.IsByzantium(num),
IsConstantinople: c.IsConstantinople(num),
IsPetersburg: c.IsPetersburg(num),
IsIstanbul: c.IsIstanbul(num),
IsYoloV2: c.IsYoloV2(num),
IsPrivacyEnhancementsEnabled: c.IsPrivacyEnhancementsEnabled(num), // Quorum
IsPrivacyPrecompile: c.IsPrivacyPrecompile(num), // Quorum
}
} | BloomRoot: common.HexToHash("0x8b0e7895bc39840d8dac857e26bdf3d0a07684b0b962b252546659e0337a9f70"),
}
// MainnetCheckpointOracle contains a set of configs for the main network oracle. |
sysinit.go | package docker
import (
"flag"
"fmt"
"log"
"os"
"os/exec"
"os/user"
"strconv"
"syscall"
)
// Setup networking
func setupNetworking(gw string) {
if gw == "" {
return
}
cmd := exec.Command("/sbin/route", "add", "default", "gw", gw)
if err := cmd.Run(); err != nil {
log.Fatalf("Unable to set up networking: %v", err)
}
}
// Takes care of dropping privileges to the desired user
func changeUser(u string) {
if u == "" {
return
}
userent, err := user.LookupId(u)
if err != nil {
userent, err = user.Lookup(u)
}
if err != nil {
log.Fatalf("Unable to find user %v: %v", u, err)
}
uid, err := strconv.Atoi(userent.Uid)
if err != nil {
log.Fatalf("Invalid uid: %v", userent.Uid)
}
gid, err := strconv.Atoi(userent.Gid)
if err != nil {
log.Fatalf("Invalid gid: %v", userent.Gid)
}
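// Drop the group before the user: once Setuid succeeds, the process may no
// longer have the privilege to change its group with Setgid.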
if err := syscall.Setgid(gid); err != nil {
log.Fatalf("setgid failed: %v", err)
}
if err := syscall.Setuid(uid); err != nil {
log.Fatalf("setuid failed: %v", err)
}
}
func executeProgram(name string, args []string) {
path, err := exec.LookPath(name)
if err != nil {
log.Printf("Unable to locate %v", name)
os.Exit(127)
}
if err := syscall.Exec(path, args, os.Environ()); err != nil {
panic(err)
}
}
// Sys Init code
// This code is run INSIDE the container and is responsible for setting
// up the environment before running the actual process
func SysInit() {
if len(os.Args) <= 1 {
fmt.Println("You should not invoke docker-init manually")
os.Exit(1)
}
var u = flag.String("u", "", "username or uid")
var gw = flag.String("g", "", "gateway address")
flag.Parse()
| setupNetworking(*gw)
changeUser(*u)
executeProgram(flag.Arg(0), flag.Args())
} |
|
deployment_controller_suite_test.go | /*
Copyright 2018 Pusher Ltd. and Wave Contributors
Licensed under the Apache License, Version 2.0 (the "License"); |
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package deployment
import (
"log"
"path/filepath"
"sync"
"testing"
"github.com/go-logr/glogr"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"github.com/wave-k8s/wave/pkg/apis"
"github.com/wave-k8s/wave/test/reporters"
"k8s.io/client-go/kubernetes/scheme"
"k8s.io/client-go/rest"
"sigs.k8s.io/controller-runtime/pkg/envtest"
"sigs.k8s.io/controller-runtime/pkg/manager"
"sigs.k8s.io/controller-runtime/pkg/reconcile"
logf "sigs.k8s.io/controller-runtime/pkg/runtime/log"
)
var cfg *rest.Config
func TestMain(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecsWithDefaultAndCustomReporters(t, "Wave Controller Suite", reporters.Reporters())
}
var t *envtest.Environment
var _ = BeforeSuite(func() {
t = &envtest.Environment{
CRDDirectoryPaths: []string{filepath.Join("..", "..", "..", "config", "crds")},
}
apis.AddToScheme(scheme.Scheme)
logf.SetLogger(glogr.New())
var err error
if cfg, err = t.Start(); err != nil {
log.Fatal(err)
}
})
var _ = AfterSuite(func() {
t.Stop()
})
// SetupTestReconcile returns a reconcile.Reconcile implementation that delegates to inner and
// writes the request to requests after Reconcile is finished.
func SetupTestReconcile(inner reconcile.Reconciler) (reconcile.Reconciler, chan reconcile.Request) {
requests := make(chan reconcile.Request)
fn := reconcile.Func(func(req reconcile.Request) (reconcile.Result, error) {
result, err := inner.Reconcile(req)
requests <- req
return result, err
})
return fn, requests
}
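// Illustrative wiring sketch (hypothetical names, not part of this suite):
// wrap a real reconciler so a test can block until a request is observed.
//
//	recFn, requests := SetupTestReconcile(newReconciler(mgr)) // newReconciler is hypothetical
//	stop, wg := StartTestManager(mgr)
//	defer func() { close(stop); wg.Wait() }()
//	// ... trigger an event, then: Eventually(requests, timeout).Should(Receive())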
// StartTestManager starts the given manager in a goroutine and returns a stop
// channel plus a WaitGroup that is released once the manager has stopped.
func StartTestManager(mgr manager.Manager) (chan struct{}, *sync.WaitGroup) {
stop := make(chan struct{})
wg := &sync.WaitGroup{}
wg.Add(1)
go func() {
defer GinkgoRecover()
Expect(mgr.Start(stop)).NotTo(HaveOccurred())
wg.Done()
}()
return stop, wg
} | you may not use this file except in compliance with the License.
You may obtain a copy of the License at |
gitContentProvider.ts | import { Strings } from "system";
import {
Disposable,
TextDocumentContentProvider,
Uri,
workspace
} from "vscode";
import { Container } from "../container";
export class | implements TextDocumentContentProvider, Disposable {
private readonly _disposable: Disposable;
constructor() {
this._disposable = Disposable.from(
workspace.registerTextDocumentContentProvider("codestream-git", this)
);
}
async provideTextDocumentContent(uri: Uri): Promise<string> {
const { path, sha } = Strings.parseGitUrl(uri);
const contents = await Container.agent.scm.getFileContentsAtRevision(undefined, path, sha);
return contents.content || "";
}
dispose() {
this._disposable.dispose();
}
}
export function toCSGitUri(uri: Uri, sha: string): Uri {
return uri.with({
scheme: "codestream-git",
query: JSON.stringify({
sha: sha,
shortSha: sha.substr(0, 7)
})
});
}
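// Illustrative usage sketch (the sha below is made up, not part of this file):
//
//   const gitUri = toCSGitUri(document.uri, "4f2d9c1a7b3e");
//   const doc = await workspace.openTextDocument(gitUri); // resolved by the provider above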
| GitContentProvider |
watch.rs | use std::{
collections::BTreeMap,
env, fs,
path::PathBuf,
sync::{mpsc::channel, Arc, Mutex},
time::Duration,
};
use anyhow::{Context, Result};
use cargo_metadata::{Package, Target};
use futures::{select, FutureExt};
use sha2::Digest;
use structopt::StructOpt;
use crate::{
atcoder::AtCoder,
metadata::{self, MetadataExt, PackageExt},
session_file, test_samples,
};
// use termion::raw::IntoRawMode;
// use tui::backend::TermionBackend;
// use tui::layout::{Constraint, Direction, Layout};
// use tui::style::{Color, Modifier, Style};
// use tui::widgets::{Block, Borders, Widget};
// use tui::Terminal;
#[derive(StructOpt, Debug)]
pub struct WatchOpt {
/// [cargo] Package to watch
#[structopt(short, long, value_name("SPEC"))]
package: Option<String>,
/// [cargo] Path to Cargo.toml
#[structopt(long, value_name("PATH"))]
manifest_path: Option<PathBuf>,
}
pub async fn watch(opt: WatchOpt) -> Result<()> {
// let stdout = io::stdout().into_raw_mode()?;
// let backend = TermionBackend::new(stdout);
// let mut terminal = Terminal::new(backend)?;
// terminal.clear();
// terminal.draw(|mut f| {
// let size = f.size();
// Block::default()
// .title("Block")
// .borders(Borders::ALL)
// .render(&mut f, size);
// })?;
// let conf = read_config()?;
let cwd = env::current_dir().with_context(|| "failed to get CWD")?;
let metadata = metadata::cargo_metadata(opt.manifest_path.as_deref(), &cwd)?;
let package = metadata.query_for_member(opt.package.as_deref())?.clone();
let atc = AtCoder::new(&session_file()?)?;
let atc = Arc::new(atc);
// let submission_fut = {
// let atc = atc.clone();
// let contest_id = contest_id.clone();
// tokio::spawn(async move { watch_submission_status(&atc, &contest_id).await })
// };
let file_watcher_fut = {
let atc = atc.clone();
tokio::spawn(async move { watch_filesystem(&package, &atc).await })
};
// let ui_fut = {
// tokio::spawn(async move {
// for ev in io::stdin().events() {
// let ev = ev?;
// if ev == Event::Key(Key::Char('q')) || ev == Event::Key(Key::Ctrl('c')) {
// break;
// }
// }
// let ret: Result<()> = Ok(());
// ret
// })
// };
select! {
// _ = submission_fut.fuse() => (),
_ = file_watcher_fut.fuse() => (),
// _ = ui_fut.fuse() => (),
};
Ok(())
}
async fn | (package: &Package, atc: &AtCoder) -> Result<()> {
use notify::{DebouncedEvent, RecommendedWatcher, RecursiveMode, Watcher};
let contest_info = atc.contest_info(&package.name).await?;
let (tx, rx) = channel();
let mut watcher: RecommendedWatcher = Watcher::new(tx, Duration::from_millis(150))?;
let rx = Arc::new(Mutex::new(rx));
for Target { src_path, .. } in package.all_bins() {
watcher.watch(src_path, RecursiveMode::NonRecursive)?;
}
let mut file_hash = BTreeMap::<String, _>::new();
loop {
let rx = rx.clone();
let pb = tokio::task::spawn_blocking(move || -> Option<PathBuf> {
if let DebouncedEvent::Write(pb) = rx.lock().unwrap().recv().unwrap() {
let pb = pb.canonicalize().ok()?;
let r = pb.strip_prefix(pb.parent()?).ok()?;
Some(r.to_owned())
} else {
None
}
})
.await?;
if pb.is_none() {
continue;
}
let pb = pb.unwrap();
let problem_id = pb.file_stem().unwrap().to_string_lossy().into_owned();
let problem = if let Some(problem) = contest_info.problem(&problem_id) {
problem
} else {
eprintln!("Problem `{}` is not contained in this contest", &problem_id);
continue;
};
let source = fs::read(&pb).with_context(|| format!("Failed to read {}", pb.display()))?;
let hash = sha2::Sha256::digest(&source);
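// Editors often fire write events without changing the contents, so compare
// content hashes and skip re-testing when the file is effectively unchanged.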
if file_hash.get(&problem_id) == Some(&hash) {
continue;
}
file_hash.insert(problem_id.clone(), hash);
let test_cases = atc.test_cases(&problem.url).await?;
let test_cases = test_cases.into_iter().enumerate().collect::<Vec<_>>();
let test_passed = test_samples(package, &problem_id, &test_cases, false, false)?;
if !test_passed {
continue;
}
// atc.submit(&contest_id, &problem_id, &String::from_utf8_lossy(&source))
// .await?;
}
}
| watch_filesystem |
args.rs | // use build_info::Version;
use build_info::{get_version, Version};
use clap::{App, AppSettings, Arg, ArgMatches, SubCommand};
pub fn get_matches() -> ArgMatches<'static> {
let version = get_version!();
App::new("ckb")
.author("Nervos Core Dev <[email protected]>")
.about("Nervos CKB - The Common Knowledge Base")
.version(version.short().as_str())
.long_version(version.long().as_str())
.setting(AppSettings::ArgRequiredElseHelp)
.arg(
Arg::with_name("config")
.short("c")
.long("config")
.value_name("CONFIG")
.takes_value(true)
.help("Specify the configuration file PATH. Tries ckb.json, nodes/default.json in working directory when omitted.")
)
.subcommand(run())
.subcommand(miner())
.subcommand(export())
.subcommand(import())
.subcommand(cli())
.get_matches()
}
fn run() -> App<'static, 'static> {
SubCommand::with_name("run").about("Running ckb node")
}
fn miner() -> App<'static, 'static> {
SubCommand::with_name("miner").about("Running ckb miner")
}
fn arg_format() -> Arg<'static, 'static> {
Arg::with_name("format")
.short("f")
.long("format")
.value_name("FORMAT")
.required(true)
.takes_value(true)
.help("Specify the format.")
}
fn export() -> App<'static, 'static> {
SubCommand::with_name("export")
.about("Export ckb data")
.arg(arg_format())
.arg(
Arg::with_name("target")
.short("t")
.long("target")
.value_name("PATH")
.required(true)
.index(1)
.help("Specify the export target path."),
)
}
fn import() -> App<'static, 'static> |
fn arg_private_key() -> Arg<'static, 'static> {
Arg::with_name("private-key")
.short("p")
.long("private-key")
.value_name("H256")
.help("Specify the private key")
.takes_value(true)
.required(true)
}
fn cli() -> App<'static, 'static> {
SubCommand::with_name("cli")
.about("Running ckb cli")
.setting(AppSettings::ArgRequiredElseHelp)
.subcommand(
SubCommand::with_name("sign")
.about("Sign transaction using sha3-secp256k1 defined in system cell")
.arg(arg_private_key())
.arg(
Arg::with_name("unsigned-transaction")
.short("u")
.long("unsigned-transaction")
.value_name("JSON")
.help("Specify the unsigned transaction json string")
.takes_value(true)
.required(true),
),
)
.subcommand(
SubCommand::with_name("type_hash")
.about("Generate script type hash using sha3-secp256k1 defined in system cell")
.arg(arg_private_key()),
)
.subcommand(SubCommand::with_name("keygen").about("Generate new key"))
}
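// Illustrative dispatch sketch (assumed caller, not part of this file):
//
//     match get_matches().subcommand() {
//         ("run", _) => { /* start the node */ }
//         ("miner", _) => { /* start the miner */ }
//         _ => {}
//     }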
| {
SubCommand::with_name("import")
.about("Import ckb data")
.arg(arg_format())
.arg(
Arg::with_name("source")
.short("s")
.long("source")
.value_name("PATH")
.required(true)
.index(1)
.help("Specify the exported data path."),
)
} |
PoolCache.ts | import { CacheManger } from "./CacheManger";
import { Utils } from "./Utils";
import { Pool } from "./Pool";
/**
* @private
* Count-based object cache manager
*/
export class PoolCache {
//TODO:
/**
* The object's identifier (sign) in the Pool
*/
sign: string;
/**
* Maximum number of objects allowed in the cache
*/
maxCount: number = 1000;
/**
* Gets the list of cached objects
* @return
*
*/
getCacheList(): any[] {
return Pool.getPoolBySign(this.sign);
}
/**
* Attempts to trim the cache
* @param force whether to force the cleanup
*
*/
tryDispose(force: boolean): void {
var list: any[];
list = Pool.getPoolBySign(this.sign);
if (list.length > this.maxCount) {
list.splice(this.maxCount, list.length - this.maxCount);
}
}
| * @param maxCount maximum number of objects allowed in the cache
*
*/
static addPoolCacheManager(sign: string, maxCount: number = 100): void {
var cache: PoolCache;
cache = new PoolCache();
cache.sign = sign;
cache.maxCount = maxCount;
CacheManger.regCacheByFunction(Utils.bind(cache.tryDispose, cache), Utils.bind(cache.getCacheList, cache));
}
} | /**
* Adds an object cache manager
* @param sign the object's identifier in the Pool |
pool_test.go | package pool
import (
"testing"
)
func TestNew(t *testing.T) {
p := New(func() interface{} {
return make([]byte, 4)
})
ss := "abcd"
if ss != "abcd" {
t.Error(p)
}
bb := p.Alloc()
b := bb.Value.([]byte)
println(&b)
copy(b, []byte("abcd")) | }
p.Free(bb)
if p.UsedSize() != 0 {
t.Error(p)
}
if p.FreeSize() != 1 {
t.Error(p)
}
b = p.Alloc().Value.([]byte)
s := string(b)
print(s)
if s != "abcd" {
t.Error(p)
}
} | if p.UsedSize() != 1 {
t.Error(p) |
interpolate_spline.py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Polyharmonic spline interpolation."""
import tensorflow as tf
EPSILON = 0.0000000001
def _cross_squared_distance_matrix(x, y):
"""Pairwise squared distance between two (batch) matrices' rows (2nd dim).
Computes the pairwise distances between rows of x and rows of y
Args:
x: [batch_size, n, d] float `Tensor`
y: [batch_size, m, d] float `Tensor`
Returns:
squared_dists: [batch_size, n, m] float `Tensor`, where
squared_dists[b,i,j] = ||x[b,i,:] - y[b,j,:]||^2
"""
x_norm_squared = tf.reduce_sum(tf.square(x), 2)
y_norm_squared = tf.reduce_sum(tf.square(y), 2)
# Expand so that we can broadcast.
x_norm_squared_tile = tf.expand_dims(x_norm_squared, 2)
y_norm_squared_tile = tf.expand_dims(y_norm_squared, 1)
x_y_transpose = tf.matmul(x, y, adjoint_b=True)
# squared_dists[b,i,j] = ||x_bi - y_bj||^2 =
# x_bi'x_bi- 2x_bi'x_bj + x_bj'x_bj
squared_dists = (
x_norm_squared_tile - 2 * x_y_transpose + y_norm_squared_tile)
return squared_dists
def _pairwise_squared_distance_matrix(x):
"""Pairwise squared distance among a (batch) matrix's rows (2nd dim).
This saves a bit of computation vs. using
_cross_squared_distance_matrix(x,x)
Args:
x: `[batch_size, n, d]` float `Tensor`
Returns:
squared_dists: `[batch_size, n, n]` float `Tensor`, where
squared_dists[b,i,j] = ||x[b,i,:] - x[b,j,:]||^2
"""
x_x_transpose = tf.matmul(x, x, adjoint_b=True)
x_norm_squared = tf.linalg.diag_part(x_x_transpose)
x_norm_squared_tile = tf.expand_dims(x_norm_squared, 2)
# squared_dists[b,i,j] = ||x_bi - x_bj||^2 =
# = x_bi'x_bi- 2x_bi'x_bj + x_bj'x_bj
squared_dists = x_norm_squared_tile - 2 * x_x_transpose + tf.transpose(
x_norm_squared_tile, [0, 2, 1])
return squared_dists
def _solve_interpolation(train_points, train_values, order,
regularization_weight):
"""Solve for interpolation coefficients.
Computes the coefficients of the polyharmonic interpolant for the
'training' data defined by (train_points, train_values) using the kernel
phi.
Args:
train_points: `[b, n, d]` interpolation centers
train_values: `[b, n, k]` function values
order: order of the interpolation
regularization_weight: weight to place on smoothness regularization term
Returns:
w: `[b, n, k]` weights on each interpolation center
v: `[b, d, k]` weights on each input dimension
Raises:
ValueError: if d or k is not fully specified.
"""
# These dimensions are set dynamically at runtime.
b, n, _ = tf.unstack(tf.shape(train_points), num=3)
d = train_points.shape[-1]
if d is None:
raise ValueError('The dimensionality of the input points (d) must be '
'statically-inferrable.')
k = train_values.shape[-1]
if k is None:
raise ValueError('The dimensionality of the output values (k) must be '
'statically-inferrable.')
# First, rename variables so that the notation (c, f, w, v, A, B, etc.)
# follows https://en.wikipedia.org/wiki/Polyharmonic_spline.
# To account for python style guidelines we use
# matrix_a for A and matrix_b for B.
c = train_points
f = train_values
# Next, construct the linear system.
with tf.name_scope('construct_linear_system'):
matrix_a = _phi(_pairwise_squared_distance_matrix(c),
order) # [b, n, n]
if regularization_weight > 0:
batch_identity_matrix = tf.expand_dims(tf.eye(n, dtype=c.dtype), 0)
matrix_a += regularization_weight * batch_identity_matrix
# Append ones to the feature values for the bias term
# in the linear model.
ones = tf.ones_like(c[..., :1], dtype=c.dtype)
matrix_b = tf.concat([c, ones], 2) # [b, n, d + 1]
# [b, n + d + 1, n]
left_block = tf.concat(
[matrix_a, tf.transpose(matrix_b, [0, 2, 1])], 1)
num_b_cols = matrix_b.get_shape()[2] # d + 1
lhs_zeros = tf.zeros([b, num_b_cols, num_b_cols], train_points.dtype)
right_block = tf.concat([matrix_b, lhs_zeros],
1) # [b, n + d + 1, d + 1]
lhs = tf.concat([left_block, right_block],
2) # [b, n + d + 1, n + d + 1]
rhs_zeros = tf.zeros([b, d + 1, k], train_points.dtype)
rhs = tf.concat([f, rhs_zeros], 1) # [b, n + d + 1, k]
# Then, solve the linear system and unpack the results.
with tf.name_scope('solve_linear_system'):
w_v = tf.linalg.solve(lhs, rhs)
w = w_v[:, :n, :]
v = w_v[:, n:, :]
return w, v
def _apply_interpolation(query_points, train_points, w, v, order):
"""Apply polyharmonic interpolation model to data.
Given coefficients w and v for the interpolation model, we evaluate
interpolated function values at query_points.
Args:
query_points: `[b, m, d]` x values to evaluate the interpolation at
train_points: `[b, n, d]` x values that act as the interpolation centers
( the c variables in the wikipedia article)
w: `[b, n, k]` weights on each interpolation center
v: `[b, d, k]` weights on each input dimension
order: order of the interpolation
Returns:
Polyharmonic interpolation evaluated at points defined in query_points.
"""
# First, compute the contribution from the rbf term.
pairwise_dists = _cross_squared_distance_matrix(query_points, train_points)
phi_pairwise_dists = _phi(pairwise_dists, order)
rbf_term = tf.matmul(phi_pairwise_dists, w)
# Then, compute the contribution from the linear term.
# Pad query_points with ones, for the bias term in the linear model.
query_points_pad = tf.concat([
query_points,
tf.ones_like(query_points[..., :1], train_points.dtype)
], 2)
linear_term = tf.matmul(query_points_pad, v)
return rbf_term + linear_term
def _phi(r, order):
"""Coordinate-wise nonlinearity used to define the order of the
interpolation.
See https://en.wikipedia.org/wiki/Polyharmonic_spline for the definition.
Args:
r: input op
order: interpolation order
Returns:
phi_k evaluated coordinate-wise on r, for k = order
"""
# using EPSILON prevents log(0), sqrt(0), etc.
# sqrt(0) is well-defined, but its gradient is not
with tf.name_scope('phi'):
if order == 1:
|
elif order == 2:
return 0.5 * r * tf.math.log(tf.maximum(r, EPSILON))
elif order == 4:
return 0.5 * tf.square(r) * tf.math.log(tf.maximum(r, EPSILON))
elif order % 2 == 0:
r = tf.maximum(r, EPSILON)
return 0.5 * tf.pow(r, 0.5 * order) * tf.math.log(r)
else:
r = tf.maximum(r, EPSILON)
return tf.pow(r, 0.5 * order)
def interpolate_spline(train_points,
train_values,
query_points,
order,
regularization_weight=0.0,
name='interpolate_spline'):
r"""Interpolate signal using polyharmonic interpolation.
The interpolant has the form
$$f(x) = \sum_{i = 1}^n w_i \phi(||x - c_i||) + v^T x + b.$$
This is a sum of two terms: (1) a weighted sum of radial basis function
(RBF) terms, with the centers \\(c_1, ... c_n\\), and (2) a linear term
with a bias. The \\(c_i\\) vectors are 'training' points.
In the code, b is absorbed into v
by appending 1 as a final dimension to x. The coefficients w and v are
estimated such that the interpolant exactly fits the value of the function
at the \\(c_i\\) points, the vector w is orthogonal to each \\(c_i\\),
and the vector w sums to 0. With these constraints, the coefficients
can be obtained by solving a linear system.
\\(\phi\\) is an RBF, parametrized by an interpolation
order. Using order=2 produces the well-known thin-plate spline.
We also provide the option to perform regularized interpolation. Here, the
interpolant is selected to trade off between the squared loss on the
training data and a certain measure of its curvature
([details](https://en.wikipedia.org/wiki/Polyharmonic_spline)).
Using a regularization weight greater than zero has the effect that the
interpolant will no longer exactly fit the training data. However, it may
be less vulnerable to overfitting, particularly for high-order
interpolation.
Note the interpolation procedure is differentiable with respect to all
inputs besides the order parameter.
We support dynamically-shaped inputs, where batch_size, n, and m are None
at graph construction time. However, d and k must be known.
Args:
train_points: `[batch_size, n, d]` float `Tensor` of n d-dimensional
locations. These do not need to be regularly-spaced.
train_values: `[batch_size, n, k]` float `Tensor` of n c-dimensional
values evaluated at train_points.
query_points: `[batch_size, m, d]` `Tensor` of m d-dimensional locations
where we will output the interpolant's values.
order: order of the interpolation. Common values are 1 for
\\(\phi(r) = r\\), 2 for \\(\phi(r) = r^2 * log(r)\\)
(thin-plate spline), or 3 for \\(\phi(r) = r^3\\).
regularization_weight: weight placed on the regularization term.
This will depend substantially on the problem, and it should always be
tuned. For many problems, it is reasonable to use no regularization.
If using a non-zero value, we recommend a small value like 0.001.
name: name prefix for ops created by this function
Returns:
`[b, m, k]` float `Tensor` of query values. We use train_points and
train_values to perform polyharmonic interpolation. The query values are
the values of the interpolant evaluated at the locations specified in
query_points.
"""
with tf.name_scope(name or "interpolate_spline"):
train_points = tf.convert_to_tensor(train_points)
train_values = tf.convert_to_tensor(train_values)
query_points = tf.convert_to_tensor(query_points)
# First, fit the spline to the observed data.
with tf.name_scope('solve'):
w, v = _solve_interpolation(train_points, train_values, order,
regularization_weight)
# Then, evaluate the spline at the query locations.
with tf.name_scope('predict'):
query_values = _apply_interpolation(query_points, train_points, w,
v, order)
return query_values
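# Illustrative usage sketch (shapes/values made up, not part of the module):
#
#   train_points = tf.random.uniform([1, 10, 2])  # [b, n, d]
#   train_values = tf.random.uniform([1, 10, 1])  # [b, n, k]
#   query_points = tf.random.uniform([1, 5, 2])   # [b, m, d]
#   query_values = interpolate_spline(
#       train_points, train_values, query_points, order=2)  # -> [1, 5, 1]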
| r = tf.maximum(r, EPSILON)
r = tf.sqrt(r)
return r |
icon_wb_sunny.rs |
pub struct IconWbSunny {
props: crate::Props,
}
impl yew::Component for IconWbSunny {
type Properties = crate::Props;
type Message = ();
fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self
{
Self { props }
}
fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender
{
true
}
fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender
|
fn view(&self) -> yew::prelude::Html
{
yew::prelude::html! {
<svg
class=self.props.class.unwrap_or("")
width=self.props.size.unwrap_or(24).to_string()
height=self.props.size.unwrap_or(24).to_string()
viewBox="0 0 24 24"
fill=self.props.fill.unwrap_or("none")
stroke=self.props.color.unwrap_or("currentColor")
stroke-width=self.props.stroke_width.unwrap_or(2).to_string()
stroke-linecap=self.props.stroke_linecap.unwrap_or("round")
stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round")
>
<svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0z" fill="none"/><path d="M6.76 4.84l-1.8-1.79-1.41 1.41 1.79 1.79 1.42-1.41zM4 10.5H1v2h3v-2zm9-9.95h-2V3.5h2V.55zm7.45 3.91l-1.41-1.41-1.79 1.79 1.41 1.41 1.79-1.79zm-3.21 13.7l1.79 1.8 1.41-1.41-1.8-1.79-1.4 1.4zM20 10.5v2h3v-2h-3zm-8-5c-3.31 0-6 2.69-6 6s2.69 6 6 6 6-2.69 6-6-2.69-6-6-6zm-1 16.95h2V19.5h-2v2.95zm-7.45-3.91l1.41 1.41 1.79-1.8-1.41-1.41-1.79 1.8z"/></svg>
</svg>
}
}
}
| {
false
} |
get_S_norm.py | # ##############################################################################
# Usage: python get_S_norm.py Subj I1 I2
# Time: ~ 20s
# Ref:
# ##############################################################################
# 20220118, In Kyu Lee
# No version suffix
# ##############################################################################
# v1c: 08/11/2021, In Kyu Lee
# - Fixed: when V_IN < V_EX, s_norm returns nan issue.
# - ownpow is used
# v1b: 08/10/2021, In Kyu Lee
# - S* stat is added
# 03/18/2021, In Kyu Lee
# Calculate S*
# ##############################################################################
# Input:
# - displacement img, ex) PMSN03001_EX0-TO-PMSN03001_IN0-SSTVD_disp_resample.mhd'
# - IN lobe mask, ex) PMSN03001_IN0_vida-lobes.img
# Output:
# - s* image, ex) PMSN03001_EX0-TO-PMSN03001_IN0-SSTVD_s_norm.img
# - s* stat, ex) PMSN03001_EX0-TO-PMSN03001_IN0-SSTVD_lobar_s_norm.txt
# ##############################################################################
# import libraries
import os
import sys
import numpy as np
import time
import pandas as pd
from medpy.io import load, save
import SimpleITK as sitk
sitk.ProcessObject_SetGlobalWarningDisplay(False)
import warnings
warnings.filterwarnings("ignore")
def ownpow(a, b):
|
start = time.time()
Subj = str(sys.argv[1]) # PMSN03001
I1 = str(sys.argv[2]) # 'IN0'
I2 = str(sys.argv[3]) # 'EX0'
disp_path = f'{Subj}_{I2}-TO-{Subj}_{I1}-SSTVD_disp_resample.mhd'
histo_EX = pd.read_csv(f'{Subj}_{I2}_vida-histo.csv')
histo_IN = pd.read_csv(f'{Subj}_{I1}_vida-histo.csv')
s_norm_stat_path = f'{Subj}_{I2}-TO-{Subj}_{I1}-SSTVD_lobar_s_norm.txt'
IN_lobe_path = f'{Subj}_{I1}_vida-lobes.img'
if not os.path.exists(IN_lobe_path):
IN_lobe_path = f'{Subj}_{I1}_vida-lobes.img.gz'
s_norm_img_path = f'{Subj}_{I2}-TO-{Subj}_{I1}-SSTVD_s_norm.img'
# V_cm3_IN
V_EX = histo_EX.loc[histo_EX.location=='both', 'total-volume-cm3'].values[0]
V_IN = histo_IN.loc[histo_IN.location=='both', 'total-volume-cm3'].values[0]
# cm^3 -> mm^3
V_EX = V_EX * 1000
V_IN = V_IN * 1000
# Data Loading . . .
disp, disp_h = load(disp_path)
IN_lobe_img, IN_lobe_header = load(IN_lobe_path)
s_norm_h = disp_h
# [mm]
s = (disp[:,:,:,0]**2+disp[:,:,:,1]**2+disp[:,:,:,2]**2)**0.5
# This doesn't work if V_IN- V_EX is negative
# s_norm = s/((V_IN-V_EX)**(1/3))
s_norm = s/ownpow(V_IN-V_EX,1/3)
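# e.g. a negative numpy float like (-8.0) ** (1/3) evaluates to nan, while
# ownpow(-8.0, 1/3) takes abs() first and restores the sign, giving -2.0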
# Prep stat
s_norm_l0 = np.mean(s_norm[IN_lobe_img==8])
s_norm_l1 = np.mean(s_norm[IN_lobe_img==16])
s_norm_l2 = np.mean(s_norm[IN_lobe_img==32])
s_norm_l3 = np.mean(s_norm[IN_lobe_img==64])
s_norm_l4 = np.mean(s_norm[IN_lobe_img==128])
s_norm_mean = (s_norm_l0 + s_norm_l1 + s_norm_l2 + s_norm_l3 + s_norm_l4)/5
s_norm_l0_sd = np.std(s_norm[IN_lobe_img==8])
s_norm_l1_sd = np.std(s_norm[IN_lobe_img==16])
s_norm_l2_sd = np.std(s_norm[IN_lobe_img==32])
s_norm_l3_sd = np.std(s_norm[IN_lobe_img==64])
s_norm_l4_sd = np.std(s_norm[IN_lobe_img==128])
s_norm_sd = np.std(s_norm[IN_lobe_img!=0])
# CV = std/mean
s_norm_l0_cv = s_norm_l0_sd/s_norm_l0
s_norm_l1_cv = s_norm_l1_sd/s_norm_l1
s_norm_l2_cv = s_norm_l2_sd/s_norm_l2
s_norm_l3_cv = s_norm_l3_sd/s_norm_l3
s_norm_l4_cv = s_norm_l4_sd/s_norm_l4
s_norm_cv = s_norm_sd/s_norm_mean
s_norm_stat = pd.DataFrame({'Lobes':['Lobe0','Lobe1','Lobe2','Lobe3','Lobe4','All'],
'sStar_m':np.float16([s_norm_l0,s_norm_l1,s_norm_l2,s_norm_l3,s_norm_l4,s_norm_mean]),
'sStar_sd':np.float16([s_norm_l0_sd,s_norm_l1_sd,s_norm_l2_sd,s_norm_l3_sd,s_norm_l4_sd,s_norm_sd]),
'sStar_cv':np.float16([s_norm_l0_cv,s_norm_l1_cv,s_norm_l2_cv,s_norm_l3_cv,s_norm_l4_cv,s_norm_cv])})
# Save
save(s_norm,s_norm_img_path,hdr=s_norm_h)
s_norm_stat.to_csv(s_norm_stat_path, index=False, sep=' ')
end = time.time()
print(f'Elapsed time: {end-start}s')
| if a > 0:
return a**b
if a < 0:
temp = abs(a)**b
return -1*temp |
distributeHI.py | import numpy as np
import re, os, sys
from pmesh.pm import ParticleMesh
from nbodykit.lab import BigFileCatalog, BigFileMesh, MultipleSpeciesCatalog, FFTPower
from nbodykit import setup_logging
from mpi4py import MPI
import HImodels
# enable logging, so we have some clue what's going on.
setup_logging('info')
#Get model as parameter
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--size', help='for small or big box', default='small')
parser.add_argument('-m', '--model', help='model name to use')
args = parser.parse_args()
if args.model is None:
print('Specify a model name')
sys.exit()
#print(args, args.model)
model = args.model #'ModelD'
boxsize = args.size
#
#
#Global, fixed things
scratchyf = '/global/cscratch1/sd/yfeng1/m3127/'
scratchcm = '/global/cscratch1/sd/chmodi/m3127/H1mass/'
project = '/project/projectdirs/m3127/H1mass/'
cosmodef = {'omegam':0.309167, 'h':0.677, 'omegab':0.048}
alist = [0.1429,0.1538,0.1667,0.1818,0.2000,0.2222,0.2500,0.2857,0.3333]
#Parameters, box size, number of mesh cells, simulation, ...
if boxsize == 'small':
bs, nc, ncsim, sim, prefix = 256, 512, 2560, 'highres/%d-9100-fixed'%2560, 'highres'
elif boxsize == 'big':
bs, nc, ncsim, sim, prefix = 1024, 1024, 10240, 'highres/%d-9100-fixed'%10240, 'highres'
else:
print('Box size not understood, should be "big" or "small"')
sys.exit()
# It's useful to have my rank for printing...
pm = ParticleMesh(BoxSize=bs, Nmesh=[nc, nc, nc])
rank = pm.comm.rank
comm = pm.comm
#Which model & configuration to use
modeldict = {'ModelA':HImodels.ModelA, 'ModelB':HImodels.ModelB, 'ModelC':HImodels.ModelC}
modedict = {'ModelA':'galaxies', 'ModelB':'galaxies', 'ModelC':'halos'}
HImodel = modeldict[model] #HImodels.ModelB
modelname = model
mode = modedict[model]
ofolder = '../data/outputs/'
def | (aa, halocat, cencat, satcat, outfolder, mbins=None):
'''Compute the fraction of HI in halos, centrals, satellites'''
if rank==0: print('Calculating distribution')
if mbins is None: mbins = np.logspace(9, 15, 100)
hmass = halocat['Mass'].compute()
htotal, hsize, h1total = [], [], []
for im in range(mbins.size-1):
mask = (hmass >= mbins[im]) & (hmass < mbins[im+1])
rankweight = (hmass*mask).sum()
htotal.append(comm.allreduce(rankweight))
rankweight = (mask).sum()
hsize.append(comm.allreduce(rankweight))
h1bin = []
for cat in [halocat['HImass'], cencat['HImass'], cencat['HIsat']]:
rankweight = (cat.compute()*mask).sum()
h1bin.append(comm.allreduce(rankweight))
h1total.append(h1bin)
#
if rank==0:
tosave = np.zeros((len(hsize), 5))
tosave[:, 1] = hsize
tosave[:, 0] = htotal / (tosave[:, 1])
tosave[:, 2:] = h1total/ (tosave[:, 1].reshape(-1, 1))
tosave[np.isnan(tosave)] = 0
header = 'Halo Mass, Number Halos, HI halos, HI centrals, HI satellites'
np.savetxt(outfolder + "HI_dist_{:6.4f}.txt".format(aa), tosave, fmt='%0.6e', header=header)
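# Illustrative note on the reduction pattern above (names reused from the
# function, not new code): each rank sums only its local halos and
# comm.allreduce combines the partial sums, e.g.
#
#   local = (hmass * mask).sum()   # this rank's contribution
#   total = comm.allreduce(local)  # identical global sum on every rank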
if __name__=="__main__":
if rank==0: print('Starting')
suff='-m1_00p3mh-alpha-0p8-subvol'
outfolder = ofolder + suff[1:]
if bs == 1024: outfolder = outfolder + "-big"
outfolder += "/%s/"%modelname
if rank == 0: print(outfolder)
#outfolder = ofolder + suff[1:] + "/%s/"%modelname
try:
os.makedirs(outfolder)
except : pass
for aa in alist:
if rank == 0: print('\n ############## Redshift = %0.2f ############## \n'%(1/aa-1))
halocat = BigFileCatalog(scratchyf + sim+ '/fastpm_%0.4f//'%aa, dataset='LL-0.200')
mp = halocat.attrs['MassTable'][1]*1e10##
halocat['Mass'] = halocat['Length'].compute() * mp
cencat = BigFileCatalog(scratchcm + sim+'/fastpm_%0.4f/cencat'%aa+suff)
satcat = BigFileCatalog(scratchcm + sim+'/fastpm_%0.4f/satcat'%aa+suff)
#
HImodelz = HImodel(aa)
halocat['HImass'], cencat['HImass'], satcat['HImass'] = HImodelz.assignHI(halocat, cencat, satcat)
cencat['HIsat'] = HImodelz.getinsat(satcat['HImass'].compute(), satcat['GlobalID'].compute(),
cencat.csize, cencat['Mass'].size, cencat.comm).local
mbins = 10**np.arange(9, 15.1, 0.2)
distribution(aa, halocat, cencat, satcat, outfolder, mbins=mbins)
| distribution |
ecdfilevehicleunionpkloader_gen.go | // Code generated by github.com/vektah/dataloaden, DO NOT EDIT.
package model
import (
"sync"
"time"
)
// EcdFileVehicleUnionPkLoaderConfig captures the config to create a new EcdFileVehicleUnionPkLoader
type EcdFileVehicleUnionPkLoaderConfig struct {
// Fetch is a method that provides the data for the loader
Fetch func(keys []string) ([]*EcdFileVehicle, []error)
// Wait is how long to wait before sending a batch
Wait time.Duration
// MaxBatch will limit the maximum number of keys to send in one batch, 0 = no limit
MaxBatch int
}
// NewEcdFileVehicleUnionPkLoader creates a new EcdFileVehicleUnionPkLoader given a fetch, wait, and maxBatch
func NewEcdFileVehicleUnionPkLoader(config EcdFileVehicleUnionPkLoaderConfig) *EcdFileVehicleUnionPkLoader {
return &EcdFileVehicleUnionPkLoader{
fetch: config.Fetch,
wait: config.Wait,
maxBatch: config.MaxBatch,
}
}
// EcdFileVehicleUnionPkLoader batches and caches requests
type EcdFileVehicleUnionPkLoader struct {
// this method provides the data for the loader
fetch func(keys []string) ([]*EcdFileVehicle, []error)
// how long to wait before sending a batch
wait time.Duration
// this will limit the maximum number of keys to send in one batch, 0 = no limit
maxBatch int
// INTERNAL
// lazily created cache
cache map[string]*EcdFileVehicle
// the current batch. keys will continue to be collected until timeout is hit,
// then everything will be sent to the fetch method and out to the listeners
batch *ecdFileVehicleUnionPkLoaderBatch
// mutex to prevent races
mu sync.Mutex
}
type ecdFileVehicleUnionPkLoaderBatch struct {
keys []string
data []*EcdFileVehicle
error []error
closing bool
done chan struct{}
}
// Load a EcdFileVehicle by key, batching and caching will be applied automatically
func (l *EcdFileVehicleUnionPkLoader) Load(key string) (*EcdFileVehicle, error) {
return l.LoadThunk(key)()
}
// LoadThunk returns a function that when called will block waiting for a EcdFileVehicle.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *EcdFileVehicleUnionPkLoader) LoadThunk(key string) func() (*EcdFileVehicle, error) {
l.mu.Lock()
if it, ok := l.cache[key]; ok {
l.mu.Unlock()
return func() (*EcdFileVehicle, error) {
return it, nil
}
}
if l.batch == nil {
l.batch = &ecdFileVehicleUnionPkLoaderBatch{done: make(chan struct{})}
}
batch := l.batch
pos := batch.keyIndex(l, key)
l.mu.Unlock()
return func() (*EcdFileVehicle, error) {
<-batch.done
var data *EcdFileVehicle
if pos < len(batch.data) {
data = batch.data[pos]
}
var err error
// it's convenient to be able to return a single error for everything
if len(batch.error) == 1 {
err = batch.error[0]
} else if batch.error != nil {
err = batch.error[pos]
}
if err == nil {
l.mu.Lock()
l.unsafeSet(key, data)
l.mu.Unlock()
}
return data, err
}
}
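// Illustrative usage sketch (fetchFromDB is a hypothetical helper, not part
// of the generated code):
//
//	loader := NewEcdFileVehicleUnionPkLoader(EcdFileVehicleUnionPkLoaderConfig{
//		Fetch:    func(keys []string) ([]*EcdFileVehicle, []error) { return fetchFromDB(keys) },
//		Wait:     2 * time.Millisecond,
//		MaxBatch: 100,
//	})
//	vehicle, err := loader.Load("some-key") // batched with concurrent Loads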
// LoadAll fetches many keys at once. It will be broken into appropriate sized
// sub batches depending on how the loader is configured
func (l *EcdFileVehicleUnionPkLoader) LoadAll(keys []string) ([]*EcdFileVehicle, []error) {
results := make([]func() (*EcdFileVehicle, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
ecdFileVehicles := make([]*EcdFileVehicle, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
ecdFileVehicles[i], errors[i] = thunk()
}
return ecdFileVehicles, errors
}
// LoadAllThunk returns a function that when called will block waiting for a EcdFileVehicles.
// This method should be used if you want one goroutine to make requests to many
// different data loaders without blocking until the thunk is called.
func (l *EcdFileVehicleUnionPkLoader) LoadAllThunk(keys []string) func() ([]*EcdFileVehicle, []error) {
results := make([]func() (*EcdFileVehicle, error), len(keys))
for i, key := range keys {
results[i] = l.LoadThunk(key)
}
return func() ([]*EcdFileVehicle, []error) {
ecdFileVehicles := make([]*EcdFileVehicle, len(keys))
errors := make([]error, len(keys))
for i, thunk := range results {
ecdFileVehicles[i], errors[i] = thunk()
}
return ecdFileVehicles, errors
}
}
// Prime the cache with the provided key and value. If the key already exists, no change is made
// and false is returned.
// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).)
func (l *EcdFileVehicleUnionPkLoader) Prime(key string, value *EcdFileVehicle) bool {
l.mu.Lock()
var found bool
if _, found = l.cache[key]; !found |
l.mu.Unlock()
return !found
}
// Clear the value at key from the cache, if it exists
func (l *EcdFileVehicleUnionPkLoader) Clear(key string) {
l.mu.Lock()
delete(l.cache, key)
l.mu.Unlock()
}
func (l *EcdFileVehicleUnionPkLoader) unsafeSet(key string, value *EcdFileVehicle) {
if l.cache == nil {
l.cache = map[string]*EcdFileVehicle{}
}
l.cache[key] = value
}
// keyIndex will return the location of the key in the batch; if it's not found
// it will add the key to the batch
func (b *ecdFileVehicleUnionPkLoaderBatch) keyIndex(l *EcdFileVehicleUnionPkLoader, key string) int {
for i, existingKey := range b.keys {
if key == existingKey {
return i
}
}
pos := len(b.keys)
b.keys = append(b.keys, key)
if pos == 0 {
go b.startTimer(l)
}
if l.maxBatch != 0 && pos >= l.maxBatch-1 {
if !b.closing {
b.closing = true
l.batch = nil
go b.end(l)
}
}
return pos
}
func (b *ecdFileVehicleUnionPkLoaderBatch) startTimer(l *EcdFileVehicleUnionPkLoader) {
time.Sleep(l.wait)
l.mu.Lock()
// we must have hit a batch limit and are already finalizing this batch
if b.closing {
l.mu.Unlock()
return
}
l.batch = nil
l.mu.Unlock()
b.end(l)
}
func (b *ecdFileVehicleUnionPkLoaderBatch) end(l *EcdFileVehicleUnionPkLoader) {
b.data, b.error = l.fetch(b.keys)
close(b.done)
}
| {
// make a copy when writing to the cache; it's easy to pass a pointer in from a loop var
// and end up with the whole cache pointing to the same value.
cpy := *value
l.unsafeSet(key, &cpy)
} |
model.js | import { updateOrCreate, findUserById, findUserByCredentials, fetchUserByTwitterId } from './db';
import makeLogger from '../logger';
const logger = makeLogger('user/model.js');
export class | {
id;
email;
password;
twitter;
constructor({ id, email, password, twitter }) {
this.id = id;
this.email = email;
this.password = password;
this.twitter = twitter;
}
save = async function () {
const { id } = await updateOrCreate(this);
logger.info(' New User created with id: ', id);
this.id = id;
}
load = async function () {
const userdb = await findUserById(this.id);
this.email = userdb.email;
this.twitter = userdb.twitter;
}
toSession = function () {
if (!this.id) {
throw new Error('Id is required for to String. Please persist the data first.')
}
return {
id: this.id,
email: this.email,
};
}
login = async function () {
const data = await findUserByCredentials({ username: this.email, password: this.password });
if(!data){
throw new Error('Agent not found');
}
this.id = data._id;
this.twitter = data.twitter;
}
addAgent = async function (username, password) {
const agent = new User({ email: username, password, twitter: this.twitter, })
const { id } = await updateOrCreate(agent);
agent.id = id;
return agent;
}
fetchAgents = async function (){
const admin = this;
return fetchUserByTwitterId(admin.twitter, admin.email);
}
static makeFromTwitter = async function (twitterUser) {
if (twitterUser.email == null) {
throw new Error('Email is absent in the user info');
}
const user = new User({ email: twitterUser.email, twitter: twitterUser.id });
if (!user) {
throw new Error('No user found with the authenticated token.')
}
await user.save();
logger.info('User is persisted...')
return user;
}
static loginAgent = async function ({ username, password }) {
const data = await findUserByCredentials({ username, password });
if (!data) {
throw new Error('No user found with the credentials')
}
const user = new User({ id: data._id, email: data.email, twitter: data.twitter });
return user;
}
} | User |
QueryMatcher.ts | /*
Copyright 2017 Aviral Dasgupta
Copyright 2018 Michael Telatynski <[email protected]>
Copyright 2018 New Vector Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import _at from 'lodash/at';
import _uniq from 'lodash/uniq';
import {removeHiddenChars} from "matrix-js-sdk/src/utils";
interface IOptions<T extends {}> {
keys: Array<string | keyof T>;
funcs?: Array<(T) => string>;
shouldMatchWordsOnly?: boolean;
shouldMatchPrefix?: boolean;
// whether to apply unhomoglyph and strip diacritics to fuzz up the search. Defaults to true
fuzzy?: boolean;
}
/**
* Simple search matcher that matches any results with the query string anywhere
* in the search string. Returns matches in the order the query string appears
* in the search key, earliest first, then in the order the search key appears
* in the provided array of keys, then in the order the items appeared in the
* source array.
*
* @param {Object[]} objects Initial list of objects. Equivalent to calling
* setObjects() after construction
* @param {Object} options Options object
* @param {string[]} options.keys List of keys to use as indexes on the objects
* @param {function[]} options.funcs List of functions that when called with the
* object as an arg will return a string to use as an index
*/
export default class | <T extends Object> {
private _options: IOptions<T>;
private _items: Map<string, {object: T, keyWeight: number}[]>;
constructor(objects: T[], options: IOptions<T> = { keys: [] }) {
this._options = options;
this.setObjects(objects);
// By default, we remove any non-alphanumeric characters ([^A-Za-z0-9_]) from the
// query and the value being queried before matching
if (this._options.shouldMatchWordsOnly === undefined) {
this._options.shouldMatchWordsOnly = true;
}
// By default, match anywhere in the string being searched. If enabled, only return
// matches that are prefixed with the query.
if (this._options.shouldMatchPrefix === undefined) {
this._options.shouldMatchPrefix = false;
}
}
setObjects(objects: T[]) {
this._items = new Map();
for (const object of objects) {
// Need to use unsafe coerce here because the objects can have any
// type for their values. We assume that those values whose keys have
// been specified will be strings. Also, we cannot infer all the
// types of the keys of the objects at compile time.
const keyValues = _at<string>(<any>object, this._options.keys);
if (this._options.funcs) {
for (const f of this._options.funcs) {
keyValues.push(f(object));
}
}
for (const [index, keyValue] of Object.entries(keyValues)) {
if (!keyValue) continue; // skip falsy keyValues
const key = this.processQuery(keyValue);
if (!this._items.has(key)) {
this._items.set(key, []);
}
this._items.get(key).push({
keyWeight: Number(index),
object,
});
}
}
}
match(query: string): T[] {
query = this.processQuery(query);
if (this._options.shouldMatchWordsOnly) {
query = query.replace(/[^\w]/g, '');
}
if (query.length === 0) {
return [];
}
const matches = [];
// Iterate through the map & check each key.
// ES6 Map iteration order is defined to be insertion order, so results
// here will come out in the order they were put in.
for (const [key, candidates] of this._items.entries()) {
let resultKey = key;
if (this._options.shouldMatchWordsOnly) {
resultKey = resultKey.replace(/[^\w]/g, '');
}
const index = resultKey.indexOf(query);
if (index !== -1 && (!this._options.shouldMatchPrefix || index === 0)) {
matches.push(
...candidates.map((candidate) => ({index, ...candidate})),
);
}
}
// Sort matches by where the query appeared in the search key, then by
// where the matched key appeared in the provided array of keys.
matches.sort((a, b) => {
if (a.index < b.index) {
return -1;
} else if (a.index === b.index) {
if (a.keyWeight < b.keyWeight) {
return -1;
} else if (a.keyWeight === b.keyWeight) {
return 0;
}
}
return 1;
});
// Now map the keys to the result objects. Also remove any duplicates.
return _uniq(matches.map((match) => match.object));
}
private processQuery(query: string): string {
if (this._options.fuzzy !== false) {
return removeHiddenChars(query).toLowerCase();
}
return query.toLowerCase();
}
}
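// Illustrative usage sketch (sample objects made up, not part of this file):
//
//   const matcher = new QueryMatcher([{ name: "Alice" }, { name: "Bob" }], { keys: ["name"] });
//   matcher.match("ali"); // -> [{ name: "Alice" }]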
| QueryMatcher |
main.go | package main
// A simple example that shows how to send activity to Bubble Tea in real-time
// through a channel.
import (
"fmt"
"math/rand"
"os"
"time"
"github.com/anhoder/bubbles/spinner"
tea "github.com/anhoder/bubbletea"
)
// A message used to indicate that activity has occurred. In the real world (for
// example, chat) this would contain actual data.
type responseMsg struct{}
// Simulate a process that sends events at an irregular interval in real time.
// In this case, we'll send events on the channel at a random interval between
// 100 and 1000 milliseconds. As a command, Bubble Tea will run this
// asynchronously.
func listenForActivity(sub chan struct{}) tea.Cmd {
return func() tea.Msg {
for {
time.Sleep(time.Millisecond * time.Duration(rand.Int63n(900)+100))
sub <- struct{}{}
}
}
}
// A command that waits for the activity on a channel.
func | (sub chan struct{}) tea.Cmd {
return func() tea.Msg {
return responseMsg(<-sub)
}
}
type model struct {
sub chan struct{} // where we'll receive activity notifications
responses int // how many responses we've received
spinner spinner.Model
quitting bool
}
func (m model) Init() tea.Cmd {
return tea.Batch(
spinner.Tick,
listenForActivity(m.sub), // generate activity
waitForActivity(m.sub), // wait for activity
)
}
func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
switch msg.(type) {
case tea.KeyMsg:
m.quitting = true
return m, tea.Quit
case responseMsg:
m.responses++ // record external activity
return m, waitForActivity(m.sub) // wait for next event
case spinner.TickMsg:
var cmd tea.Cmd
m.spinner, cmd = m.spinner.Update(msg)
return m, cmd
default:
return m, nil
}
}
func (m model) View() string {
s := fmt.Sprintf("\n %s Events received: %d\n\n Press any key to exit\n", m.spinner.View(), m.responses)
if m.quitting {
s += "\n"
}
return s
}
func main() {
rand.Seed(time.Now().UTC().UnixNano())
p := tea.NewProgram(model{
sub: make(chan struct{}),
spinner: spinner.NewModel(),
})
if p.Start() != nil {
fmt.Println("could not start program")
os.Exit(1)
}
}
| waitForActivity |
__init__.py | CODEOWNERS = ["@esphome/core"] |
||
ipv4.d.ts | export declare type IpV4Like = IpV4 | string | number | Array<number>;
export declare function castIpV4(ip: IpV4Like): IpV4;
export declare class | {
private _b1;
private _b2;
private _b3;
private _b4;
private _int;
static fromString(str: string): IpV4;
static tryString(str: string): IpV4 | null;
static fromInt(int: number): IpV4;
static tryInt(int: number): IpV4 | null;
static fromIntBe(int: number): IpV4;
static tryIntBe(int: number): IpV4 | null;
static fromIntLe(int: number): IpV4;
static tryIntLe(int: number): IpV4 | null;
static fromBytes(b1: number, b2: number, b3: number, b4: number): IpV4;
static tryBytes(b1: number, b2: number, b3: number, b4: number): IpV4 | null;
static fromArray(array: Array<number>): IpV4;
static tryArray(array: Array<number>): IpV4 | null;
toString(): string;
toInt(): number;
toIntBe(): number;
toIntLe(): number;
toArray(): Array<number>;
equal(ip: IpV4): boolean;
static equal(ip1: IpV4Like, ip2: IpV4Like): boolean;
isUnspecified(): boolean;
static isUnspecified(ip: IpV4Like): boolean;
isLoopback(): boolean;
static isLoopback(ip: IpV4Like): boolean;
isPrivate(): boolean;
static isPrivate(ip: IpV4Like): boolean;
isLinkLocal(): boolean;
static isLinkLocal(ip: IpV4Like): boolean;
isMulticast(): boolean;
static isMulticast(ip: IpV4Like): boolean;
isBroadcast(): boolean;
static isBroadcast(ip: IpV4Like): boolean;
isDocumentation(): boolean;
static isDocumentation(ip: IpV4Like): boolean;
isGlobal(): boolean;
static isGlobal(ip: IpV4Like): boolean;
}
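// Illustrative usage sketch (assumed, not part of this declaration file):
//
//   const ip = IpV4.fromString("192.168.0.1");
//   ip.isPrivate();                 // -> true
//   castIpV4("10.0.0.1").toInt();   // numeric form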
| IpV4 |
openstack_blobstore.go | package openstack
import (
"context"
"io"
"sync"
"time"
"github.com/cloudfoundry-incubator/bits-service/util"
"github.com/ncw/swift"
"io/ioutil"
"bytes"
"strings"
bitsgo "github.com/cloudfoundry-incubator/bits-service"
"github.com/cloudfoundry-incubator/bits-service/blobstores/validate"
"github.com/cloudfoundry-incubator/bits-service/config"
"github.com/cloudfoundry-incubator/bits-service/logger"
"github.com/pkg/errors"
"golang.org/x/sync/semaphore"
)
type Blobstore struct {
containerName string
swiftConn *swift.Connection
accountMetaTempURLKey string
}
func NewBlobstore(config config.OpenstackBlobstoreConfig) *Blobstore {
validate.NotEmpty(config.Username)
validate.NotEmpty(config.ApiKey)
validate.NotEmpty(config.AuthURL)
validate.NotEmpty(config.ContainerName)
swiftConn := &swift.Connection{
UserName: config.Username,
ApiKey: config.ApiKey,
AuthUrl: config.AuthURL,
AuthVersion: config.AuthVersion,
DomainId: config.DomainId,
Domain: config.DomainName,
Region: config.Region,
Internal: config.Internal,
EndpointType: swift.EndpointType(config.EndpointType),
Tenant: config.Tenant,
}
e := swiftConn.Authenticate()
if e != nil {
panic(e)
}
// https://docs.openstack.org/kilo/config-reference/content/object-storage-tempurl.html
e = swiftConn.AccountUpdate(map[string]string{"X-Account-Meta-Temp-URL-Key": config.AccountMetaTempURLKey})
if e != nil {
panic(e)
}
return &Blobstore{
swiftConn: swiftConn,
containerName: config.ContainerName,
accountMetaTempURLKey: config.AccountMetaTempURLKey,
}
}
func (blobstore *Blobstore) Exists(path string) (bool, error) {
if !blobstore.containerExists() {
return false, errors.Errorf("Container not found: '%v'", blobstore.containerName)
}
_, _, e := blobstore.swiftConn.Object(blobstore.containerName, path)
if e == swift.ObjectNotFound {
return false, nil
}
if e != nil {
return false, errors.Wrapf(e, "Failed to check for %v/%v", blobstore.containerName, path)
}
return true, nil
}
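// Note: containerExists treats any error other than ContainerNotFound as
// "exists"; other failures surface on the subsequent object operation.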
func (blobstore *Blobstore) containerExists() bool {
_, _, e := blobstore.swiftConn.Container(blobstore.containerName)
return e != swift.ContainerNotFound
}
func (blobstore *Blobstore) Get(path string) (body io.ReadCloser, err error) {
logger.Log.Debugw("Get", "bucket", blobstore.containerName, "path", path)
if !blobstore.containerExists() {
return nil, errors.Errorf("Container not found: '%v'", blobstore.containerName)
}
buf, e := blobstore.swiftConn.ObjectGetBytes(blobstore.containerName, path)
if e == swift.ObjectNotFound {
return nil, bitsgo.NewNotFoundError()
}
if e != nil {
return nil, errors.Wrapf(e, "Container: '%v', path: '%v'", blobstore.containerName, path)
}
return ioutil.NopCloser(bytes.NewBuffer(buf)), nil
}
func (blobstore *Blobstore) GetOrRedirect(path string) (body io.ReadCloser, redirectLocation string, err error) {
return nil, blobstore.swiftConn.ObjectTempUrl(blobstore.containerName, path, blobstore.accountMetaTempURLKey, "GET", time.Now().Add(time.Hour)), nil
}
func (blobstore *Blobstore) Put(path string, src io.ReadSeeker) error {
logger.Log.Debugw("Put", "bucket", blobstore.containerName, "path", path)
if !blobstore.containerExists() {
return errors.Errorf("Container not found: '%v'", blobstore.containerName)
}
_, e := blobstore.swiftConn.ObjectPut(blobstore.containerName, path, src, false, "", "", nil)
if e != nil {
return errors.Wrapf(e, "Container: '%v', path: '%v'", blobstore.containerName, path)
}
return nil
}
func (blobstore *Blobstore) Copy(src, dest string) error {
logger.Log.Debugw("Copy", "container", blobstore.containerName, "src", src, "dest", dest)
if !blobstore.containerExists() {
return errors.Errorf("Container not found: '%v'", blobstore.containerName)
}
_, e := blobstore.swiftConn.ObjectCopy(blobstore.containerName, src, blobstore.containerName, dest, nil)
if e == swift.ObjectNotFound {
return bitsgo.NewNotFoundError()
}
if e != nil {
return errors.Wrapf(e, "Container: '%v', src: '%v', dst: '%v'", blobstore.containerName, src, dest)
}
return nil
}
func (blobstore *Blobstore) Delete(path string) error {
if !blobstore.containerExists() {
return errors.Errorf("Container not found: '%v'", blobstore.containerName)
}
e := blobstore.swiftConn.ObjectDelete(blobstore.containerName, path)
if e == swift.ObjectNotFound {
return bitsgo.NewNotFoundError()
}
if e != nil {
return errors.Wrapf(e, "Container: '%v', path: '%v'", blobstore.containerName, path)
}
return nil
}
func (blobstore *Blobstore) DeleteDir(prefix string) error {
if !blobstore.containerExists() {
return errors.Errorf("Container not found: '%v'", blobstore.containerName)
}
names, e := blobstore.swiftConn.ObjectNames(blobstore.containerName, &swift.ObjectsOpts{Prefix: prefix})
if e != nil {
return errors.Wrapf(e, "Container: '%v', prefix: '%v'", blobstore.containerName, prefix)
}
const numWorkers = 10
deletionErrs := DeleteInParallel(names, numWorkers, func(name string) error {
return blobstore.Delete(name)
})
if len(deletionErrs) != 0 {
return errors.Errorf("Prefix '%v', errors from deleting: %v", prefix, deletionErrs)
}
return nil
}
// Visible for testing only
func DeleteInParallel(names []string, numWorkers int64, deletionFunc func(name string) error) []error |
func (blobstore *Blobstore) Sign(resource string, method string, expirationTime time.Time) (signedURL string) {
	if strings.ToLower(method) != "get" && strings.ToLower(method) != "put" {
panic("The only supported methods are 'put' and 'get'")
}
signedURL = blobstore.swiftConn.ObjectTempUrl(blobstore.containerName, resource, blobstore.accountMetaTempURLKey, strings.ToUpper(method), time.Now().Add(time.Hour))
logger.Log.Debugw("Signed URL", "verb", method, "signed-url", signedURL)
return
}
| {
var errMutex sync.Mutex
deletionErrs := []error{}
ctx := context.TODO()
sem := semaphore.NewWeighted(numWorkers)
for _, name := range names {
util.Must(sem.Acquire(ctx, 1))
go func(name string) {
defer sem.Release(1)
			e := deletionFunc(name)
if e != nil {
if !bitsgo.IsNotFoundError(e) {
errMutex.Lock()
defer errMutex.Unlock()
deletionErrs = append(deletionErrs, e)
}
}
}(name)
}
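	// Acquiring the semaphore's full weight blocks until every worker
	// goroutine has released its slot, i.e. all deletions have finished.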
util.Must(sem.Acquire(ctx, numWorkers))
return deletionErrs
} |
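// Hedged aside, separate from the file above: the temp URLs produced by
// swiftConn.ObjectTempUrl (used in GetOrRedirect and Sign) follow the documented
// OpenStack scheme: an HMAC-SHA1 over "<METHOD>\n<expires>\n<path>" keyed with
// the X-Account-Meta-Temp-URL-Key set in NewBlobstore. The package and helper
// names below are hypothetical, for illustration only.
package tempurlsketch

import (
	"crypto/hmac"
	"crypto/sha1"
	"encoding/hex"
	"fmt"
)

// tempURLSig reproduces only the signature component of a Swift temp URL.
func tempURLSig(key, method, objectPath string, expiresUnix int64) string {
	mac := hmac.New(sha1.New, []byte(key))
	fmt.Fprintf(mac, "%s\n%d\n%s", method, expiresUnix, objectPath)
	return hex.EncodeToString(mac.Sum(nil))
}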
decode.go | package interp
import (
"bytes"
"errors"
"fmt"
"io"
"io/ioutil"
"math/big"
"strings"
"time"
"github.com/mitchellh/mapstructure"
"github.com/wader/fq/internal/bitioextra"
"github.com/wader/fq/internal/gojqextra"
"github.com/wader/fq/internal/ioextra"
"github.com/wader/fq/pkg/bitio"
"github.com/wader/fq/pkg/decode"
"github.com/wader/fq/pkg/scalar"
"github.com/wader/gojq"
)
func init() {
functionRegisterFns = append(functionRegisterFns, func(i *Interp) []Function {
return []Function{
{"_registry", 0, 0, i._registry, nil},
{"_tovalue", 1, 1, i._toValue, nil},
{"_decode", 2, 2, i._decode, nil},
}
})
}
type expectedExtkeyError struct {
Key string
}
func (err expectedExtkeyError) Error() string {
return "expected a extkey but got: " + err.Key
}
// TODO: redo/rename
// used by _isDecodeValue
type DecodeValue interface {
Value
ToBinary
DecodeValue() *decode.Value
}
func (i *Interp) _registry(c interface{}, a []interface{}) interface{} {
uniqueFormats := map[string]decode.Format{}
groups := map[string]interface{}{}
formats := map[string]interface{}{}
for fsName := range i.registry.Groups {
var group []interface{}
for _, f := range i.registry.MustGroup(fsName) {
group = append(group, f.Name)
if _, ok := uniqueFormats[f.Name]; ok {
continue
}
uniqueFormats[f.Name] = f
}
groups[fsName] = group
}
for _, f := range uniqueFormats {
vf := map[string]interface{}{
"name": f.Name,
"description": f.Description,
"probe_order": f.ProbeOrder,
"root_name": f.RootName,
"root_array": f.RootArray,
"to_repr": f.ToRepr,
}
var dependenciesVs []interface{}
for _, d := range f.Dependencies {
var dNamesVs []interface{}
for _, n := range d.Names {
dNamesVs = append(dNamesVs, n)
}
dependenciesVs = append(dependenciesVs, dNamesVs)
}
if len(dependenciesVs) > 0 {
vf["dependencies"] = dependenciesVs
}
var groupsVs []interface{}
for _, n := range f.Groups {
groupsVs = append(groupsVs, n)
}
if len(groupsVs) > 0 {
vf["groups"] = groupsVs
}
if f.Files != nil {
files := map[string]interface{}{}
entries, err := f.Files.ReadDir(".")
if err != nil {
return err
}
for _, e := range entries {
f, err := f.Files.Open(e.Name())
if err != nil {
return err
}
b, err := ioutil.ReadAll(f)
if err != nil {
return err
}
files[e.Name()] = string(b)
}
vf["files"] = files
}
formats[f.Name] = vf
}
return map[string]interface{}{
"groups": groups,
"formats": formats,
}
}
func (i *Interp) _toValue(c interface{}, a []interface{}) interface{} {
v, _ := toValue(
func() Options { return i.Options(a[0]) },
c,
)
return v
}
func (i *Interp) _decode(c interface{}, a []interface{}) interface{} {
var opts struct {
Filename string `mapstructure:"filename"`
Force bool `mapstructure:"force"`
Progress string `mapstructure:"_progress"`
Remain map[string]interface{} `mapstructure:",remain"`
}
_ = mapstructure.Decode(a[1], &opts)
// TODO: progress hack
// would be nice to move all progress code into decode but it might be
	// tricky to keep track of absolute positions in the underlying readers
// when it uses BitBuf slices, maybe only in Pos()?
if bbf, ok := c.(*openFile); ok {
opts.Filename = bbf.filename
if opts.Progress != "" {
evalProgress := func(c interface{}) {
// {approx_read_bytes: 123, total_size: 123} | opts.Progress
_, _ = i.EvalFuncValues(
i.evalInstance.ctx,
c,
opts.Progress,
nil,
EvalOpts{output: ioextra.DiscardCtxWriter{Ctx: i.evalInstance.ctx}},
)
}
lastProgress := time.Now()
bbf.progressFn = func(approxReadBytes, totalSize int64) {
// make sure to not call too often as it's quite expensive
n := time.Now()
if n.Sub(lastProgress) < 200*time.Millisecond {
return
}
lastProgress = n
evalProgress(
map[string]interface{}{
"approx_read_bytes": approxReadBytes,
"total_size": totalSize,
},
)
}
		// when done decoding, tell the progress function we're done and disable it
defer func() {
bbf.progressFn = nil
evalProgress(nil)
}()
}
}
bv, err := toBinary(c)
if err != nil {
return err
}
formatName, err := toString(a[0])
if err != nil {
return err
}
decodeFormat, err := i.registry.Group(formatName)
if err != nil {
return err
}
dv, _, err := decode.Decode(i.evalInstance.ctx, bv.br, decodeFormat,
decode.Options{
IsRoot: true,
FillGaps: true,
Force: opts.Force,
Range: bv.r,
Description: opts.Filename,
FormatOptions: opts.Remain,
},
)
if dv == nil {
var decodeFormatsErr decode.FormatsError
if errors.As(err, &decodeFormatsErr) {
var vs []interface{}
for _, fe := range decodeFormatsErr.Errs {
vs = append(vs, fe.Value())
}
return valueError{vs}
}
return valueError{err}
}
return makeDecodeValue(dv)
}
func valueKey(name string, a, b func(name string) interface{}) interface{} {
if strings.HasPrefix(name, "_") {
return a(name)
}
return b(name)
}
func valueHas(key interface{}, a func(name string) interface{}, b func(key interface{}) interface{}) interface{} {
stringKey, ok := key.(string)
if ok && strings.HasPrefix(stringKey, "_") {
if err, ok := a(stringKey).(error); ok {
return err
}
return true
}
return b(key)
}
// optsFn is a function (not a precomputed value) because toValue backs tovalue/0
// and needs to be fast: Options is only evaluated when actually required
func toValue(optsFn func() Options, v interface{}) (interface{}, bool) {
switch v := v.(type) {
case JQValueEx:
return v.JQValueToGoJQEx(optsFn), true
case gojq.JQValue:
return v.JQValueToGoJQ(), true
case nil, bool, float64, int, string, *big.Int, map[string]interface{}, []interface{}:
return v, true
default:
return nil, false
}
}
func makeDecodeValue(dv *decode.Value) interface{} {
switch vv := dv.V.(type) {
case *decode.Compound:
if vv.IsArray {
return NewArrayDecodeValue(dv, vv)
}
return NewStructDecodeValue(dv, vv)
case *scalar.S:
switch vv := vv.Value().(type) {
case bitio.ReaderAtSeeker:
// is lazy so that in situations where the decode value is only used to
// create another binary we don't have to read and create a string, ex:
// .unknown0 | tobytes[1:] | ...
return decodeValue{
JQValue: &gojqextra.Lazy{
Type: "string",
IsScalar: true,
Fn: func() (gojq.JQValue, error) {
buf := &bytes.Buffer{}
vvC, err := bitioextra.Clone(vv)
if err != nil {
return nil, err
}
if _, err := bitioextra.CopyBits(buf, vvC); err != nil {
return nil, err | return gojqextra.String([]rune(buf.String())), nil
},
},
decodeValueBase: decodeValueBase{dv},
bitsFormat: true,
}
case bool:
return decodeValue{
JQValue: gojqextra.Boolean(vv),
decodeValueBase: decodeValueBase{dv},
}
case int:
return decodeValue{
JQValue: gojqextra.Number{V: vv},
decodeValueBase: decodeValueBase{dv},
}
case int64:
return decodeValue{
JQValue: gojqextra.Number{V: big.NewInt(vv)},
decodeValueBase: decodeValueBase{dv},
}
case uint64:
return decodeValue{
JQValue: gojqextra.Number{V: new(big.Int).SetUint64(vv)},
decodeValueBase: decodeValueBase{dv},
}
case float64:
return decodeValue{
JQValue: gojqextra.Number{V: vv},
decodeValueBase: decodeValueBase{dv},
}
case string:
return decodeValue{
JQValue: gojqextra.String(vv),
decodeValueBase: decodeValueBase{dv},
}
case []interface{}:
return decodeValue{
JQValue: gojqextra.Array(vv),
decodeValueBase: decodeValueBase{dv},
}
case map[string]interface{}:
return decodeValue{
JQValue: gojqextra.Object(vv),
decodeValueBase: decodeValueBase{dv},
}
case nil:
return decodeValue{
JQValue: gojqextra.Null{},
decodeValueBase: decodeValueBase{dv},
}
case *big.Int:
return decodeValue{
JQValue: gojqextra.Number{V: vv},
decodeValueBase: decodeValueBase{dv},
}
default:
panic(fmt.Sprintf("unreachable vv %#+v", vv))
}
default:
panic(fmt.Sprintf("unreachable dv %#+v", dv))
}
}
type decodeValueBase struct {
dv *decode.Value
}
func (dvb decodeValueBase) DecodeValue() *decode.Value {
return dvb.dv
}
func (dvb decodeValueBase) Display(w io.Writer, opts Options) error { return dump(dvb.dv, w, opts) }
func (dvb decodeValueBase) ToBinary() (Binary, error) {
return Binary{br: dvb.dv.RootReader, r: dvb.dv.InnerRange(), unit: 8}, nil
}
func (decodeValueBase) ExtType() string { return "decode_value" }
func (dvb decodeValueBase) ExtKeys() []string {
kv := []string{
"_start",
"_stop",
"_len",
"_name",
"_root",
"_buffer_root",
"_format_root",
"_parent",
"_actual",
"_sym",
"_description",
"_path",
"_bits",
"_bytes",
"_unknown",
"_index", // TODO: only if parent is array?
}
if _, ok := dvb.dv.V.(*decode.Compound); ok {
kv = append(kv,
"_error",
"_format",
)
if dvb.dv.Index != -1 {
kv = append(kv, "_index")
}
}
return kv
}
func (dvb decodeValueBase) JQValueKey(name string) interface{} {
dv := dvb.dv
switch name {
case "_start":
return big.NewInt(dv.Range.Start)
case "_stop":
return big.NewInt(dv.Range.Stop())
case "_len":
return big.NewInt(dv.Range.Len)
case "_name":
return dv.Name
case "_root":
return makeDecodeValue(dv.Root())
case "_buffer_root":
// TODO: rename?
return makeDecodeValue(dv.BufferRoot())
case "_format_root":
// TODO: rename?
return makeDecodeValue(dv.FormatRoot())
case "_parent":
if dv.Parent == nil {
return nil
}
return makeDecodeValue(dv.Parent)
case "_actual":
switch vv := dv.V.(type) {
case *scalar.S:
jv, ok := gojqextra.ToGoJQValue(vv.Actual)
if !ok {
return fmt.Errorf("can't convert actual value jq value %#+v", vv.Actual)
}
return jv
default:
return nil
}
case "_sym":
switch vv := dv.V.(type) {
case *scalar.S:
jv, ok := gojqextra.ToGoJQValue(vv.Sym)
if !ok {
return fmt.Errorf("can't convert sym value jq value %#+v", vv.Actual)
}
return jv
default:
return nil
}
case "_description":
switch vv := dv.V.(type) {
case *decode.Compound:
if vv.Description == "" {
return nil
}
return vv.Description
case *scalar.S:
if vv.Description == "" {
return nil
}
return vv.Description
default:
return nil
}
case "_path":
return valuePath(dv)
case "_error":
switch vv := dv.V.(type) {
case *decode.Compound:
var formatErr decode.FormatError
if errors.As(vv.Err, &formatErr) {
return formatErr.Value()
}
return vv.Err
default:
return nil
}
case "_bits":
return Binary{
br: dv.RootReader,
r: dv.Range,
unit: 1,
}
case "_bytes":
return Binary{
br: dv.RootReader,
r: dv.Range,
unit: 8,
}
case "_format":
switch vv := dv.V.(type) {
case *decode.Compound:
if vv.Format != nil {
return vv.Format.Name
}
return nil
case *scalar.S:
// TODO: hack, Scalar interface?
switch vv.Actual.(type) {
case map[string]interface{}, []interface{}:
return "json"
default:
return nil
}
default:
return nil
}
case "_unknown":
switch vv := dv.V.(type) {
case *scalar.S:
return vv.Unknown
default:
return false
}
case "_index":
if dv.Index != -1 {
return dv.Index
}
}
return expectedExtkeyError{Key: name}
}
var _ DecodeValue = decodeValue{}
type decodeValue struct {
gojq.JQValue
decodeValueBase
bitsFormat bool
}
func (v decodeValue) JQValueKey(name string) interface{} {
return valueKey(name, v.decodeValueBase.JQValueKey, v.JQValue.JQValueKey)
}
func (v decodeValue) JQValueHas(key interface{}) interface{} {
return valueHas(key, v.decodeValueBase.JQValueKey, v.JQValue.JQValueHas)
}
func (v decodeValue) JQValueToGoJQEx(optsFn func() Options) interface{} {
if !v.bitsFormat {
return v.JQValueToGoJQ()
}
bv, err := v.decodeValueBase.ToBinary()
if err != nil {
return err
}
br, err := bv.toReader()
if err != nil {
return err
}
brC, err := bitioextra.Clone(br)
if err != nil {
return err
}
s, err := optsFn().BitsFormatFn(brC)
if err != nil {
return err
}
return s
}
// decode value array
var _ DecodeValue = ArrayDecodeValue{}
type ArrayDecodeValue struct {
gojqextra.Base
decodeValueBase
*decode.Compound
}
func NewArrayDecodeValue(dv *decode.Value, c *decode.Compound) ArrayDecodeValue {
return ArrayDecodeValue{
decodeValueBase: decodeValueBase{dv},
Base: gojqextra.Base{Typ: "array"},
Compound: c,
}
}
func (v ArrayDecodeValue) JQValueKey(name string) interface{} {
return valueKey(name, v.decodeValueBase.JQValueKey, v.Base.JQValueKey)
}
func (v ArrayDecodeValue) JQValueSliceLen() interface{} { return len(v.Compound.Children) }
func (v ArrayDecodeValue) JQValueLength() interface{} { return len(v.Compound.Children) }
func (v ArrayDecodeValue) JQValueIndex(index int) interface{} {
// -1 outside after string, -2 outside before string
if index < 0 {
return nil
}
return makeDecodeValue((v.Compound.Children)[index])
}
func (v ArrayDecodeValue) JQValueSlice(start int, end int) interface{} {
vs := make([]interface{}, end-start)
for i, e := range (v.Compound.Children)[start:end] {
vs[i] = makeDecodeValue(e)
}
return vs
}
func (v ArrayDecodeValue) JQValueUpdate(key interface{}, u interface{}, delpath bool) interface{} {
return gojqextra.NonUpdatableTypeError{Key: fmt.Sprintf("%v", key), Typ: "array"}
}
func (v ArrayDecodeValue) JQValueEach() interface{} {
props := make([]gojq.PathValue, len(v.Compound.Children))
for i, f := range v.Compound.Children {
props[i] = gojq.PathValue{Path: i, Value: makeDecodeValue(f)}
}
return props
}
func (v ArrayDecodeValue) JQValueKeys() interface{} {
vs := make([]interface{}, len(v.Compound.Children))
for i := range v.Compound.Children {
vs[i] = i
}
return vs
}
func (v ArrayDecodeValue) JQValueHas(key interface{}) interface{} {
return valueHas(
key,
v.decodeValueBase.JQValueKey,
func(key interface{}) interface{} {
intKey, ok := key.(int)
if !ok {
return gojqextra.HasKeyTypeError{L: "array", R: fmt.Sprintf("%v", key)}
}
return intKey >= 0 && intKey < len(v.Compound.Children)
})
}
func (v ArrayDecodeValue) JQValueToGoJQ() interface{} {
vs := make([]interface{}, len(v.Compound.Children))
for i, f := range v.Compound.Children {
vs[i] = makeDecodeValue(f)
}
return vs
}
// decode value struct
var _ DecodeValue = StructDecodeValue{}
type StructDecodeValue struct {
gojqextra.Base
decodeValueBase
*decode.Compound
}
func NewStructDecodeValue(dv *decode.Value, c *decode.Compound) StructDecodeValue {
return StructDecodeValue{
decodeValueBase: decodeValueBase{dv},
Base: gojqextra.Base{Typ: "object"},
Compound: c,
}
}
func (v StructDecodeValue) JQValueLength() interface{} { return len(v.Compound.Children) }
func (v StructDecodeValue) JQValueSliceLen() interface{} { return len(v.Compound.Children) }
func (v StructDecodeValue) JQValueKey(name string) interface{} {
if strings.HasPrefix(name, "_") {
return v.decodeValueBase.JQValueKey(name)
}
for _, f := range v.Compound.Children {
if f.Name == name {
return makeDecodeValue(f)
}
}
return nil
}
func (v StructDecodeValue) JQValueUpdate(key interface{}, u interface{}, delpath bool) interface{} {
return gojqextra.NonUpdatableTypeError{Key: fmt.Sprintf("%v", key), Typ: "object"}
}
func (v StructDecodeValue) JQValueEach() interface{} {
props := make([]gojq.PathValue, len(v.Compound.Children))
for i, f := range v.Compound.Children {
props[i] = gojq.PathValue{Path: f.Name, Value: makeDecodeValue(f)}
}
return props
}
func (v StructDecodeValue) JQValueKeys() interface{} {
vs := make([]interface{}, len(v.Compound.Children))
for i, f := range v.Compound.Children {
vs[i] = f.Name
}
return vs
}
func (v StructDecodeValue) JQValueHas(key interface{}) interface{} {
return valueHas(
key,
v.decodeValueBase.JQValueKey,
func(key interface{}) interface{} {
stringKey, ok := key.(string)
if !ok {
return gojqextra.HasKeyTypeError{L: "object", R: fmt.Sprintf("%v", key)}
}
for _, f := range v.Compound.Children {
if f.Name == stringKey {
return true
}
}
return false
},
)
}
func (v StructDecodeValue) JQValueToGoJQ() interface{} {
vm := make(map[string]interface{}, len(v.Compound.Children))
for _, f := range v.Compound.Children {
vm[f.Name] = makeDecodeValue(f)
}
return vm
} | } |
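// Note (hedged, based on fq's public documentation): the functions registered in
// init above back the jq-side helpers, e.g. `decode` and `decode($format)`;
// a typical CLI entry point that ends up in _decode is:
//
//	fq -d mp3 '.frames[0]' file.mp3
//
// The concrete query and file name are illustrative only.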
get_registered_server.py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'GetRegisteredServerResult',
'AwaitableGetRegisteredServerResult',
'get_registered_server',
]
@pulumi.output_type
class GetRegisteredServerResult:
"""
Registered Server resource.
"""
def __init__(__self__, agent_version=None, cluster_id=None, cluster_name=None, discovery_endpoint_uri=None, friendly_name=None, id=None, last_heart_beat=None, last_operation_name=None, last_workflow_id=None, management_endpoint_uri=None, monitoring_configuration=None, name=None, provisioning_state=None, resource_location=None, server_certificate=None, server_id=None, server_managementt_error_code=None, server_os_version=None, server_role=None, service_location=None, storage_sync_service_uid=None, type=None):
if agent_version and not isinstance(agent_version, str):
raise TypeError("Expected argument 'agent_version' to be a str")
pulumi.set(__self__, "agent_version", agent_version)
if cluster_id and not isinstance(cluster_id, str):
raise TypeError("Expected argument 'cluster_id' to be a str")
pulumi.set(__self__, "cluster_id", cluster_id)
if cluster_name and not isinstance(cluster_name, str):
raise TypeError("Expected argument 'cluster_name' to be a str")
pulumi.set(__self__, "cluster_name", cluster_name)
if discovery_endpoint_uri and not isinstance(discovery_endpoint_uri, str):
raise TypeError("Expected argument 'discovery_endpoint_uri' to be a str")
pulumi.set(__self__, "discovery_endpoint_uri", discovery_endpoint_uri)
if friendly_name and not isinstance(friendly_name, str):
raise TypeError("Expected argument 'friendly_name' to be a str")
pulumi.set(__self__, "friendly_name", friendly_name)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if last_heart_beat and not isinstance(last_heart_beat, str):
raise TypeError("Expected argument 'last_heart_beat' to be a str")
pulumi.set(__self__, "last_heart_beat", last_heart_beat)
if last_operation_name and not isinstance(last_operation_name, str):
raise TypeError("Expected argument 'last_operation_name' to be a str")
pulumi.set(__self__, "last_operation_name", last_operation_name)
if last_workflow_id and not isinstance(last_workflow_id, str):
raise TypeError("Expected argument 'last_workflow_id' to be a str")
pulumi.set(__self__, "last_workflow_id", last_workflow_id)
if management_endpoint_uri and not isinstance(management_endpoint_uri, str):
raise TypeError("Expected argument 'management_endpoint_uri' to be a str")
pulumi.set(__self__, "management_endpoint_uri", management_endpoint_uri)
if monitoring_configuration and not isinstance(monitoring_configuration, str):
raise TypeError("Expected argument 'monitoring_configuration' to be a str")
pulumi.set(__self__, "monitoring_configuration", monitoring_configuration)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if resource_location and not isinstance(resource_location, str):
raise TypeError("Expected argument 'resource_location' to be a str")
pulumi.set(__self__, "resource_location", resource_location)
if server_certificate and not isinstance(server_certificate, str):
raise TypeError("Expected argument 'server_certificate' to be a str")
pulumi.set(__self__, "server_certificate", server_certificate)
if server_id and not isinstance(server_id, str):
raise TypeError("Expected argument 'server_id' to be a str")
pulumi.set(__self__, "server_id", server_id)
if server_managementt_error_code and not isinstance(server_managementt_error_code, int):
raise TypeError("Expected argument 'server_managementt_error_code' to be a int")
pulumi.set(__self__, "server_managementt_error_code", server_managementt_error_code)
if server_os_version and not isinstance(server_os_version, str):
raise TypeError("Expected argument 'server_os_version' to be a str")
pulumi.set(__self__, "server_os_version", server_os_version)
if server_role and not isinstance(server_role, str):
raise TypeError("Expected argument 'server_role' to be a str")
pulumi.set(__self__, "server_role", server_role)
if service_location and not isinstance(service_location, str):
raise TypeError("Expected argument 'service_location' to be a str")
pulumi.set(__self__, "service_location", service_location)
if storage_sync_service_uid and not isinstance(storage_sync_service_uid, str):
raise TypeError("Expected argument 'storage_sync_service_uid' to be a str")
pulumi.set(__self__, "storage_sync_service_uid", storage_sync_service_uid)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="agentVersion")
def agent_version(self) -> Optional[str]:
"""
Registered Server Agent Version
"""
return pulumi.get(self, "agent_version")
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> Optional[str]:
"""
Registered Server clusterId
"""
return pulumi.get(self, "cluster_id")
@property
@pulumi.getter(name="clusterName")
def cluster_name(self) -> Optional[str]:
"""
Registered Server clusterName
"""
return pulumi.get(self, "cluster_name")
@property
@pulumi.getter(name="discoveryEndpointUri")
def discovery_endpoint_uri(self) -> Optional[str]:
"""
Resource discoveryEndpointUri
"""
return pulumi.get(self, "discovery_endpoint_uri")
@property
@pulumi.getter(name="friendlyName")
def friendly_name(self) -> Optional[str]:
"""
Friendly Name
"""
return pulumi.get(self, "friendly_name")
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="lastHeartBeat")
def last_heart_beat(self) -> Optional[str]:
"""
Registered Server last heart beat
"""
return pulumi.get(self, "last_heart_beat")
@property
@pulumi.getter(name="lastOperationName")
def last_operation_name(self) -> Optional[str]:
"""
Resource Last Operation Name
"""
return pulumi.get(self, "last_operation_name")
@property
@pulumi.getter(name="lastWorkflowId")
def last_workflow_id(self) -> Optional[str]:
"""
Registered Server lastWorkflowId
"""
return pulumi.get(self, "last_workflow_id")
@property
@pulumi.getter(name="managementEndpointUri")
def management_endpoint_uri(self) -> Optional[str]:
"""
Management Endpoint Uri
"""
return pulumi.get(self, "management_endpoint_uri")
@property
@pulumi.getter(name="monitoringConfiguration")
def monitoring_configuration(self) -> Optional[str]:
"""
Monitoring Configuration
"""
return pulumi.get(self, "monitoring_configuration")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[str]:
"""
Registered Server Provisioning State
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="resourceLocation")
def resource_location(self) -> Optional[str]:
"""
Resource Location
"""
return pulumi.get(self, "resource_location")
@property
@pulumi.getter(name="serverCertificate")
def server_certificate(self) -> Optional[str]:
"""
Registered Server Certificate
"""
return pulumi.get(self, "server_certificate")
@property
@pulumi.getter(name="serverId")
def server_id(self) -> Optional[str]:
"""
Registered Server serverId
"""
return pulumi.get(self, "server_id")
@property
@pulumi.getter(name="serverManagementtErrorCode")
def server_managementt_error_code(self) -> Optional[int]:
"""
Registered Server Management Error Code
"""
return pulumi.get(self, "server_managementt_error_code")
@property
@pulumi.getter(name="serverOSVersion")
def server_os_version(self) -> Optional[str]:
"""
Registered Server OS Version
"""
return pulumi.get(self, "server_os_version")
@property
@pulumi.getter(name="serverRole")
def server_role(self) -> Optional[str]:
"""
Registered Server serverRole
"""
return pulumi.get(self, "server_role")
@property
@pulumi.getter(name="serviceLocation")
def service_location(self) -> Optional[str]:
"""
Service Location
"""
return pulumi.get(self, "service_location")
@property
@pulumi.getter(name="storageSyncServiceUid")
def storage_sync_service_uid(self) -> Optional[str]:
"""
Registered Server storageSyncServiceUid
"""
return pulumi.get(self, "storage_sync_service_uid")
@property
@pulumi.getter
def | (self) -> str:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
class AwaitableGetRegisteredServerResult(GetRegisteredServerResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetRegisteredServerResult(
agent_version=self.agent_version,
cluster_id=self.cluster_id,
cluster_name=self.cluster_name,
discovery_endpoint_uri=self.discovery_endpoint_uri,
friendly_name=self.friendly_name,
id=self.id,
last_heart_beat=self.last_heart_beat,
last_operation_name=self.last_operation_name,
last_workflow_id=self.last_workflow_id,
management_endpoint_uri=self.management_endpoint_uri,
monitoring_configuration=self.monitoring_configuration,
name=self.name,
provisioning_state=self.provisioning_state,
resource_location=self.resource_location,
server_certificate=self.server_certificate,
server_id=self.server_id,
server_managementt_error_code=self.server_managementt_error_code,
server_os_version=self.server_os_version,
server_role=self.server_role,
service_location=self.service_location,
storage_sync_service_uid=self.storage_sync_service_uid,
type=self.type)
def get_registered_server(resource_group_name: Optional[str] = None,
server_id: Optional[str] = None,
storage_sync_service_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetRegisteredServerResult:
"""
Registered Server resource.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
:param str server_id: GUID identifying the on-premises server.
:param str storage_sync_service_name: Name of Storage Sync Service resource.
"""
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['serverId'] = server_id
__args__['storageSyncServiceName'] = storage_sync_service_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:storagesync/v20180701:getRegisteredServer', __args__, opts=opts, typ=GetRegisteredServerResult).value
return AwaitableGetRegisteredServerResult(
agent_version=__ret__.agent_version,
cluster_id=__ret__.cluster_id,
cluster_name=__ret__.cluster_name,
discovery_endpoint_uri=__ret__.discovery_endpoint_uri,
friendly_name=__ret__.friendly_name,
id=__ret__.id,
last_heart_beat=__ret__.last_heart_beat,
last_operation_name=__ret__.last_operation_name,
last_workflow_id=__ret__.last_workflow_id,
management_endpoint_uri=__ret__.management_endpoint_uri,
monitoring_configuration=__ret__.monitoring_configuration,
name=__ret__.name,
provisioning_state=__ret__.provisioning_state,
resource_location=__ret__.resource_location,
server_certificate=__ret__.server_certificate,
server_id=__ret__.server_id,
server_managementt_error_code=__ret__.server_managementt_error_code,
server_os_version=__ret__.server_os_version,
server_role=__ret__.server_role,
service_location=__ret__.service_location,
storage_sync_service_uid=__ret__.storage_sync_service_uid,
type=__ret__.type)
| type |
manifest.go | package main
var manifest = struct {
Id string
Version string
}{ | Version: "0.0.1",
} | Id: "com.mattermost.webex", |
ports.go | package system
import (
"net"
"time"
)
// IsPortOpen returns (true, nil) if the given TCP port is open on the given host,
// and (false, err) otherwise or when the timeout is reached.
// 'hostPort' is in the format <host>:<port>.
func IsPortOpen(hostPort string, timeout time.Duration) (bool, error) {
conn, err := net.DialTimeout("tcp", hostPort, timeout)
if err != nil |
defer conn.Close()
return true, nil
}
| {
return false, err
} |
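// Usage sketch (hypothetical address; not part of the package):
//
//	open, err := IsPortOpen("db.internal:5432", 2*time.Second)
//	if err != nil {
//		// refused, unreachable, or timed out: open is false
//	} else if open {
//		// TCP connect succeeded; safe to proceed
//	}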
models.py | from django.db import models, connection
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
class DataOceanManager(models.Manager):
# exclude soft-deleted objects from queryset
def get_queryset(self):
return super().get_queryset().exclude(deleted_at__isnull=False)
class DataOceanModel(models.Model):
name = "No name field in model!"
created_at = models.DateTimeField(auto_now_add=True,
help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.')
updated_at = models.DateTimeField(
auto_now=True, null=True, blank=True,
        help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'
)
deleted_at = models.DateTimeField(null=True, blank=True, default=None, editable=False)
objects = DataOceanManager()
include_deleted_objects = models.Manager()
@property
def is_deleted(self):
return bool(self.deleted_at)
def soft_delete(self):
self.deleted_at = timezone.now()
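        # 'updated_at' is listed so its auto_now value is persisted by this partial save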
self.save(update_fields=['deleted_at', 'updated_at'])
@classmethod
def truncate(cls):
with connection.cursor() as c:
c.execute('TRUNCATE TABLE "{0}"'.format(cls._meta.db_table))
@classmethod
def truncate_cascade(cls):
with connection.cursor() as c:
c.execute('TRUNCATE TABLE "{0}" CASCADE'.format(cls._meta.db_table))
def __str__(self):
return self.name
class Meta:
abstract = True
ordering = ['id']
class Status(DataOceanModel):
name = models.CharField('name', max_length=100, unique=True)
class Meta:
verbose_name = _('status')
class Authority(DataOceanModel):
name = models.CharField('name', max_length=500, unique=True)
code = models.CharField('number', max_length=10, unique=True, null=True)
class Meta:
verbose_name = _('registration authority')
class TaxpayerType(DataOceanModel):
name = models.CharField('name', max_length=200, unique=True)
class Meta:
verbose_name = _('taxpayer type')
class Register(DataOceanModel):
|
class Report(DataOceanModel):
registry_name = models.CharField(max_length=20, db_index=True)
download_start = models.DateTimeField(auto_now_add=True)
download_finish = models.DateTimeField(null=True, blank=True)
download_status = models.BooleanField(blank=True, default=False)
download_message = models.CharField(max_length=255, null=True, blank=True)
download_file_name = models.CharField(max_length=255, null=True, blank=True)
download_file_length = models.PositiveIntegerField(blank=True, default=0)
unzip_file_name = models.CharField(max_length=255, null=True, blank=True)
unzip_file_arch_length = models.PositiveIntegerField(blank=True, default=0)
unzip_file_real_length = models.PositiveIntegerField(blank=True, default=0)
unzip_status = models.BooleanField(blank=True, default=False)
unzip_message = models.CharField(max_length=255, null=True, blank=True)
update_start = models.DateTimeField(null=True, blank=True)
update_finish = models.DateTimeField(null=True, blank=True)
update_status = models.BooleanField(blank=True, default=False)
update_message = models.CharField(max_length=300, null=True, blank=True)
records_added = models.IntegerField(blank=True, default=0)
records_changed = models.IntegerField(blank=True, default=0)
records_deleted = models.IntegerField(blank=True, default=0)
invalid_data = models.IntegerField(blank=True, default=0)
@staticmethod
def collect_last_day_reports():
day_ago = timezone.now() - timezone.timedelta(hours=24)
return list(Report.objects.filter(created_at__gt=day_ago))
def __str__(self):
return self.registry_name
class Meta:
ordering = ['id']
verbose_name = _('data update report')
verbose_name_plural = _('data update reports')
| RELEVANT = 'relevant'
OUTDATED = 'outdated'
NOT_SUPPORTED = 'not supported'
STATUSES = [
(RELEVANT, _('Relevant')),
(OUTDATED, _('Outdated')),
(NOT_SUPPORTED, _('Not supported')),
]
name = models.CharField(_('name'), max_length=500, unique=True)
name_eng = models.CharField('name eng', max_length=500, unique=True, null=True)
source_name = models.CharField(_('source'), max_length=300)
source_register_id = models.CharField(_('source ID'), max_length=36, null=True)
source_url_address = models.URLField(_('source url'), max_length=500)
source_api_address = models.URLField(_('source API'), max_length=500, null=True)
api_list = models.CharField(_('API list'), max_length=30, unique=True, null=True, blank=True)
api_detail = models.CharField(_("API detail"), max_length=30, unique=True, null=True, blank=True)
total_records = models.PositiveIntegerField(_('total records'), default=1, blank=True)
status = models.CharField(_('status'), max_length=15, choices=STATUSES, default=RELEVANT,
blank=True)
class Meta:
ordering = ['id']
verbose_name = _('dataset')
verbose_name_plural = _('datasets') |
wrap_test.go | // Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package errors_test
import (
"os"
"testing"
"golang.org/x/exp/errors"
"golang.org/x/exp/errors/fmt"
)
func TestIs(t *testing.T) {
err1 := errors.New("1")
erra := fmt.Errorf("wrap 2: %w", err1)
errb := fmt.Errorf("wrap 3: %w", erra)
erro := errors.Opaque(err1)
errco := fmt.Errorf("opaque: %w", erro)
err3 := errors.New("3")
poser := &poser{"either 1 or 3", func(err error) bool {
return err == err1 || err == err3
}}
testCases := []struct {
err error
target error
match bool
}{
{nil, nil, true},
{err1, nil, false},
{err1, err1, true},
{erra, err1, true},
{errb, err1, true},
{errco, erro, true},
{errco, err1, false},
{erro, erro, true},
{err1, err3, false},
{erra, err3, false},
{errb, err3, false},
{poser, err1, true},
{poser, err3, true},
{poser, erra, false},
{poser, errb, false},
{poser, erro, false},
{poser, errco, false},
}
for _, tc := range testCases {
t.Run("", func(t *testing.T) {
if got := errors.Is(tc.err, tc.target); got != tc.match {
t.Errorf("Is(%v, %v) = %v, want %v", tc.err, tc.target, got, tc.match)
}
})
}
}
type poser struct {
msg string
f func(error) bool
}
func (p *poser) Error() string { return p.msg }
func (p *poser) Is(err error) bool { return p.f(err) }
func (p *poser) As(err interface{}) bool {
switch x := err.(type) {
case **poser:
*x = p
case *errorT:
*x = errorT{}
case **os.PathError:
*x = &os.PathError{}
default:
return false
}
return true
}
func TestAs(t *testing.T) {
var errT errorT
var errP *os.PathError
var p *poser
_, errF := os.Open("non-existing")
testCases := []struct {
err error
target interface{}
match bool
}{{
fmt.Errorf("pittied the fool: %w", errorT{}),
&errT,
true,
}, {
errF,
&errP,
true,
}, {
errors.Opaque(errT),
&errT,
false,
}, {
errorT{},
&errP,
false,
}, {
wrapped{nil},
&errT,
false,
}, {
&poser{"error", nil},
&errT,
true,
}, {
&poser{"path", nil},
&errP,
true,
}, {
&poser{"oh no", nil},
&p,
true,
}, {
&poser{"oo", nil},
&errF,
false,
}}
for _, tc := range testCases {
name := fmt.Sprintf("As(Errorf(..., %v), %v)", tc.err, tc.target)
t.Run(name, func(t *testing.T) {
match := errors.As(tc.err, tc.target)
if match != tc.match {
t.Fatalf("match: got %v; want %v", match, tc.match)
}
if !match {
return
}
if tc.target == nil {
t.Fatalf("non-nil result after match")
}
})
}
}
func | (t *testing.T) {
err1 := errors.New("1")
erra := fmt.Errorf("wrap 2: %w", err1)
erro := errors.Opaque(err1)
testCases := []struct {
err error
want error
}{
{nil, nil},
{wrapped{nil}, nil},
{err1, nil},
{erra, err1},
{fmt.Errorf("wrap 3: %w", erra), erra},
{erro, nil},
{fmt.Errorf("opaque: %w", erro), erro},
}
for _, tc := range testCases {
if got := errors.Unwrap(tc.err); got != tc.want {
t.Errorf("Unwrap(%v) = %v, want %v", tc.err, got, tc.want)
}
}
}
func TestOpaque(t *testing.T) {
got := fmt.Errorf("foo: %+v", errors.Opaque(errorT{}))
want := "foo: errorT"
if got.Error() != want {
t.Errorf("error without Format: got %v; want %v", got, want)
}
got = fmt.Errorf("foo: %+v", errors.Opaque(errorD{}))
want = "foo: errorD:\n detail"
if got.Error() != want {
t.Errorf("error with Format: got %v; want %v", got, want)
}
}
type errorT struct{}
func (errorT) Error() string { return "errorT" }
type errorD struct{}
func (errorD) Error() string { return "errorD" }
func (errorD) FormatError(p errors.Printer) error {
p.Print("errorD")
p.Detail()
p.Print("detail")
return nil
}
type wrapped struct{ error }
func (wrapped) Error() string { return "wrapped" }
func (wrapped) Unwrap() error { return nil }
| TestUnwrap |
stomp-config.d.ts | import { StompHeaders } from './stomp-headers';
import { closeEventCallbackType, debugFnType, frameCallbackType, messageCallbackType, wsErrorCallbackType } from './types';
import { Versions } from './versions';
/**
* Configuration options for STOMP Client, each key corresponds to
* field by the same name in {@link Client}. This can be passed to
* the constructor of {@link Client} or to [Client#configure]{@link Client#configure}.
*
* Part of `@stomp/stompjs`.
*/
export declare class StompConfig {
/**
* See [Client#brokerURL]{@link Client#brokerURL}.
*/
brokerURL?: string;
/**
     * See [Client#stompVersions]{@link Client#stompVersions}.
*/
stompVersions?: Versions;
/**
* See [Client#webSocketFactory]{@link Client#webSocketFactory}.
*/
webSocketFactory?: () => any;
/**
* See [Client#reconnectDelay]{@link Client#reconnectDelay}.
*/
reconnectDelay?: number;
/**
* See [Client#heartbeatIncoming]{@link Client#heartbeatIncoming}.
*/
heartbeatIncoming?: number;
/**
* See [Client#heartbeatOutgoing]{@link Client#heartbeatOutgoing}.
*/
heartbeatOutgoing?: number;
/**
* See [Client#useLegacyHeartbeatLogic]{@link Client#useLegacyHeartbeatLogic}.
*/
useLegacyHeartbeatLogic?: boolean;
/**
* See [Client#splitLargeFrames]{@link Client#splitLargeFrames}.
*/
splitLargeFrames?: boolean;
/**
* See [Client#forceBinaryWSFrames]{@link Client#forceBinaryWSFrames}.
*/
forceBinaryWSFrames?: boolean;
/**
* See [Client#appendMissingNULLonIncoming]{@link Client#appendMissingNULLonIncoming}.
*/
appendMissingNULLonIncoming?: boolean;
/**
* See [Client#maxWebSocketChunkSize]{@link Client#maxWebSocketChunkSize}.
*/
maxWebSocketChunkSize?: number;
/**
* See [Client#connectHeaders]{@link Client#connectHeaders}.
*/
connectHeaders?: StompHeaders;
/**
* See [Client#disconnectHeaders]{@link Client#disconnectHeaders}.
*/
disconnectHeaders?: StompHeaders;
/**
* See [Client#onUnhandledMessage]{@link Client#onUnhandledMessage}.
*/
onUnhandledMessage?: messageCallbackType;
/**
* See [Client#onUnhandledReceipt]{@link Client#onUnhandledReceipt}.
*/
onUnhandledReceipt?: frameCallbackType;
/**
* See [Client#onUnhandledFrame]{@link Client#onUnhandledFrame}.
*/
onUnhandledFrame?: frameCallbackType;
/**
* See [Client#beforeConnect]{@link Client#beforeConnect}.
*/
beforeConnect?: () => void | Promise<void>;
/**
* See [Client#onConnect]{@link Client#onConnect}.
*/
onConnect?: frameCallbackType;
/**
* See [Client#onDisconnect]{@link Client#onDisconnect}.
*/
onDisconnect?: frameCallbackType;
/** | /**
* See [Client#onWebSocketClose]{@link Client#onWebSocketClose}.
*/
onWebSocketClose?: closeEventCallbackType;
/**
* See [Client#onWebSocketError]{@link Client#onWebSocketError}.
*/
onWebSocketError?: wsErrorCallbackType;
/**
* See [Client#logRawCommunication]{@link Client#logRawCommunication}.
*/
logRawCommunication?: boolean;
/**
* See [Client#debug]{@link Client#debug}.
*/
debug?: debugFnType;
} | * See [Client#onStompError]{@link Client#onStompError}.
*/
onStompError?: frameCallbackType; |
config.py | import os
class Config(object):
|
class ProductionConfig(Config):
DEBUG = False
DEVELOPMENT = False
| DEBUG = True
DEVELOPMENT = True
PG_USER = os.environ.get("PG_USER", "")
PG_PASS = os.environ.get("PG_PASS", "")
PG_HOST = os.environ.get("PG_HOST", "")
PG_PORT = os.environ.get("PG_PORT", "")
PG_DB = os.environ.get("PG_DB", "")
SENDGRID_KEY = os.environ.get("SENDGRID_KEY", "") |
context_test.go | // Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package context
import (
"fmt"
"math/rand"
"runtime"
"strings"
"sync"
"time"
)
type testingT interface {
Error(args ...interface{})
Errorf(format string, args ...interface{})
Fail()
FailNow()
Failed() bool
Fatal(args ...interface{})
Fatalf(format string, args ...interface{})
Log(args ...interface{})
Logf(format string, args ...interface{})
Name() string
Skip(args ...interface{})
SkipNow()
Skipf(format string, args ...interface{})
Skipped() bool
}
// otherContext is a Context that's not one of the types defined in context.go.
// This lets us test code paths that differ based on the underlying type of the
// Context.
type otherContext struct {
Context
}
func XTestBackground(t testingT) {
c := Background()
if c == nil {
t.Fatalf("Background returned nil")
}
select {
case x := <-c.Done():
t.Errorf("<-c.Done() == %v want nothing (it should block)", x)
default:
}
if got, want := fmt.Sprint(c), "context.Background"; got != want {
t.Errorf("Background().String() = %q want %q", got, want)
}
}
func XTestTODO(t testingT) {
c := TODO()
if c == nil {
t.Fatalf("TODO returned nil")
}
select {
case x := <-c.Done():
t.Errorf("<-c.Done() == %v want nothing (it should block)", x)
default:
}
if got, want := fmt.Sprint(c), "context.TODO"; got != want {
t.Errorf("TODO().String() = %q want %q", got, want)
}
}
func XTestWithCancel(t testingT) {
c1, cancel := WithCancel(Background())
if got, want := fmt.Sprint(c1), "context.Background.WithCancel"; got != want {
t.Errorf("c1.String() = %q want %q", got, want)
}
o := otherContext{c1}
c2, _ := WithCancel(o)
contexts := []Context{c1, o, c2}
for i, c := range contexts {
if d := c.Done(); d == nil {
t.Errorf("c[%d].Done() == %v want non-nil", i, d)
}
if e := c.Err(); e != nil {
t.Errorf("c[%d].Err() == %v want nil", i, e)
}
select {
case x := <-c.Done():
t.Errorf("<-c.Done() == %v want nothing (it should block)", x)
default:
}
}
cancel()
time.Sleep(100 * time.Millisecond) // let cancelation propagate
for i, c := range contexts {
select {
case <-c.Done():
default:
t.Errorf("<-c[%d].Done() blocked, but shouldn't have", i)
}
if e := c.Err(); e != Canceled {
t.Errorf("c[%d].Err() == %v want %v", i, e, Canceled)
}
}
}
func contains(m map[canceler]struct{}, key canceler) bool {
_, ret := m[key]
return ret
}
func XTestParentFinishesChild(t testingT) {
// Context tree:
// parent -> cancelChild
// parent -> valueChild -> timerChild
parent, cancel := WithCancel(Background())
cancelChild, stop := WithCancel(parent)
defer stop()
valueChild := WithValue(parent, "key", "value")
timerChild, stop := WithTimeout(valueChild, 10000*time.Hour)
defer stop()
select {
case x := <-parent.Done():
t.Errorf("<-parent.Done() == %v want nothing (it should block)", x)
case x := <-cancelChild.Done():
t.Errorf("<-cancelChild.Done() == %v want nothing (it should block)", x)
case x := <-timerChild.Done():
t.Errorf("<-timerChild.Done() == %v want nothing (it should block)", x)
case x := <-valueChild.Done():
t.Errorf("<-valueChild.Done() == %v want nothing (it should block)", x)
default:
}
// The parent's children should contain the two cancelable children.
pc := parent.(*cancelCtx)
cc := cancelChild.(*cancelCtx)
tc := timerChild.(*timerCtx)
pc.mu.Lock()
if len(pc.children) != 2 || !contains(pc.children, cc) || !contains(pc.children, tc) {
t.Errorf("bad linkage: pc.children = %v, want %v and %v",
pc.children, cc, tc)
}
pc.mu.Unlock()
if p, ok := parentCancelCtx(cc.Context); !ok || p != pc {
t.Errorf("bad linkage: parentCancelCtx(cancelChild.Context) = %v, %v want %v, true", p, ok, pc)
}
if p, ok := parentCancelCtx(tc.Context); !ok || p != pc {
t.Errorf("bad linkage: parentCancelCtx(timerChild.Context) = %v, %v want %v, true", p, ok, pc)
}
cancel()
pc.mu.Lock()
if len(pc.children) != 0 {
t.Errorf("pc.cancel didn't clear pc.children = %v", pc.children)
}
pc.mu.Unlock()
// parent and children should all be finished.
check := func(ctx Context, name string) {
select {
case <-ctx.Done():
default:
t.Errorf("<-%s.Done() blocked, but shouldn't have", name)
}
if e := ctx.Err(); e != Canceled {
t.Errorf("%s.Err() == %v want %v", name, e, Canceled)
}
}
check(parent, "parent")
check(cancelChild, "cancelChild")
check(valueChild, "valueChild")
check(timerChild, "timerChild")
// WithCancel should return a canceled context on a canceled parent.
precanceledChild := WithValue(parent, "key", "value")
select {
case <-precanceledChild.Done():
default:
t.Errorf("<-precanceledChild.Done() blocked, but shouldn't have")
}
if e := precanceledChild.Err(); e != Canceled {
t.Errorf("precanceledChild.Err() == %v want %v", e, Canceled)
}
}
func XTestChildFinishesFirst(t testingT) {
cancelable, stop := WithCancel(Background())
defer stop()
for _, parent := range []Context{Background(), cancelable} {
child, cancel := WithCancel(parent)
select {
case x := <-parent.Done():
t.Errorf("<-parent.Done() == %v want nothing (it should block)", x)
case x := <-child.Done():
t.Errorf("<-child.Done() == %v want nothing (it should block)", x)
default:
}
cc := child.(*cancelCtx)
pc, pcok := parent.(*cancelCtx) // pcok == false when parent == Background()
if p, ok := parentCancelCtx(cc.Context); ok != pcok || (ok && pc != p) {
t.Errorf("bad linkage: parentCancelCtx(cc.Context) = %v, %v want %v, %v", p, ok, pc, pcok)
}
if pcok {
pc.mu.Lock()
if len(pc.children) != 1 || !contains(pc.children, cc) {
t.Errorf("bad linkage: pc.children = %v, cc = %v", pc.children, cc)
}
pc.mu.Unlock()
}
cancel()
if pcok {
pc.mu.Lock()
if len(pc.children) != 0 {
t.Errorf("child's cancel didn't remove self from pc.children = %v", pc.children)
}
pc.mu.Unlock()
}
// child should be finished.
select {
case <-child.Done():
default:
t.Errorf("<-child.Done() blocked, but shouldn't have")
}
if e := child.Err(); e != Canceled {
t.Errorf("child.Err() == %v want %v", e, Canceled)
}
// parent should not be finished.
select {
case x := <-parent.Done():
t.Errorf("<-parent.Done() == %v want nothing (it should block)", x)
default:
}
if e := parent.Err(); e != nil {
t.Errorf("parent.Err() == %v want nil", e)
}
}
}
func testDeadline(c Context, name string, failAfter time.Duration, t testingT) {
select {
case <-time.After(failAfter):
t.Fatalf("%s: context should have timed out", name)
case <-c.Done():
}
if e := c.Err(); e != DeadlineExceeded {
t.Errorf("%s: c.Err() == %v; want %v", name, e, DeadlineExceeded)
}
}
func XTestDeadline(t testingT) {
c, _ := WithDeadline(Background(), time.Now().Add(50*time.Millisecond))
if got, prefix := fmt.Sprint(c), "context.Background.WithDeadline("; !strings.HasPrefix(got, prefix) {
t.Errorf("c.String() = %q want prefix %q", got, prefix)
}
testDeadline(c, "WithDeadline", time.Second, t)
c, _ = WithDeadline(Background(), time.Now().Add(50*time.Millisecond))
o := otherContext{c}
testDeadline(o, "WithDeadline+otherContext", time.Second, t)
c, _ = WithDeadline(Background(), time.Now().Add(50*time.Millisecond))
o = otherContext{c}
c, _ = WithDeadline(o, time.Now().Add(4*time.Second))
testDeadline(c, "WithDeadline+otherContext+WithDeadline", 2*time.Second, t)
c, _ = WithDeadline(Background(), time.Now().Add(-time.Millisecond))
testDeadline(c, "WithDeadline+inthepast", time.Second, t)
c, _ = WithDeadline(Background(), time.Now())
testDeadline(c, "WithDeadline+now", time.Second, t)
}
func XTestTimeout(t testingT) {
c, _ := WithTimeout(Background(), 50*time.Millisecond)
if got, prefix := fmt.Sprint(c), "context.Background.WithDeadline("; !strings.HasPrefix(got, prefix) {
t.Errorf("c.String() = %q want prefix %q", got, prefix)
}
testDeadline(c, "WithTimeout", time.Second, t)
c, _ = WithTimeout(Background(), 50*time.Millisecond)
o := otherContext{c}
testDeadline(o, "WithTimeout+otherContext", time.Second, t)
c, _ = WithTimeout(Background(), 50*time.Millisecond)
o = otherContext{c}
c, _ = WithTimeout(o, 3*time.Second)
testDeadline(c, "WithTimeout+otherContext+WithTimeout", 2*time.Second, t)
}
func XTestCanceledTimeout(t testingT) {
c, _ := WithTimeout(Background(), time.Second)
o := otherContext{c}
c, cancel := WithTimeout(o, 2*time.Second)
cancel()
time.Sleep(100 * time.Millisecond) // let cancelation propagate
select {
case <-c.Done():
default:
t.Errorf("<-c.Done() blocked, but shouldn't have")
}
if e := c.Err(); e != Canceled {
t.Errorf("c.Err() == %v want %v", e, Canceled)
}
}
type key1 int
type key2 int
var k1 = key1(1)
var k2 = key2(1) // same int as k1, different type
var k3 = key2(3) // same type as k2, different int
func XTestValues(t testingT) {
check := func(c Context, nm, v1, v2, v3 string) {
if v, ok := c.Value(k1).(string); ok == (len(v1) == 0) || v != v1 {
t.Errorf(`%s.Value(k1).(string) = %q, %t want %q, %t`, nm, v, ok, v1, len(v1) != 0)
}
if v, ok := c.Value(k2).(string); ok == (len(v2) == 0) || v != v2 {
t.Errorf(`%s.Value(k2).(string) = %q, %t want %q, %t`, nm, v, ok, v2, len(v2) != 0)
}
if v, ok := c.Value(k3).(string); ok == (len(v3) == 0) || v != v3 {
t.Errorf(`%s.Value(k3).(string) = %q, %t want %q, %t`, nm, v, ok, v3, len(v3) != 0)
}
}
c0 := Background()
check(c0, "c0", "", "", "")
c1 := WithValue(Background(), k1, "c1k1")
check(c1, "c1", "c1k1", "", "")
if got, want := fmt.Sprint(c1), `context.Background.WithValue(1, "c1k1")`; got != want {
t.Errorf("c.String() = %q want %q", got, want)
}
c2 := WithValue(c1, k2, "c2k2")
check(c2, "c2", "c1k1", "c2k2", "")
c3 := WithValue(c2, k3, "c3k3")
check(c3, "c2", "c1k1", "c2k2", "c3k3")
c4 := WithValue(c3, k1, nil)
check(c4, "c4", "", "c2k2", "c3k3")
o0 := otherContext{Background()}
check(o0, "o0", "", "", "")
o1 := otherContext{WithValue(Background(), k1, "c1k1")}
check(o1, "o1", "c1k1", "", "")
o2 := WithValue(o1, k2, "o2k2")
check(o2, "o2", "c1k1", "o2k2", "")
o3 := otherContext{c4}
check(o3, "o3", "", "c2k2", "c3k3")
o4 := WithValue(o3, k3, nil)
check(o4, "o4", "", "c2k2", "")
}
func XTestAllocs(t testingT, testingShort func() bool, testingAllocsPerRun func(int, func()) float64) {
bg := Background()
for _, test := range []struct {
desc string
f func()
limit float64
gccgoLimit float64
}{
{
desc: "Background()",
f: func() { Background() },
limit: 0,
gccgoLimit: 0,
},
{
desc: fmt.Sprintf("WithValue(bg, %v, nil)", k1),
f: func() {
c := WithValue(bg, k1, nil)
c.Value(k1)
},
limit: 3,
gccgoLimit: 3,
},
{
desc: "WithTimeout(bg, 15*time.Millisecond)",
f: func() {
c, _ := WithTimeout(bg, 15*time.Millisecond)
<-c.Done()
},
limit: 8,
gccgoLimit: 15,
},
{
desc: "WithCancel(bg)",
f: func() {
c, cancel := WithCancel(bg)
cancel()
<-c.Done()
},
limit: 5,
gccgoLimit: 8,
},
{
desc: "WithTimeout(bg, 5*time.Millisecond)",
f: func() {
c, cancel := WithTimeout(bg, 5*time.Millisecond)
cancel()
<-c.Done()
},
limit: 8,
gccgoLimit: 25,
},
} {
limit := test.limit
if runtime.Compiler == "gccgo" {
// gccgo does not yet do escape analysis.
// TODO(iant): Remove this when gccgo does do escape analysis.
limit = test.gccgoLimit
}
numRuns := 100
if testingShort() { | t.Errorf("%s allocs = %f want %d", test.desc, n, int(limit))
}
}
}
func XTestSimultaneousCancels(t testingT) {
root, cancel := WithCancel(Background())
m := map[Context]CancelFunc{root: cancel}
q := []Context{root}
// Create a tree of contexts.
for len(q) != 0 && len(m) < 100 {
parent := q[0]
q = q[1:]
for i := 0; i < 4; i++ {
ctx, cancel := WithCancel(parent)
m[ctx] = cancel
q = append(q, ctx)
}
}
// Start all the cancels in a random order.
var wg sync.WaitGroup
wg.Add(len(m))
for _, cancel := range m {
go func(cancel CancelFunc) {
cancel()
wg.Done()
}(cancel)
}
// Wait on all the contexts in a random order.
for ctx := range m {
select {
case <-ctx.Done():
case <-time.After(1 * time.Second):
buf := make([]byte, 10<<10)
n := runtime.Stack(buf, true)
t.Fatalf("timed out waiting for <-ctx.Done(); stacks:\n%s", buf[:n])
}
}
// Wait for all the cancel functions to return.
done := make(chan struct{})
go func() {
wg.Wait()
close(done)
}()
select {
case <-done:
case <-time.After(1 * time.Second):
buf := make([]byte, 10<<10)
n := runtime.Stack(buf, true)
t.Fatalf("timed out waiting for cancel functions; stacks:\n%s", buf[:n])
}
}
func XTestInterlockedCancels(t testingT) {
parent, cancelParent := WithCancel(Background())
child, cancelChild := WithCancel(parent)
go func() {
		<-parent.Done()
cancelChild()
}()
cancelParent()
select {
case <-child.Done():
case <-time.After(1 * time.Second):
buf := make([]byte, 10<<10)
n := runtime.Stack(buf, true)
t.Fatalf("timed out waiting for child.Done(); stacks:\n%s", buf[:n])
}
}
func XTestLayersCancel(t testingT) {
testLayers(t, time.Now().UnixNano(), false)
}
func XTestLayersTimeout(t testingT) {
testLayers(t, time.Now().UnixNano(), true)
}
func testLayers(t testingT, seed int64, testTimeout bool) {
rand.Seed(seed)
errorf := func(format string, a ...interface{}) {
t.Errorf(fmt.Sprintf("seed=%d: %s", seed, format), a...)
}
const (
timeout = 200 * time.Millisecond
minLayers = 30
)
type value int
var (
vals []*value
cancels []CancelFunc
numTimers int
ctx = Background()
)
for i := 0; i < minLayers || numTimers == 0 || len(cancels) == 0 || len(vals) == 0; i++ {
switch rand.Intn(3) {
case 0:
v := new(value)
ctx = WithValue(ctx, v, v)
vals = append(vals, v)
case 1:
var cancel CancelFunc
ctx, cancel = WithCancel(ctx)
cancels = append(cancels, cancel)
case 2:
var cancel CancelFunc
ctx, cancel = WithTimeout(ctx, timeout)
cancels = append(cancels, cancel)
numTimers++
}
}
checkValues := func(when string) {
for _, key := range vals {
if val := ctx.Value(key).(*value); key != val {
errorf("%s: ctx.Value(%p) = %p want %p", when, key, val, key)
}
}
}
select {
case <-ctx.Done():
errorf("ctx should not be canceled yet")
default:
}
if s, prefix := fmt.Sprint(ctx), "context.Background."; !strings.HasPrefix(s, prefix) {
t.Errorf("ctx.String() = %q want prefix %q", s, prefix)
}
t.Log(ctx)
checkValues("before cancel")
if testTimeout {
select {
case <-ctx.Done():
case <-time.After(timeout + time.Second):
errorf("ctx should have timed out")
}
checkValues("after timeout")
} else {
cancel := cancels[rand.Intn(len(cancels))]
cancel()
select {
case <-ctx.Done():
default:
errorf("ctx should be canceled")
}
checkValues("after cancel")
}
}
func XTestCancelRemoves(t testingT) {
checkChildren := func(when string, ctx Context, want int) {
if got := len(ctx.(*cancelCtx).children); got != want {
t.Errorf("%s: context has %d children, want %d", when, got, want)
}
}
ctx, _ := WithCancel(Background())
checkChildren("after creation", ctx, 0)
_, cancel := WithCancel(ctx)
checkChildren("with WithCancel child ", ctx, 1)
cancel()
checkChildren("after canceling WithCancel child", ctx, 0)
ctx, _ = WithCancel(Background())
checkChildren("after creation", ctx, 0)
_, cancel = WithTimeout(ctx, 60*time.Minute)
checkChildren("with WithTimeout child ", ctx, 1)
cancel()
checkChildren("after canceling WithTimeout child", ctx, 0)
}
func XTestWithCancelCanceledParent(t testingT) {
parent, pcancel := WithCancel(Background())
pcancel()
c, _ := WithCancel(parent)
select {
case <-c.Done():
case <-time.After(5 * time.Second):
t.Fatal("timeout waiting for Done")
}
if got, want := c.Err(), Canceled; got != want {
t.Errorf("child not cancelled; got = %v, want = %v", got, want)
}
}
func XTestWithValueChecksKey(t testingT) {
panicVal := recoveredValue(func() { WithValue(Background(), []byte("foo"), "bar") })
if panicVal == nil {
t.Error("expected panic")
}
panicVal = recoveredValue(func() { WithValue(Background(), nil, "bar") })
if got, want := fmt.Sprint(panicVal), "nil key"; got != want {
t.Errorf("panic = %q; want %q", got, want)
}
}
func recoveredValue(fn func()) (v interface{}) {
defer func() { v = recover() }()
fn()
return
}
func XTestDeadlineExceededSupportsTimeout(t testingT) {
i, ok := DeadlineExceeded.(interface {
Timeout() bool
})
if !ok {
t.Fatal("DeadlineExceeded does not support Timeout interface")
}
if !i.Timeout() {
t.Fatal("wrong value for timeout")
}
} | numRuns = 10
}
if n := testingAllocsPerRun(numRuns, test.f); n > limit { |
auth_server.go | package server
import (
"context"
"strings"
"github.com/golang/protobuf/ptypes/empty"
api "github.com/kubeflow/pipelines/backend/api/go_client"
"github.com/kubeflow/pipelines/backend/src/apiserver/common"
"github.com/kubeflow/pipelines/backend/src/apiserver/resource"
"github.com/kubeflow/pipelines/backend/src/common/util"
authorizationv1 "k8s.io/api/authorization/v1"
)
var rbacResourceTypeToGroup = map[string]string{
common.RbacResourceTypePipelines: common.RbacPipelinesGroup,
common.RbacResourceTypeExperiments: common.RbacPipelinesGroup,
common.RbacResourceTypeRuns: common.RbacPipelinesGroup,
common.RbacResourceTypeJobs: common.RbacPipelinesGroup,
common.RbacResourceTypeViewers: common.RbacKubeflowGroup,
common.RbacResourceTypeVisualizations: common.RbacPipelinesGroup,
}
type AuthServer struct {
resourceManager *resource.ResourceManager
}
func (s *AuthServer) Authorize(ctx context.Context, request *api.AuthorizeRequest) (
*empty.Empty, error) {
err := ValidateAuthorizeRequest(request)
if err != nil {
return nil, util.Wrap(err, "Authorize request is not valid")
}
namespace := strings.ToLower(request.GetNamespace())
verb := strings.ToLower(request.GetVerb().String())
resource := strings.ToLower(request.GetResources().String())
resourceAttributes := &authorizationv1.ResourceAttributes{
Namespace: namespace,
Verb: verb,
Group: rbacResourceTypeToGroup[resource],
Version: common.RbacPipelinesVersion,
Resource: resource,
Subresource: "",
Name: "",
}
err = isAuthorized(s.resourceManager, ctx, resourceAttributes)
if err != nil {
return nil, util.Wrap(err, "Failed to authorize the request")
}
return &empty.Empty{}, nil
}
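// Illustrative call flow (a sketch, not part of the server API). The enum
// values RUNS and GET below are hypothetical placeholders; only the
// UNASSIGNED_* values are referenced by this file.
//
//	authServer := NewAuthServer(resourceManager)
//	req := &api.AuthorizeRequest{
//		Namespace: "kubeflow-user",
//		Resources: api.AuthorizeRequest_RUNS, // hypothetical enum value
//		Verb:      api.AuthorizeRequest_GET,  // hypothetical enum value
//	}
//	if _, err := authServer.Authorize(ctx, req); err != nil {
//		// request was invalid or the user is not authorized
//	}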
func ValidateAuthorizeRequest(request *api.AuthorizeRequest) error |
func NewAuthServer(resourceManager *resource.ResourceManager) *AuthServer {
return &AuthServer{resourceManager: resourceManager}
}
| {
if request == nil {
return util.NewInvalidInputError("request object is empty.")
}
if len(request.Namespace) == 0 {
return util.NewInvalidInputError("Namespace is empty. Please specify a valid namespace.")
}
if request.Resources == api.AuthorizeRequest_UNASSIGNED_RESOURCES {
return util.NewInvalidInputError("Resources not specified. Please specify a valid resources.")
}
if request.Verb == api.AuthorizeRequest_UNASSIGNED_VERB {
return util.NewInvalidInputError("Verb not specified. Please specify a valid verb.")
}
return nil
} |
version.py | from __future__ import unicode_literals
import datetime
import os
import subprocess
from django.utils.lru_cache import lru_cache
def get_version(version=None):
"Returns a PEP 386-compliant version number from VERSION."
version = get_complete_version(version)
# Now build the two parts of the version number:
# major = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
major = get_major_version(version)
sub = ''
if version[3] == 'alpha' and version[4] == 0:
git_changeset = get_git_changeset()
if git_changeset:
sub = '.dev%s' % git_changeset
elif version[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[version[3]] + str(version[4])
return str(major + sub)
def get_major_version(version=None):
"Returns major version from VERSION."
version = get_complete_version(version)
parts = 2 if version[2] == 0 else 3
major = '.'.join(str(x) for x in version[:parts])
return major
def get_complete_version(version=None):
"""Returns a tuple of the django version. If version argument is non-empty,
then checks for correctness of the tuple provided.
"""
if version is None:
from django import VERSION as version
else:
assert len(version) == 5
assert version[3] in ('alpha', 'beta', 'rc', 'final')
return version
def get_docs_version(version=None):
version = get_complete_version(version)
if version[3] != 'final':
return 'dev'
else:
return '%d.%d' % version[:2]
@lru_cache()
def get_git_changeset():
"""Returns a numeric identifier of the latest git changeset.
The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.
This value isn't guaranteed to be unique, but collisions are very unlikely,
so it's sufficient for generating the development version numbers.
"""
repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
git_log = subprocess.Popen('git log --pretty=format:%ct --quiet -1 HEAD',
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=True, cwd=repo_dir, universal_newlines=True)
timestamp = git_log.communicate()[0]
try:
timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))
except ValueError:
return None | return timestamp.strftime('%Y%m%d%H%M%S') |
|
manta.go | /*
The gomanta/manta package interacts with the Manta API (http://apidocs.joyent.com/manta/api.html).
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
Copyright (c) 2016 Joyent Inc.
Written by Daniele Stroppa <[email protected]>
*/
package manta
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"net/http"
"path"
"time"
"github.com/joyent/gocommon/client"
"github.com/joyent/gocommon/errors"
jh "github.com/joyent/gocommon/http"
)
const (
// The default version of the Manta API to use
DefaultAPIVersion = "7.1"
// Manta API URL parts
apiStorage = "stor"
apiJobs = "jobs"
apiJobsLive = "live"
apiJobsIn = "in"
apiJobsOut = "out"
apiJobsFail = "fail"
apiJobsErr = "err"
apiJobsEnd = "end"
apiJobsCancel = "cancel"
apiJobsStatus = "status"
)
// Client provides a means to access Joyent Manta
type Client struct {
client client.Client
}
// New creates a new Client.
func New(client client.Client) *Client {
return &Client{client}
}
// request represents an API request
type request struct {
method string
url string
reqValue interface{}
reqHeader http.Header
reqReader io.Reader
reqLength int
resp interface{}
respHeader *http.Header
expectedStatus int
}
// Helper method to send an API request
func (c *Client) sendRequest(req request) (*jh.ResponseData, error) {
request := jh.RequestData{
ReqValue: req.reqValue,
ReqHeaders: req.reqHeader,
ReqReader: req.reqReader,
ReqLength: req.reqLength,
}
if req.expectedStatus == 0 {
req.expectedStatus = http.StatusOK
}
respData := jh.ResponseData{
RespValue: req.resp,
RespHeaders: req.respHeader,
ExpectedStatus: []int{req.expectedStatus},
}
err := c.client.SendRequest(req.method, req.url, "", &request, &respData)
return &respData, err
}
// Helper method to create the API URL
func | (parts ...string) string {
return path.Join(parts...)
}
// ListDirectoryOpts represents the options that can be specified
// when listing a directory.
type ListDirectoryOpts struct {
Limit int `json:"limit"` // Limit to the number of records returned (default and max is 1000)
Marker string `json:"marker"` // Key name at which to start the next listing
}
// Entry represents an object stored in Manta, either a file or a directory
type Entry struct {
Name string `json:"name"` // Entry name
Etag string `json:"etag,omitempty"` // If type is 'object', object UUID
Size int `json:"size,omitempty"` // If type is 'object', object size (content-length)
Type string `json:"type"` // Entry type, one of 'directory' or 'object'
Mtime string `json:"mtime"` // ISO8601 timestamp of the last update
}
// Creates a directory at the specified path. Any parent directory must exist.
// See API docs: http://apidocs.joyent.com/manta/api.html#PutDirectory
func (c *Client) PutDirectory(path string) error {
requestHeaders := make(http.Header)
requestHeaders.Set("Content-Type", "application/json; type=directory")
requestHeaders.Set("Accept", "*/*")
req := request{
method: client.PUT,
url: makeURL(apiStorage, path),
reqHeader: requestHeaders,
expectedStatus: http.StatusNoContent,
}
if _, err := c.sendRequest(req); err != nil {
return errors.Newf(err, "failed to create directory: %s", path)
}
return nil
}
// Returns the content of the specified directory, using the specified options.
// See API docs: http://apidocs.joyent.com/manta/api.html#ListDirectory
func (c *Client) ListDirectory(directory string, opts ListDirectoryOpts) ([]Entry, error) {
var resp []Entry
requestHeaders := make(http.Header)
requestHeaders.Set("Accept", "*/*")
req := request{
method: client.GET,
url: makeURL(apiStorage, directory),
reqHeader: requestHeaders,
resp: &resp,
reqValue: opts,
}
if _, err := c.sendRequest(req); err != nil {
return nil, errors.Newf(err, "failed to list directory %s", directory)
}
return resp, nil
}
// Deletes the specified directory. Directory must be empty.
// See API docs: http://apidocs.joyent.com/manta/api.html#DeleteDirectory
func (c *Client) DeleteDirectory(path string) error {
req := request{
method: client.DELETE,
url: makeURL(apiStorage, path),
expectedStatus: http.StatusNoContent,
}
if _, err := c.sendRequest(req); err != nil {
return errors.Newf(err, "failed to delete directory %s", path)
}
return nil
}
// Creates an object at the specified path. Any parent directory must exist.
// See API docs: http://apidocs.joyent.com/manta/api.html#PutObject
func (c *Client) PutObject(path, objectName string, object []byte) error {
r := bytes.NewReader(object)
req := request{
method: client.PUT,
url: makeURL(apiStorage, path, objectName),
reqReader: r,
reqLength: len(object),
expectedStatus: http.StatusNoContent,
}
if _, err := c.sendRequest(req); err != nil {
return errors.Newf(err, "failed to create object: %s/%s", path, objectName)
}
return nil
}
// Retrieves the specified object from the specified location.
// See API docs: http://apidocs.joyent.com/manta/api.html#GetObject
func (c *Client) GetObject(path, objectName string) ([]byte, error) {
var resp []byte
requestHeaders := make(http.Header)
requestHeaders.Set("Accept", "*/*")
req := request{
method: client.GET,
url: makeURL(apiStorage, path, objectName),
reqHeader: requestHeaders,
resp: &resp,
}
respData, err := c.sendRequest(req)
if err != nil {
return nil, errors.Newf(err, "failed to get object %s/%s", path, objectName)
}
res, ok := respData.RespValue.(*[]byte)
if !ok {
return nil, errors.Newf(err, "failed to assert downloaded data as type *[]byte for object %s/%s", path, objectName)
}
return *res, nil
}
// Deletes the specified object from the specified location.
// See API docs: http://apidocs.joyent.com/manta/api.html#DeleteObject
func (c *Client) DeleteObject(path, objectName string) error {
req := request{
method: client.DELETE,
url: makeURL(apiStorage, path, objectName),
expectedStatus: http.StatusNoContent,
}
if _, err := c.sendRequest(req); err != nil {
return errors.Newf(err, "failed to delete object %s/%s", path, objectName)
}
return nil
}
// Creates a link (similar to a Unix hard link) from location to path/linkName.
// See API docs: http://apidocs.joyent.com/manta/api.html#PutSnapLink
func (c *Client) PutSnapLink(path, linkName, location string) error {
requestHeaders := make(http.Header)
requestHeaders.Set("Accept", "application/json; type=link")
requestHeaders.Set("Location", location)
req := request{
method: client.PUT,
url: makeURL(apiStorage, path, linkName),
reqHeader: requestHeaders,
expectedStatus: http.StatusNoContent,
}
if _, err := c.sendRequest(req); err != nil {
return errors.Newf(err, "failed to create snap link: %s/%s", path, linkName)
}
return nil
}
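// Illustrative storage round-trip using the methods above (a sketch:
// construction of the underlying gocommon client is assumed, and the
// paths are example values):
//
//	cli := New(underlyingClient)
//	_ = cli.PutDirectory("myuser/stor/photos")
//	_ = cli.PutObject("myuser/stor/photos", "pic.jpg", data)
//	obj, _ := cli.GetObject("myuser/stor/photos", "pic.jpg")
//	_ = cli.PutSnapLink("myuser/stor/backup", "pic.jpg", "/myuser/stor/photos/pic.jpg")
//	_ = cli.DeleteObject("myuser/stor/photos", "pic.jpg")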
// CreateJobOpts represents the options that can be specified
// when creating a job.
type CreateJobOpts struct {
Name string `json:"name,omitempty"` // Job Name (optional)
Phases []Phase `json:"phases"` // Tasks to execute as part of this job
}
// Job represents the status of a job.
type Job struct {
Id string // Job unique identifier
Name string `json:"name,omitempty"` // Job Name
State string // Job state
Cancelled bool // Whether the job has been cancelled or not
	InputDone bool // Whether the inputs for the job are still open or not
Stats JobStats `json:"stats,omitempty"` // Job statistics
TimeCreated string // Time the job was created at
TimeDone string `json:"timeDone,omitempty"` // Time the job was completed
TimeArchiveStarted string `json:"timeArchiveStarted,omitempty"` // Time the job archiving started
TimeArchiveDone string `json:"timeArchiveDone,omitempty"` // Time the job archiving completed
Phases []Phase `json:"phases"` // Job tasks
Options interface{} // Job options
}
// JobStats represents statistics about a job
type JobStats struct {
	Errors int // Number of errors
Outputs int // Number of output produced
Retries int // Number of retries
Tasks int // Total number of task in the job
	TasksDone int // Number of tasks done
}
// Phase represents a task to be executed as part of a Job
type Phase struct {
Type string `json:"type,omitempty"` // Task type, one of 'map' or 'reduce' (optional)
Assets []string `json:"assets,omitempty"` // An array of objects to be placed in the compute zones (optional)
Exec string `json:"exec"` // The actual shell statement to execute
Init string `json:"init"` // Shell statement to execute in each compute zone before any tasks are executed
Count int `json:"count,omitempty"` // If type is 'reduce', an optional number of reducers for this phase (default is 1)
Memory int `json:"memory,omitempty"` // Amount of DRAM to give to your compute zone (in Mb, optional)
Disk int `json:"disk,omitempty"` // Amount of disk space to give to your compute zone (in Gb, optional)
}
// JobError represents an error that occurred during a job execution
type JobError struct {
Id string // Job Id
Phase string // Phase number of the failure
What string // A human readable summary of what failed
Code string // Error code
Message string // Human readable error message
Stderr string // A key that saved the stderr for the given command (optional)
Key string // The input key being processed when the task failed (optional)
}
// Creates a job with the given options.
// See API docs: http://apidocs.joyent.com/manta/api.html#CreateJob
func (c *Client) CreateJob(opts CreateJobOpts) (string, error) {
var resp string
var respHeader http.Header
req := request{
method: client.POST,
url: apiJobs,
reqValue: opts,
respHeader: &respHeader,
resp: &resp,
expectedStatus: http.StatusCreated,
}
respData, err := c.sendRequest(req)
if err != nil {
return "", errors.Newf(err, "failed to create job with name: %s", opts.Name)
}
return respData.RespHeaders.Get("Location"), nil
}
// Submits inputs to an already created job.
// See API docs: http://apidocs.joyent.com/manta/api.html#AddJobInputs
func (c *Client) AddJobInputs(jobId string, jobInputs io.Reader) error {
inputData, errI := ioutil.ReadAll(jobInputs)
if errI != nil {
return errors.Newf(errI, "failed to read inputs for job %s", jobId)
}
requestHeaders := make(http.Header)
requestHeaders.Set("Accept", "*/*")
requestHeaders.Set("Content-Type", "text/plain")
req := request{
method: client.POST,
url: makeURL(apiJobs, jobId, apiJobsLive, apiJobsIn),
reqValue: string(inputData),
reqHeader: requestHeaders,
expectedStatus: http.StatusNoContent,
}
if _, err := c.sendRequest(req); err != nil {
return errors.Newf(err, "failed to add inputs to job %s", jobId)
}
return nil
}
// This closes input for a job, and finalizes the job.
// See API docs: http://apidocs.joyent.com/manta/api.html#EndJobInput
func (c *Client) EndJobInputs(jobId string) error {
req := request{
method: client.POST,
url: makeURL(apiJobs, jobId, apiJobsLive, apiJobsIn, apiJobsEnd),
expectedStatus: http.StatusAccepted,
}
if _, err := c.sendRequest(req); err != nil {
return errors.Newf(err, "failed to end inputs for job %s", jobId)
}
return nil
}
// This cancels a job from doing any further work.
// Cancellation is asynchronous and "best effort"; there is no guarantee the job will actually stop.
// See API docs: http://apidocs.joyent.com/manta/api.html#CancelJob
func (c *Client) CancelJob(jobId string) error {
req := request{
method: client.POST,
url: makeURL(apiJobs, jobId, apiJobsLive, apiJobsCancel),
expectedStatus: http.StatusAccepted,
}
if _, err := c.sendRequest(req); err != nil {
return errors.Newf(err, "failed to cancel job %s", jobId)
}
return nil
}
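// Illustrative job lifecycle using the methods above (a sketch; the phase,
// input path, and job-id extraction are example values, and errors are
// elided for brevity):
//
//	loc, _ := cli.CreateJob(CreateJobOpts{
//		Name:   "line-count",
//		Phases: []Phase{{Exec: "wc -l"}},
//	})
//	jobId := path.Base(loc) // CreateJob returns the Location header
//	_ = cli.AddJobInputs(jobId, strings.NewReader("/myuser/stor/input.txt\n"))
//	_ = cli.EndJobInputs(jobId)
//	_ = cli.CancelJob(jobId)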
// Returns the list of jobs.
// Note you can filter the set of jobs down to only live jobs by setting the liveOnly flag.
// See API docs: http://apidocs.joyent.com/manta/api.html#ListJobs
func (c *Client) ListJobs(liveOnly bool) ([]Entry, error) {
var resp []Entry
var url string
if liveOnly {
url = fmt.Sprintf("%s?state=running", apiJobs)
} else {
url = apiJobs
}
req := request{
method: client.GET,
url: url,
resp: &resp,
}
if _, err := c.sendRequest(req); err != nil {
return nil, errors.Newf(err, "failed to list jobs")
}
return resp, nil
}
// Gets the high-level job container object for a given job.
// See API docs: http://apidocs.joyent.com/manta/api.html#GetJob
func (c *Client) GetJob(jobId string) (Job, error) {
var resp Job
req := request{
method: client.GET,
url: makeURL(apiJobs, jobId, apiJobsLive, apiJobsStatus),
resp: &resp,
}
if _, err := c.sendRequest(req); err != nil {
return Job{}, errors.Newf(err, "failed to get job with id: %s", jobId)
}
return resp, nil
}
// Returns the current "live" set of outputs from a given job.
// See API docs: http://apidocs.joyent.com/manta/api.html#GetJobOutput
func (c *Client) GetJobOutput(jobId string) (string, error) {
var resp string
req := request{
method: client.GET,
url: makeURL(apiJobs, jobId, apiJobsLive, apiJobsOut),
resp: &resp,
}
if _, err := c.sendRequest(req); err != nil {
return "", errors.Newf(err, "failed to get output for job with id: %s", jobId)
}
return resp, nil
}
// Returns the submitted input objects for a given job, available while the job is running.
// See API docs: http://apidocs.joyent.com/manta/api.html#GetJobInput
func (c *Client) GetJobInput(jobId string) (string, error) {
var resp string
req := request{
method: client.GET,
url: makeURL(apiJobs, jobId, apiJobsLive, apiJobsIn),
resp: &resp,
}
if _, err := c.sendRequest(req); err != nil {
return "", errors.Newf(err, "failed to get input for job with id: %s", jobId)
}
return resp, nil
}
// Returns the current "live" set of failures from a given job.
// See API docs: http://apidocs.joyent.com/manta/api.html#GetJobFailures
func (c *Client) GetJobFailures(jobId string) (interface{}, error) {
var resp interface{}
req := request{
method: client.GET,
url: makeURL(apiJobs, jobId, apiJobsLive, apiJobsFail),
resp: &resp,
}
if _, err := c.sendRequest(req); err != nil {
return nil, errors.Newf(err, "failed to get failures for job with id: %s", jobId)
}
return resp, nil
}
// Returns the current "live" set of errors from a given job.
// See API docs: http://apidocs.joyent.com/manta/api.html#GetJobErrors
func (c *Client) GetJobErrors(jobId string) ([]JobError, error) {
var resp []JobError
req := request{
method: client.GET,
url: makeURL(apiJobs, jobId, apiJobsLive, apiJobsErr),
resp: &resp,
}
if _, err := c.sendRequest(req); err != nil {
return nil, errors.Newf(err, "failed to get errors for job with id: %s", jobId)
}
return resp, nil
}
// Returns a signed URL to retrieve the object at path.
func (c *Client) SignURL(path string, expires time.Time) (string, error) {
return c.client.SignURL(path, expires)
}
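// Illustrative sketch: produce a pre-signed link to an object that stays
// valid for one hour (the object path is an example value):
//
//	signed, err := cli.SignURL("/myuser/stor/photos/pic.jpg", time.Now().Add(time.Hour))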
| makeURL |
sed_6_1_5.py | #!/usr/bin/env python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
# | # 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import unittest
from autothreadharness.harness_case import HarnessCase
class SED_6_1_5(HarnessCase):
role = HarnessCase.ROLE_SED
case = '6 1 5'
golden_devices_required = 3
def on_dialog(self, dialog, title):
pass
if __name__ == '__main__':
unittest.main() | # Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer. |
conf.py | # -*- coding: utf-8 -*-
#
# FeinCMS documentation build configuration file, created by
# sphinx-quickstart on Mon Aug 10 17:03:33 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
#sys.path.append(os.path.abspath('..'))
#os.environ['DJANGO_SETTINGS_MODULE'] = 'example.settings'
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.txt'
# The encoding of source files.
source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Banana'
copyright = u'2009, Banana contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from banana import *
version = '0.1a'
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'footerbgcolor': '#213C63', # (CSS color): Background color for the footer line.
#'footertextcolor': '', # (CSS color): Text color for the footer line.
'sidebarbgcolor': '#305791', # (CSS color): Background color for the sidebar.
#'sidebartextcolor': '', # (CSS color): Text color for the sidebar.
'sidebarlinkcolor': '#98CCDB', # (CSS color): Link color for the sidebar.
'relbarbgcolor': '#213C63', # (CSS color): Background color for the relation bar.
#'relbartextcolor': '', # (CSS color): Text color for the relation bar.
#'relbarlinkcolor': '', # (CSS color): Link color for the relation bar.
#'bgcolor': '', # (CSS color): Body background color.
#'textcolor': '', # (CSS color): Body text color.
#'linkcolor': '', # (CSS color): Body link color.
#'headbgcolor': '', # (CSS color): Background color for headings.
#'headtextcolor': '', # (CSS color): Text color for headings.
#'headlinkcolor': '', # (CSS color): Link color for headings.
'codebgcolor': '#E0E8FF', # (CSS color): Background color for code blocks.
#'codetextcolor': '', # (CSS color): Default text color for code blocks, if not set differently by the highlighting style.
'bodyfont': 'Helvetica, Arial, sans-serif', # (CSS font-family): Font for normal text.
'headfont': 'Helvetica, Arial, sans-serif', # (CSS font-family): Font for headings.
}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities. | # Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_use_modindex = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'bananadoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
latex_paper_size = 'a4'
# The font size ('10pt', '11pt' or '12pt').
latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
#latex_documents = []
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True | html_use_smartypants = True
|
strings.go | package gotool
import (
"strings"
"unicode"
)
// RemoveRune removes a rune from a string.
func RemoveRune(source string, runeToBeRemoved rune) string {
return strings.Map(func(r rune) rune {
if r != runeToBeRemoved {
return r
}
return -1
}, source)
}
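// RemoveBlankRunes removes all whitespace runes (as defined by
// unicode.IsSpace) from a string.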
func RemoveBlankRunes(source string) string {
return strings.Map(func(r rune) rune {
if !unicode.IsSpace(r) {
return r
}
return -1
}, source)
}
// ReplaceBlankRunes replaces blank runes in a string with a new rune.
func ReplaceBlankRunes(source string, newRune rune) string {
return strings.Map(func(r rune) rune {
if unicode.IsSpace(r) {
return newRune
}
return r
}, source)
}
// ReplaceRunes replaces occurrences of a rune in a string with a new rune.
func ReplaceRunes(source string, oldRune, newRune rune) string |
// CheckSubStringExistence checks whether substring exists in string.
func CheckSubStringExistence(str, subString string) bool {
	return strings.Contains(str, subString)
}
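// Illustrative usage of the helpers above (a sketch):
//
//	RemoveRune("a-b-c", '-')                 // "abc"
//	RemoveBlankRunes(" a b\tc ")             // "abc"
//	ReplaceBlankRunes("a b\tc", '_')         // "a_b_c"
//	ReplaceRunes("a-b-c", '-', '+')          // "a+b+c"
//	CheckSubStringExistence("gopher", "oph") // true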
| {
return strings.Map(func(r rune) rune {
if r == oldRune {
return newRune
}
return r
}, source)
} |
EncryptionSettingIface.go | // Code generated by mockery (devel). DO NOT EDIT.
package mocks
import mock "github.com/stretchr/testify/mock"
// EncryptionSettingIface is an autogenerated mock type for the EncryptionSettingIface type
type EncryptionSettingIface struct {
mock.Mock
}
// IsStrictEncryption provides a mock function with given fields:
func (_m *EncryptionSettingIface) IsStrictEncryption() bool { |
var r0 bool
if rf, ok := ret.Get(0).(func() bool); ok {
r0 = rf()
} else {
r0 = ret.Get(0).(bool)
}
return r0
} | ret := _m.Called() |
exception_default.go | package exc
import (
`encoding/json`
`fmt`
`strings`
`github.com/goexl/gox`
)
var (
_ = NewException
_ Exception = (*exceptionDefault)(nil)
)
type exceptionDefault struct {
code int
message string
fields gox.Fields
}
// NewException creates a new exception
func NewException(code int, message string, fields ...gox.Field) *exceptionDefault {
retur | e *exceptionDefault) Code() int {
return e.code
}
func (e *exceptionDefault) Message() string {
return e.message
}
func (e *exceptionDefault) Fields() gox.Fields {
return e.fields
}
func (e *exceptionDefault) MarshalJSON() (bytes []byte, err error) {
output := make(map[string]interface{})
output[`code`] = e.code
output[`message`] = e.message
if 0 < len(e.fields) {
data := make(map[string]interface{})
for _, field := range e.fields {
data[field.Key()] = field.Value()
}
output[`data`] = data
}
bytes, err = json.Marshal(output)
return
}
func (e *exceptionDefault) Error() (str string) {
if bytes, err := e.MarshalJSON(); nil != err {
str = e.error()
} else {
str = string(bytes)
}
return
}
func (e *exceptionDefault) error() string {
var sb strings.Builder
sb.WriteRune('{')
sb.WriteString(fmt.Sprintf(`code = %d, `, e.code))
sb.WriteString(fmt.Sprintf(`message = %s, `, e.message))
sb.WriteString(fmt.Sprintf(`data = %s`, e.fields.String()))
sb.WriteRune('}')
return sb.String()
}
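// Illustrative usage (a sketch; construction of gox.Field values depends on
// the gox API and is omitted here):
//
//	err := NewException(404, "user not found")
//	fmt.Println(err.Error()) // {"code":404,"message":"user not found"}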
| n &exceptionDefault{
code: code,
message: message,
fields: fields,
}
}
func ( |
client_test.go | // Copyright 2019 The LUCI Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package client
import (
"net/http"
"net/http/httptest"
"net/url"
"testing"
"cloud.google.com/go/compute/metadata"
"go.chromium.org/luci/gce/appengine/testing/roundtripper"
"go.chromium.org/luci/gce/vmtoken"
. "github.com/smartystreets/goconvey/convey"
)
func TestNewClient(t *testing.T) | {
t.Parallel()
Convey("NewClient", t, func(c C) {
// Create a test server which expects the token.
srv := httptest.NewServer(http.HandlerFunc(func(_ http.ResponseWriter, req *http.Request) {
c.So(req.Header.Get(vmtoken.Header), ShouldEqual, "token")
}))
// Create a mock metadata client which returns the token.
meta := metadata.NewClient(&http.Client{
Transport: &roundtripper.StringRoundTripper{
Handler: func(req *http.Request) (int, string) {
So(req.URL.Path, ShouldEqual, "/computeMetadata/v1/instance/service-accounts/account/identity")
url, err := url.Parse(srv.URL)
So(err, ShouldBeNil)
So(req.URL.Query().Get("audience"), ShouldEqual, "http://"+url.Host)
return http.StatusOK, "token"
},
},
})
cli := NewClient(meta, "account")
_, err := cli.Get(srv.URL)
So(err, ShouldBeNil)
})
} |
|
builtin_attrs.rs | //! Built-in attributes and `cfg` flag gating.
use AttributeGate::*;
use AttributeType::*;
use crate::{Features, Stability};
use rustc_data_structures::fx::FxHashMap;
use rustc_span::symbol::{sym, Symbol};
use std::lazy::SyncLazy;
type GateFn = fn(&Features) -> bool;
macro_rules! cfg_fn {
($field: ident) => {
(|features| features.$field) as GateFn
};
}
pub type GatedCfg = (Symbol, Symbol, GateFn);
/// `cfg(...)`'s that are feature gated.
const GATED_CFGS: &[GatedCfg] = &[
// (name in cfg, feature, function to check if the feature is enabled)
(sym::target_thread_local, sym::cfg_target_thread_local, cfg_fn!(cfg_target_thread_local)),
(sym::target_has_atomic, sym::cfg_target_has_atomic, cfg_fn!(cfg_target_has_atomic)),
(sym::target_has_atomic_load_store, sym::cfg_target_has_atomic, cfg_fn!(cfg_target_has_atomic)),
(
sym::target_has_atomic_equal_alignment,
sym::cfg_target_has_atomic,
cfg_fn!(cfg_target_has_atomic),
),
(sym::sanitize, sym::cfg_sanitize, cfg_fn!(cfg_sanitize)),
(sym::version, sym::cfg_version, cfg_fn!(cfg_version)),
];
/// Find a gated cfg determined by the `pred`icate which is given the cfg's name.
pub fn find_gated_cfg(pred: impl Fn(Symbol) -> bool) -> Option<&'static GatedCfg> {
GATED_CFGS.iter().find(|(cfg_sym, ..)| pred(*cfg_sym))
}
// If you change this, please modify `src/doc/unstable-book` as well. You must
// move that documentation into the relevant place in the other docs, and
// remove the chapter on the flag.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum AttributeType {
/// Normal, builtin attribute that is consumed
/// by the compiler before the unused_attribute check
Normal,
/// Builtin attribute that may not be consumed by the compiler
/// before the unused_attribute check. These attributes
/// will be ignored by the unused_attribute lint
AssumedUsed,
/// Builtin attribute that is only allowed at the crate level
CrateLevel,
}
#[derive(Clone, Copy)]
pub enum AttributeGate {
/// Is gated by a given feature gate, reason
/// and function to check if enabled
Gated(Stability, Symbol, &'static str, fn(&Features) -> bool),
/// Ungated attribute, can be used on all release channels
Ungated,
}
// fn() is not Debug
impl std::fmt::Debug for AttributeGate {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match *self {
Self::Gated(ref stab, name, expl, _) => {
write!(fmt, "Gated({:?}, {}, {})", stab, name, expl)
}
Self::Ungated => write!(fmt, "Ungated"),
}
}
}
impl AttributeGate {
fn is_deprecated(&self) -> bool {
match *self {
Self::Gated(Stability::Deprecated(_, _), ..) => true,
_ => false,
}
}
}
/// A template that the attribute input must match.
/// Only top-level shape (`#[attr]` vs `#[attr(...)]` vs `#[attr = ...]`) is considered now.
#[derive(Clone, Copy, Default)]
pub struct AttributeTemplate {
pub word: bool,
pub list: Option<&'static str>,
pub name_value_str: Option<&'static str>,
}
/// A convenience macro for constructing attribute templates.
/// E.g., `template!(Word, List: "description")` means that the attribute
/// supports forms `#[attr]` and `#[attr(description)]`.
macro_rules! template {
(Word) => { template!(@ true, None, None) };
(List: $descr: expr) => { template!(@ false, Some($descr), None) };
(NameValueStr: $descr: expr) => { template!(@ false, None, Some($descr)) };
(Word, List: $descr: expr) => { template!(@ true, Some($descr), None) };
(Word, NameValueStr: $descr: expr) => { template!(@ true, None, Some($descr)) };
(List: $descr1: expr, NameValueStr: $descr2: expr) => {
template!(@ false, Some($descr1), Some($descr2))
};
(Word, List: $descr1: expr, NameValueStr: $descr2: expr) => {
template!(@ true, Some($descr1), Some($descr2))
};
(@ $word: expr, $list: expr, $name_value_str: expr) => { AttributeTemplate {
word: $word, list: $list, name_value_str: $name_value_str
} };
}
macro_rules! ungated {
($attr:ident, $typ:expr, $tpl:expr $(,)?) => {
(sym::$attr, $typ, $tpl, Ungated)
};
}
macro_rules! gated {
($attr:ident, $typ:expr, $tpl:expr, $gate:ident, $msg:expr $(,)?) => {
(sym::$attr, $typ, $tpl, Gated(Stability::Unstable, sym::$gate, $msg, cfg_fn!($gate)))
};
($attr:ident, $typ:expr, $tpl:expr, $msg:expr $(,)?) => {
(sym::$attr, $typ, $tpl, Gated(Stability::Unstable, sym::$attr, $msg, cfg_fn!($attr)))
};
}
macro_rules! rustc_attr {
(TEST, $attr:ident, $typ:expr, $tpl:expr $(,)?) => {
rustc_attr!(
$attr,
$typ,
$tpl,
concat!(
"the `#[",
stringify!($attr),
"]` attribute is just used for rustc unit tests \
and will never be stable",
),
)
};
($attr:ident, $typ:expr, $tpl:expr, $msg:expr $(,)?) => {
(
sym::$attr,
$typ,
$tpl,
Gated(Stability::Unstable, sym::rustc_attrs, $msg, cfg_fn!(rustc_attrs)),
)
};
}
macro_rules! experimental {
($attr:ident) => {
concat!("the `#[", stringify!($attr), "]` attribute is an experimental feature")
};
}
const IMPL_DETAIL: &str = "internal implementation detail";
const INTERNAL_UNSTABLE: &str = "this is an internal attribute that will never be stable";
pub type BuiltinAttribute = (Symbol, AttributeType, AttributeTemplate, AttributeGate);
/// Attributes that have a special meaning to rustc or rustdoc.
#[rustfmt::skip]
pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
// ==========================================================================
// Stable attributes:
// ==========================================================================
// Conditional compilation:
ungated!(cfg, Normal, template!(List: "predicate")),
ungated!(cfg_attr, Normal, template!(List: "predicate, attr1, attr2, ...")),
// Testing:
ungated!(ignore, Normal, template!(Word, NameValueStr: "reason")),
ungated!(
should_panic, Normal,
        template!(Word, List: r#"expected = "reason""#, NameValueStr: "reason"),
),
// FIXME(Centril): This can be used on stable but shouldn't.
ungated!(reexport_test_harness_main, Normal, template!(NameValueStr: "name")),
// Macros:
ungated!(derive, Normal, template!(List: "Trait1, Trait2, ...")),
ungated!(automatically_derived, Normal, template!(Word)),
// FIXME(#14407)
ungated!(macro_use, Normal, template!(Word, List: "name1, name2, ...")),
ungated!(macro_escape, Normal, template!(Word)), // Deprecated synonym for `macro_use`.
ungated!(macro_export, Normal, template!(Word, List: "local_inner_macros")),
ungated!(proc_macro, Normal, template!(Word)),
ungated!(
proc_macro_derive, Normal,
template!(List: "TraitName, /*opt*/ attributes(name1, name2, ...)"),
),
ungated!(proc_macro_attribute, Normal, template!(Word)),
// Lints:
ungated!(warn, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#)),
ungated!(allow, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#)),
ungated!(forbid, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#)),
ungated!(deny, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#)),
ungated!(must_use, AssumedUsed, template!(Word, NameValueStr: "reason")),
// FIXME(#14407)
ungated!(
deprecated, Normal,
template!(
Word,
List: r#"/*opt*/ since = "version", /*opt*/ note = "reason""#,
NameValueStr: "reason"
),
),
// Crate properties:
ungated!(crate_name, CrateLevel, template!(NameValueStr: "name")),
ungated!(crate_type, CrateLevel, template!(NameValueStr: "bin|lib|...")),
ungated!(crate_id, CrateLevel, template!(NameValueStr: "ignored")),
// ABI, linking, symbols, and FFI
ungated!(
link, AssumedUsed,
template!(List: r#"name = "...", /*opt*/ kind = "dylib|static|...", /*opt*/ wasm_import_module = "...""#),
),
ungated!(link_name, AssumedUsed, template!(NameValueStr: "name")),
ungated!(no_link, Normal, template!(Word)),
ungated!(repr, Normal, template!(List: "C")),
ungated!(export_name, AssumedUsed, template!(NameValueStr: "name")),
ungated!(link_section, AssumedUsed, template!(NameValueStr: "name")),
ungated!(no_mangle, AssumedUsed, template!(Word)),
ungated!(used, AssumedUsed, template!(Word)),
// Limits:
ungated!(recursion_limit, CrateLevel, template!(NameValueStr: "N")),
ungated!(type_length_limit, CrateLevel, template!(NameValueStr: "N")),
gated!(
const_eval_limit, CrateLevel, template!(NameValueStr: "N"), const_eval_limit,
experimental!(const_eval_limit)
),
// Entry point:
ungated!(main, Normal, template!(Word)),
ungated!(start, Normal, template!(Word)),
ungated!(no_start, CrateLevel, template!(Word)),
ungated!(no_main, CrateLevel, template!(Word)),
// Modules, prelude, and resolution:
ungated!(path, Normal, template!(NameValueStr: "file")),
ungated!(no_std, CrateLevel, template!(Word)),
ungated!(no_implicit_prelude, Normal, template!(Word)),
ungated!(non_exhaustive, AssumedUsed, template!(Word)),
// Runtime
ungated!(windows_subsystem, AssumedUsed, template!(NameValueStr: "windows|console")),
ungated!(panic_handler, Normal, template!(Word)), // RFC 2070
// Code generation:
ungated!(inline, AssumedUsed, template!(Word, List: "always|never")),
ungated!(cold, AssumedUsed, template!(Word)),
ungated!(no_builtins, AssumedUsed, template!(Word)),
ungated!(target_feature, AssumedUsed, template!(List: r#"enable = "name""#)),
ungated!(track_caller, AssumedUsed, template!(Word)),
gated!(
no_sanitize, AssumedUsed,
template!(List: "address, memory, thread"),
experimental!(no_sanitize)
),
// FIXME: #14408 assume docs are used since rustdoc looks at them.
ungated!(doc, AssumedUsed, template!(List: "hidden|inline|...", NameValueStr: "string")),
// ==========================================================================
// Unstable attributes:
// ==========================================================================
// Linking:
gated!(naked, AssumedUsed, template!(Word), naked_functions, experimental!(naked)),
gated!(
link_args, Normal, template!(NameValueStr: "args"),
"the `link_args` attribute is experimental and not portable across platforms, \
        it is recommended to use `#[link(name = \"foo\")]` instead",
),
gated!(
link_ordinal, AssumedUsed, template!(List: "ordinal"), raw_dylib,
experimental!(link_ordinal)
),
// Plugins:
(
sym::plugin_registrar, Normal, template!(Word),
Gated(
Stability::Deprecated(
"https://github.com/rust-lang/rust/pull/64675",
Some("may be removed in a future compiler version"),
),
sym::plugin_registrar,
"compiler plugins are deprecated",
cfg_fn!(plugin_registrar)
)
),
(
sym::plugin, CrateLevel, template!(List: "name"),
Gated(
Stability::Deprecated(
"https://github.com/rust-lang/rust/pull/64675",
Some("may be removed in a future compiler version"),
),
sym::plugin,
"compiler plugins are deprecated",
cfg_fn!(plugin)
)
),
// Testing:
gated!(allow_fail, Normal, template!(Word), experimental!(allow_fail)),
gated!(
test_runner, CrateLevel, template!(List: "path"), custom_test_frameworks,
"custom test frameworks are an unstable feature",
),
// RFC #1268
gated!(marker, Normal, template!(Word), marker_trait_attr, experimental!(marker)),
gated!(
thread_local, AssumedUsed, template!(Word),
"`#[thread_local]` is an experimental feature, and does not currently handle destructors",
),
gated!(no_core, CrateLevel, template!(Word), experimental!(no_core)),
// RFC 2412
gated!(
optimize, AssumedUsed, template!(List: "size|speed"), optimize_attribute,
experimental!(optimize),
),
// RFC 2867
gated!(instruction_set, AssumedUsed, template!(List: "set"), isa_attribute, experimental!(instruction_set)),
gated!(ffi_returns_twice, AssumedUsed, template!(Word), experimental!(ffi_returns_twice)),
gated!(ffi_pure, AssumedUsed, template!(Word), experimental!(ffi_pure)),
gated!(ffi_const, AssumedUsed, template!(Word), experimental!(ffi_const)),
gated!(
register_attr, CrateLevel, template!(List: "attr1, attr2, ..."),
experimental!(register_attr),
),
gated!(
register_tool, CrateLevel, template!(List: "tool1, tool2, ..."),
experimental!(register_tool),
),
gated!(cmse_nonsecure_entry, AssumedUsed, template!(Word), experimental!(cmse_nonsecure_entry)),
// ==========================================================================
// Internal attributes: Stability, deprecation, and unsafe:
// ==========================================================================
    ungated!(feature, CrateLevel, template!(List: "name1, name2, ...")),
// FIXME(#14407) -- only looked at on-demand so we can't
// guarantee they'll have already been checked.
ungated!(
rustc_deprecated, AssumedUsed,
template!(List: r#"since = "version", reason = "...""#)
),
// FIXME(#14407)
ungated!(stable, AssumedUsed, template!(List: r#"feature = "name", since = "version""#)),
// FIXME(#14407)
ungated!(
unstable, AssumedUsed,
template!(List: r#"feature = "name", reason = "...", issue = "N""#),
),
// FIXME(#14407)
ungated!(rustc_const_unstable, AssumedUsed, template!(List: r#"feature = "name""#)),
// FIXME(#14407)
ungated!(rustc_const_stable, AssumedUsed, template!(List: r#"feature = "name""#)),
gated!(
allow_internal_unstable, AssumedUsed, template!(Word, List: "feat1, feat2, ..."),
"allow_internal_unstable side-steps feature gating and stability checks",
),
gated!(
rustc_allow_const_fn_unstable, AssumedUsed, template!(Word, List: "feat1, feat2, ..."),
"rustc_allow_const_fn_unstable side-steps feature gating and stability checks"
),
gated!(
allow_internal_unsafe, Normal, template!(Word),
"allow_internal_unsafe side-steps the unsafe_code lint",
),
// ==========================================================================
// Internal attributes: Type system related:
// ==========================================================================
gated!(fundamental, AssumedUsed, template!(Word), experimental!(fundamental)),
gated!(
may_dangle, Normal, template!(Word), dropck_eyepatch,
"`may_dangle` has unstable semantics and may be removed in the future",
),
// ==========================================================================
// Internal attributes: Runtime related:
// ==========================================================================
rustc_attr!(rustc_allocator, AssumedUsed, template!(Word), IMPL_DETAIL),
rustc_attr!(rustc_allocator_nounwind, AssumedUsed, template!(Word), IMPL_DETAIL),
gated!(alloc_error_handler, Normal, template!(Word), experimental!(alloc_error_handler)),
gated!(
default_lib_allocator, AssumedUsed, template!(Word), allocator_internals,
experimental!(default_lib_allocator),
),
gated!(
needs_allocator, Normal, template!(Word), allocator_internals,
experimental!(needs_allocator),
),
gated!(panic_runtime, AssumedUsed, template!(Word), experimental!(panic_runtime)),
gated!(needs_panic_runtime, AssumedUsed, template!(Word), experimental!(needs_panic_runtime)),
gated!(
unwind, AssumedUsed, template!(List: "allowed|aborts"), unwind_attributes,
experimental!(unwind),
),
gated!(
compiler_builtins, AssumedUsed, template!(Word),
"the `#[compiler_builtins]` attribute is used to identify the `compiler_builtins` crate \
which contains compiler-rt intrinsics and will never be stable",
),
gated!(
profiler_runtime, AssumedUsed, template!(Word),
"the `#[profiler_runtime]` attribute is used to identify the `profiler_builtins` crate \
which contains the profiler runtime and will never be stable",
),
// ==========================================================================
// Internal attributes, Linkage:
// ==========================================================================
gated!(
linkage, AssumedUsed, template!(NameValueStr: "external|internal|..."),
"the `linkage` attribute is experimental and not portable across platforms",
),
rustc_attr!(rustc_std_internal_symbol, AssumedUsed, template!(Word), INTERNAL_UNSTABLE),
// ==========================================================================
// Internal attributes, Macro related:
// ==========================================================================
rustc_attr!(rustc_builtin_macro, AssumedUsed, template!(Word), IMPL_DETAIL),
rustc_attr!(rustc_proc_macro_decls, Normal, template!(Word), INTERNAL_UNSTABLE),
rustc_attr!(
rustc_macro_transparency, AssumedUsed,
template!(NameValueStr: "transparent|semitransparent|opaque"),
"used internally for testing macro hygiene",
),
// ==========================================================================
// Internal attributes, Diagnostics related:
// ==========================================================================
rustc_attr!(
rustc_on_unimplemented, AssumedUsed,
template!(
List: r#"/*opt*/ message = "...", /*opt*/ label = "...", /*opt*/ note = "...""#,
NameValueStr: "message"
),
INTERNAL_UNSTABLE
),
// Enumerates "identity-like" conversion methods to suggest on type mismatch.
rustc_attr!(rustc_conversion_suggestion, AssumedUsed, template!(Word), INTERNAL_UNSTABLE),
// ==========================================================================
// Internal attributes, Const related:
// ==========================================================================
rustc_attr!(rustc_promotable, AssumedUsed, template!(Word), IMPL_DETAIL),
rustc_attr!(rustc_args_required_const, AssumedUsed, template!(List: "N"), INTERNAL_UNSTABLE),
// ==========================================================================
// Internal attributes, Layout related:
// ==========================================================================
rustc_attr!(
rustc_layout_scalar_valid_range_start, AssumedUsed, template!(List: "value"),
"the `#[rustc_layout_scalar_valid_range_start]` attribute is just used to enable \
niche optimizations in libcore and will never be stable",
),
rustc_attr!(
rustc_layout_scalar_valid_range_end, AssumedUsed, template!(List: "value"),
"the `#[rustc_layout_scalar_valid_range_end]` attribute is just used to enable \
niche optimizations in libcore and will never be stable",
),
rustc_attr!(
rustc_nonnull_optimization_guaranteed, AssumedUsed, template!(Word),
"the `#[rustc_nonnull_optimization_guaranteed]` attribute is just used to enable \
niche optimizations in libcore and will never be stable",
),
// ==========================================================================
// Internal attributes, Misc:
// ==========================================================================
gated!(
lang, Normal, template!(NameValueStr: "name"), lang_items,
"language items are subject to change",
),
(
sym::rustc_diagnostic_item,
Normal,
template!(NameValueStr: "name"),
Gated(
Stability::Unstable,
sym::rustc_attrs,
"diagnostic items compiler internal support for linting",
cfg_fn!(rustc_attrs),
),
),
gated!(
// Used in resolve:
prelude_import, AssumedUsed, template!(Word),
"`#[prelude_import]` is for use by rustc only",
),
gated!(
rustc_paren_sugar, Normal, template!(Word), unboxed_closures,
"unboxed_closures are still evolving",
),
rustc_attr!(
rustc_inherit_overflow_checks, AssumedUsed, template!(Word),
"the `#[rustc_inherit_overflow_checks]` attribute is just used to control \
overflow checking behavior of several libcore functions that are inlined \
across crates and will never be stable",
),
rustc_attr!(rustc_reservation_impl, Normal, template!(NameValueStr: "reservation message"),
"the `#[rustc_reservation_impl]` attribute is internally used \
for reserving for `for<T> From<!> for T` impl"
),
rustc_attr!(
rustc_test_marker, Normal, template!(Word),
"the `#[rustc_test_marker]` attribute is used internally to track tests",
),
rustc_attr!(
rustc_unsafe_specialization_marker, Normal, template!(Word),
"the `#[rustc_unsafe_specialization_marker]` attribute is used to check specializations"
),
rustc_attr!(
rustc_specialization_trait, Normal, template!(Word),
"the `#[rustc_specialization_trait]` attribute is used to check specializations"
),
// ==========================================================================
// Internal attributes, Testing:
// ==========================================================================
rustc_attr!(TEST, rustc_outlives, Normal, template!(Word)),
rustc_attr!(TEST, rustc_variance, Normal, template!(Word)),
rustc_attr!(TEST, rustc_layout, Normal, template!(List: "field1, field2, ...")),
rustc_attr!(TEST, rustc_regions, Normal, template!(Word)),
rustc_attr!(
TEST, rustc_error, AssumedUsed,
template!(Word, List: "delay_span_bug_from_inside_query")
),
rustc_attr!(TEST, rustc_dump_user_substs, AssumedUsed, template!(Word)),
rustc_attr!(TEST, rustc_if_this_changed, AssumedUsed, template!(Word, List: "DepNode")),
rustc_attr!(TEST, rustc_then_this_would_need, AssumedUsed, template!(List: "DepNode")),
rustc_attr!(
TEST, rustc_dirty, AssumedUsed,
template!(List: r#"cfg = "...", /*opt*/ label = "...", /*opt*/ except = "...""#),
),
rustc_attr!(
TEST, rustc_clean, AssumedUsed,
template!(List: r#"cfg = "...", /*opt*/ label = "...", /*opt*/ except = "...""#),
),
rustc_attr!(
TEST, rustc_partition_reused, AssumedUsed,
template!(List: r#"cfg = "...", module = "...""#),
),
rustc_attr!(
TEST, rustc_partition_codegened, AssumedUsed,
template!(List: r#"cfg = "...", module = "...""#),
),
rustc_attr!(
TEST, rustc_expected_cgu_reuse, AssumedUsed,
template!(List: r#"cfg = "...", module = "...", kind = "...""#),
),
rustc_attr!(TEST, rustc_synthetic, AssumedUsed, template!(Word)),
rustc_attr!(TEST, rustc_symbol_name, AssumedUsed, template!(Word)),
rustc_attr!(TEST, rustc_polymorphize_error, AssumedUsed, template!(Word)),
rustc_attr!(TEST, rustc_def_path, AssumedUsed, template!(Word)),
rustc_attr!(TEST, rustc_mir, AssumedUsed, template!(List: "arg1, arg2, ...")),
rustc_attr!(TEST, rustc_dump_program_clauses, AssumedUsed, template!(Word)),
rustc_attr!(TEST, rustc_dump_env_program_clauses, AssumedUsed, template!(Word)),
rustc_attr!(TEST, rustc_object_lifetime_default, AssumedUsed, template!(Word)),
rustc_attr!(TEST, rustc_dummy, Normal, template!(Word /* doesn't matter*/)),
gated!(
omit_gdb_pretty_printer_section, AssumedUsed, template!(Word),
"the `#[omit_gdb_pretty_printer_section]` attribute is just used for the Rust test suite",
),
];
pub fn deprecated_attributes() -> Vec<&'static BuiltinAttribute> {
BUILTIN_ATTRIBUTES.iter().filter(|(.., gate)| gate.is_deprecated()).collect()
}
pub fn is_builtin_attr_name(name: Symbol) -> bool {
BUILTIN_ATTRIBUTE_MAP.get(&name).is_some()
}
pub static BUILTIN_ATTRIBUTE_MAP: SyncLazy<FxHashMap<Symbol, &BuiltinAttribute>> =
SyncLazy::new(|| {
let mut map = FxHashMap::default();
for attr in BUILTIN_ATTRIBUTES.iter() {
if map.insert(attr.0, attr).is_some() {
panic!("duplicate builtin attribute `{}`", attr.0);
}
}
map
});
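// Lookup sketch (illustrative): both helpers above answer the same question
// against the statically built table; `rustc_dummy` is defined in the list.
//
//     assert!(is_builtin_attr_name(sym::rustc_dummy));
//     assert!(BUILTIN_ATTRIBUTE_MAP.get(&sym::rustc_dummy).is_some());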
url_scheme.go | package weapp
const (
apiURLScheme = "/wxa/generatescheme"
)
type URLScheme struct {
	// Information about the target Mini Program to open.
	SchemedInfo *SchemedInfo `json:"jump_wxa,omitempty"`
	// Type of the generated scheme code: true = expires, false = permanently valid.
	IsExpire bool `json:"is_expire,omitempty"`
	// Expiry time of an expiring scheme code, as a Unix timestamp. The generated code is valid until this time, for at most one year. Required when generating an expiring scheme code.
ExpireTime int64 `json:"expire_time,omitempty"`
}
type SchemedInfo struct {
	// Page path opened via the scheme code. Must be an existing page of a published Mini Program and must not carry a query string. If empty, the Mini Program home page is opened.
Path string `json:"path"`
	// Query string passed when entering the Mini Program via the scheme code. At most 128 characters; only digits, upper/lowercase letters and the special characters !#$&'()*+,/:;=?@-._~ are supported.
Query string `json:"query"`
}
type URLSchemeResponse struct {
CommonError
	// The generated Mini Program scheme code.
Openlink string `json:"openlink"`
}
// Generate obtains a Mini Program scheme code, for business scenarios such as launching a Mini Program from SMS, email or external web pages.
//
// token is the WeChat access_token.
func (scheme *URLScheme) Generate(token string) (*URLSchemeResponse, error) {
api := baseURL + apiURLScheme
return scheme.generate(api, token)
}
func (scheme *URLScheme) generate(api, token string) (*URLSchemeResponse, error) {
	uri, err := tokenAPI(api, token)
if err != nil {
return nil, err
}
res := new(URLSchemeResponse)
err = postJSON(uri, scheme, res)
if err != nil {
return nil, err
}
return res, nil
}
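// Usage sketch (illustrative only): the path, query and expiry values below
// are examples, and accessToken must be obtained separately.
//
//	scheme := &URLScheme{
//		SchemedInfo: &SchemedInfo{Path: "pages/index/index", Query: "id=1"},
//		IsExpire:    true,
//		ExpireTime:  1700000000,
//	}
//	res, err := scheme.Generate(accessToken)
//	if err != nil {
//		// handle the error
//	}
//	_ = res.Openlink // the generated scheme code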
order.js | /**
* Created by finderlo on 07/05/2017.
*/
// document.ready(function () {
//
// })
function getSelectValue(obj, orderId) {
    var sValue = obj.options[obj.options.selectedIndex].value; // the selected value
    var sText = obj.options[obj.options.selectedIndex].innerHTML; // the selected option's text
    document.getElementById("selectValue").innerHTML = sText + ",他的值为:" + sValue; // debug output
$.ajax({
url:"updateorder",
data:{"newState":sValue,
"orderId":orderId
},
success:function (result) {
            if (result === "true") alert("修改成功")
}
})
}
function deleteOrder(orderid){
$.ajax({
url:"deleteoder",
data:{"orderId":orderid},
success:function (result) {
            if (result === "true") alert("删除成功")
}
})
}
function findOrderList() {
$("#orderlist").find("tr").remove();
$.ajax({
url: "/orderlist",
success: function (result) {
var tab = $("#orderlist");
var html = "";
html += "<tr>";
html += "<th>订单ID/th>"
html += "<th>用户</th>"
html += "<th>投标号</th>"
html += "<th>身份证</th>"
html += "<th>交管所密码</th>"
html += "<th>状态</th>"
html += "<th>状态修改</th>"
html += "<th>备注</th>"
html += "</tr>"
for (var i = 0; i < result.length; i++) {
// alert(result[i].nickname);
html += "<tr>";
html += "<th>"+result[i].orderid+ "</th>"
html += "<th>"+result[i].user+ "</th>"
html += "<th>"+result[i].bidid+ "</th>"
html += "<th>"+result[i].idcard+ "</th>"
html += "<th>"+result[i].transactionPassword+ "</th>"
html += "<th>"+result[i].state+ "</th>"
html += "<th>"+ " <select id=\"Select1\" onchange=\"getSelectValue(this,"+result[i].orderid+
");\"> name=\"\"> "+
"<option value=\"0\" >0</option> "+
"<option value=\"1\">1</option> "+
"</select>"
+ "</th>"
html += "<th>"+result[i].remark+ "</th>"
html += "<th>" + "<button id='delete' onclick='deleteOrder(" + result[i].orderid +
")'>" + "删除" + "</button>" + "</th>"
html += "</tr>"
}
tab.append(html);
},
error: function () {
alert("error");
}
});
}
rvec.js | export function rvec(arr) {
  return { output: [arr], size: [1, arr.length] }
}
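// Example (sketch): a plain array becomes a 1×N row vector.
//   rvec([1, 2, 3])  // -> { output: [[1, 2, 3]], size: [1, 3] }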
|
move_keys_into_db.py | #!/usr/bin/env python3
import os
import sys
import yaml
import argparse
import logging
import mysql.connector
logger = logging.getLogger(__name__)
def build_mysql_connection(rest_config_path):
with open(rest_config_path) as f:
        cluster_config = yaml.safe_load(f)
host = cluster_config["mysql"]["hostname"]
port = cluster_config["mysql"]["port"]
username = cluster_config["mysql"]["username"]
password = cluster_config["mysql"]["password"]
db_name = "DLWSCluster-%s" % cluster_config["clusterId"]
return mysql.connector.connect(user=username,
password=password,
host=host,
port=port,
database=db_name)
def alter_table(rest_config_path):
conn = build_mysql_connection(rest_config_path)
cursor = conn.cursor()
cursor.execute(
"ALTER TABLE identity ADD COLUMN public_key TEXT not null"
)
cursor.execute(
"ALTER TABLE identity ADD COLUMN private_key TEXT not null"
)
conn.commit()
cursor.close()
conn.close()
def dump_data(rest_config_path, work_path):
conn = build_mysql_connection(rest_config_path)
cursor = conn.cursor()
cursor.execute("SELECT `identityName` FROM identity")
users = cursor.fetchall()
for user_name, in users:
alias = user_name
if "@" in alias:
alias = alias.split("@")[0]
if "/" in alias:
alias = alias.split("/")[1]
if "\\" in alias:
alias = alias.split("\\")[1]
logger.info("dumping %s", alias)
private_path = os.path.join(work_path, alias, ".ssh", "id_rsa")
public_path = os.path.join(work_path, alias, ".ssh",
"id_rsa.pub")
if not os.path.isfile(private_path) or not os.path.isfile(
public_path):
logger.warning("%s or %s not exist, ignore", private_path,
public_path)
continue
with open(private_path) as f:
private_key = f.read()
with open(public_path) as f:
public_key = f.read()
cursor.execute(
"""UPDATE identity
SET private_key = %s, public_key = %s
WHERE identityName = %s""", (private_key, public_key,
user_name))
conn.commit()
cursor.close()
conn.close()
def roll_back(rest_config_path):
conn = build_mysql_connection(rest_config_path)
cursor = conn.cursor()
cursor.execute("ALTER TABLE identity DROP COLUMN private_key, DROP COLUMN public_key")
conn.commit()
cursor.close()
conn.close()
def main(action, rest_config_path, work_path):
if action == "alter":
alter_table(rest_config_path)
elif action == "dump":
dump_data(rest_config_path, work_path)
elif action == "rollback":
roll_back(rest_config_path)
else:
logger.error("unknown action %s", action)
sys.exit(2)
if __name__ == '__main__':
logging.basicConfig(
format=
"%(asctime)s - %(levelname)s - %(filename)s:%(lineno)s - %(message)s",
level=logging.INFO)
parser = argparse.ArgumentParser()
parser.add_argument("action", choices=["alter", "dump", "rollback"])
parser.add_argument("--work_path",
help="path to NFS work directory",
default="/dlwsdata/work")
parser.add_argument("--rest_path",
help="path to restfulapi config file",
default="/etc/RestfulAPI/config.yaml")
args = parser.parse_args()
main(args.action, args.rest_path, args.work_path)
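# Typical invocation order (a sketch using the defaults defined above, not
# part of the script):
#   ./move_keys_into_db.py alter
#   ./move_keys_into_db.py dump --work_path /dlwsdata/work
#   ./move_keys_into_db.py rollback   # drops the added key columns again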
pandasread_csv_addheader.py | # %%
#######################################
def pandasread_csv_addheader(csv_file: str, addheader: list):
| """Returns a pandas dataframe from the given .csv file. Assumes there is no header in the .csv and requires a header to be given as an argument to the 'addheader'.
Example:
>>> myheader = ['NAME','AGE','JOB','DEPARTMENT','PAY']\n
>>> pandasread_csv_addheader('test.csv', addheader=myheader)\n
NAME AGE JOB DEPARTMENT PAY\n
0 bob 21 janitor sanitization team 2\n
1 alice 22 secretary admin team 3\n
2 chuck 23 plumber construction team 4\n
Reference:
https://stackoverflow.com/questions/36828348/pandas-read-csv-reading-a-csv-file-with-a-missing-header-element
Args:
csv_file (str): Reference an existing .csv file.
addheader (list): Reference the header you want to use for the columns.
Returns:
pandas.core.frame.DataFrame: Returns a pandas dataframe.
"""
import pandas
df = pandas.read_csv(csv_file, header=None, names=addheader)
    return df
|
__init__.py | # -*- coding: utf-8 -*-
# Copyright 2015, 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.api.constants import EventTypes
import logging
import re
logger = logging.getLogger(__name__)
class ApplicationServiceState(object):
DOWN = "down"
UP = "up"
class AppServiceTransaction(object):
"""Represents an application service transaction."""
def __init__(self, service, id, events):
self.service = service
self.id = id
self.events = events
def send(self, as_api):
"""Sends this transaction using the provided AS API interface.
Args:
as_api(ApplicationServiceApi): The API to use to send.
Returns:
A Deferred which resolves to True if the transaction was sent.
"""
return as_api.push_bulk(
service=self.service,
events=self.events,
txn_id=self.id
)
def complete(self, store):
"""Completes this transaction as successful.
Marks this transaction ID on the application service and removes the
transaction contents from the database.
Args:
store: The database store to operate on.
Returns:
A Deferred which resolves to True if the transaction was completed.
"""
return store.complete_appservice_txn(
service=self.service,
txn_id=self.id
)
class ApplicationService(object):
"""Defines an application service. This definition is mostly what is
provided to the /register AS API.
Provides methods to check if this service is "interested" in events.
"""
NS_USERS = "users"
NS_ALIASES = "aliases"
NS_ROOMS = "rooms"
# The ordering here is important as it is used to map database values (which
# are stored as ints representing the position in this list) to namespace
# values.
NS_LIST = [NS_USERS, NS_ALIASES, NS_ROOMS]
def __init__(self, token, url=None, namespaces=None, hs_token=None,
sender=None, id=None):
self.token = token
self.url = url
self.hs_token = hs_token
self.sender = sender
self.namespaces = self._check_namespaces(namespaces)
self.id = id
def _check_namespaces(self, namespaces):
# Sanity check that it is of the form:
# {
# users: [ {regex: "[A-z]+.*", exclusive: true}, ...],
# aliases: [ {regex: "[A-z]+.*", exclusive: true}, ...],
# rooms: [ {regex: "[A-z]+.*", exclusive: true}, ...],
# }
if not namespaces:
namespaces = {}
for ns in ApplicationService.NS_LIST:
if ns not in namespaces:
namespaces[ns] = []
continue
if type(namespaces[ns]) != list:
raise ValueError("Bad namespace value for '%s'" % ns)
for regex_obj in namespaces[ns]:
if not isinstance(regex_obj, dict):
raise ValueError("Expected dict regex for ns '%s'" % ns)
if not isinstance(regex_obj.get("exclusive"), bool):
raise ValueError(
"Expected bool for 'exclusive' in ns '%s'" % ns
)
if not isinstance(regex_obj.get("regex"), basestring):
raise ValueError(
"Expected string for 'regex' in ns '%s'" % ns
)
return namespaces
def _matches_regex(self, test_string, namespace_key, return_obj=False):
if not isinstance(test_string, basestring):
logger.error(
"Expected a string to test regex against, but got %s",
test_string
)
return False
for regex_obj in self.namespaces[namespace_key]:
if re.match(regex_obj["regex"], test_string):
if return_obj:
return regex_obj
return True
return False
def _is_exclusive(self, ns_key, test_string):
regex_obj = self._matches_regex(test_string, ns_key, return_obj=True)
if regex_obj:
return regex_obj["exclusive"]
return False
def _matches_user(self, event, member_list):
if (hasattr(event, "sender") and
self.is_interested_in_user(event.sender)):
return True
# also check m.room.member state key
if (hasattr(event, "type") and event.type == EventTypes.Member
and hasattr(event, "state_key")
and self.is_interested_in_user(event.state_key)):
return True
# check joined member events
for user_id in member_list:
if self.is_interested_in_user(user_id):
return True
return False
def _matches_room_id(self, event):
if hasattr(event, "room_id"):
return self.is_interested_in_room(event.room_id)
return False
def _matches_aliases(self, event, alias_list):
for alias in alias_list:
if self.is_interested_in_alias(alias):
return True
return False
def is_interested(self, event, restrict_to=None, aliases_for_event=None,
member_list=None):
"""Check if this service is interested in this event.
Args:
event(Event): The event to check.
restrict_to(str): The namespace to restrict regex tests to.
aliases_for_event(list): A list of all the known room aliases for
this event.
member_list(list): A list of all joined user_ids in this room.
Returns:
bool: True if this service would like to know about this event.
"""
if aliases_for_event is None:
aliases_for_event = []
if member_list is None:
member_list = []
if restrict_to and restrict_to not in ApplicationService.NS_LIST:
# this is a programming error, so fail early and raise a general
# exception
raise Exception("Unexpected restrict_to value: %s". restrict_to)
if not restrict_to:
return (self._matches_user(event, member_list)
or self._matches_aliases(event, aliases_for_event)
or self._matches_room_id(event))
elif restrict_to == ApplicationService.NS_ALIASES:
return self._matches_aliases(event, aliases_for_event)
elif restrict_to == ApplicationService.NS_ROOMS:
return self._matches_room_id(event)
elif restrict_to == ApplicationService.NS_USERS:
return self._matches_user(event, member_list)
def is_interested_in_user(self, user_id):
return (
self._matches_regex(user_id, ApplicationService.NS_USERS)
or user_id == self.sender
)
def is_interested_in_alias(self, alias):
return self._matches_regex(alias, ApplicationService.NS_ALIASES)
def is_interested_in_room(self, room_id):
return self._matches_regex(room_id, ApplicationService.NS_ROOMS)
def is_exclusive_user(self, user_id):
return (
self._is_exclusive(ApplicationService.NS_USERS, user_id)
or user_id == self.sender
)
def is_exclusive_alias(self, alias):
return self._is_exclusive(ApplicationService.NS_ALIASES, alias)
def is_exclusive_room(self, room_id):
return self._is_exclusive(ApplicationService.NS_ROOMS, room_id)
def __str__(self):
return "ApplicationService: %s" % (self.__dict__,)
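# Illustrative construction (values are made up; the namespace dict follows
# the shape documented in _check_namespaces above):
#   appservice = ApplicationService(
#       token="as_token",
#       url="http://localhost:8009",
#       namespaces={"users": [{"regex": "@irc_.*", "exclusive": True}]},
#       sender="@irc_bot:example.com",
#   )
#   appservice.is_interested_in_user("@irc_alice:example.com")  # -> True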
package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RGsodr(RPackage):
"""A Global Surface Summary of the Day (GSOD) Weather Data Client for R
Provides automated downloading, parsing, cleaning, unit conversion and
formatting of Global Surface Summary of the Day ('GSOD') weather data from
    the USA National Centers for Environmental Information ('NCEI'). Units are
    converted from United States Customary System ('USCS') units to International
    System of Units ('SI'). Stations may be individually
    checked for number of missing days defined by the user, where stations with
too many missing observations are omitted. Only stations with valid
reported latitude and longitude values are permitted in the final data.
Additional useful elements, saturation vapour pressure ('es'), actual
vapour pressure ('ea') and relative humidity ('RH') are calculated from the
original data using the improved August-Roche-Magnus approximation
(Alduchov & Eskridge 1996) and included in the final data set. The
resulting metadata include station identification information, country,
state, latitude, longitude, elevation, weather observations and associated
flags. For information on the 'GSOD' data from 'NCEI', please see the
'GSOD' 'readme.txt' file available from,
<https://www1.ncdc.noaa.gov/pub/data/gsod/readme.txt>."""
homepage = "https://docs.ropensci.org/GSODR/"
url = "https://cloud.r-project.org/src/contrib/GSODR_2.1.1.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/GSODR"
version('2.1.2', sha256='4fc1d084b6c21055d8cc17a6a6dc412261aa0d4ef4079bcd73b580a8c16bf74e')
version('2.1.1', sha256='dba732e5bd1e367b9d710e6b8924f0c02fa4546202f049124dba02bc2e3329f5')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('r-countrycode', type=('build', 'run'))
depends_on('r-curl', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('r-future-apply', type=('build', 'run'))
depends_on('r-httr', type=('build', 'run'))
    depends_on('r-r-utils', type=('build', 'run'))
utils.rs | use erdos::Configuration;
/// Returns a unique port for each test to avoid race conditions.
fn get_unique_port() -> usize {
use std::sync::atomic::{AtomicUsize, Ordering};
static PORT: AtomicUsize = AtomicUsize::new(9000);
PORT.fetch_add(1, Ordering::SeqCst)
}
pub fn make_default_config() -> Configuration {
let data_addresses = vec![format!("127.0.0.1:{}", get_unique_port())
.parse()
.expect("Unable to parse socket address")];
let control_addresses = vec![format!("127.0.0.1:{}", get_unique_port())
.parse()
.expect("Unable to parse socket address")];
Configuration::new(0, data_addresses, control_addresses, 4, None)
}
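// Sketch: each call draws fresh ports from the shared atomic counter, so
// configurations built by concurrently running tests never collide.
//
//     let config = make_default_config();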
|
context.rs | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use crate::{ProofOptions, TraceInfo};
use math::{log2, StarkField};
use utils::{
collections::Vec, string::ToString, ByteReader, ByteWriter, Deserializable,
DeserializationError, Serializable,
};
// PROOF CONTEXT
// ================================================================================================
/// Basic metadata about a specific execution of a computation.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Context {
trace_width: u8,
trace_length: u8, // stored as power of two
trace_meta: Vec<u8>,
field_modulus_bytes: Vec<u8>,
options: ProofOptions,
}
impl Context {
// CONSTRUCTOR
// --------------------------------------------------------------------------------------------
/// Creates a new context for a computation described by the specified field, trace info, and
/// proof options.
pub fn new<B: StarkField>(trace_info: &TraceInfo, options: ProofOptions) -> Self {
Context {
trace_width: trace_info.width() as u8,
trace_length: log2(trace_info.length()) as u8,
trace_meta: trace_info.meta().to_vec(),
field_modulus_bytes: B::get_modulus_le_bytes(),
options,
}
}
// PUBLIC ACCESSORS
// --------------------------------------------------------------------------------------------
/// Returns execution trace length of the computation described by this context.
pub fn trace_length(&self) -> usize {
2_usize.pow(self.trace_length as u32)
}
/// Returns execution trace width of the computation described by this context.
pub fn trace_width(&self) -> usize {
self.trace_width as usize
}
/// Returns execution trace info for the computation described by this context.
pub fn get_trace_info(&self) -> TraceInfo {
TraceInfo::with_meta(
self.trace_width(),
self.trace_length(),
self.trace_meta.clone(),
)
}
/// Returns the size of the LDE domain for the computation described by this context.
pub fn lde_domain_size(&self) -> usize {
self.trace_length() * self.options.blowup_factor()
}
/// Returns modulus of the field for the computation described by this context.
pub fn field_modulus_bytes(&self) -> &[u8] {
&self.field_modulus_bytes
}
/// Returns number of bits in the base field modulus for the computation described by this
/// context.
///
/// The modulus is assumed to be encoded in little-endian byte order.
    pub fn num_modulus_bits(&self) -> u32 {
        let mut num_bits = self.field_modulus_bytes.len() as u32 * 8;
        for &byte in self.field_modulus_bytes.iter().rev() {
            if byte != 0 {
                num_bits -= byte.leading_zeros();
                return num_bits;
            }
            num_bits -= 8;
        }
        0
    }
/// Returns proof options which were used to a proof in this context.
pub fn options(&self) -> &ProofOptions {
&self.options
}
}
impl Serializable for Context {
/// Serializes `self` and writes the resulting bytes into the `target`.
fn write_into<W: ByteWriter>(&self, target: &mut W) {
target.write_u8(self.trace_width);
target.write_u8(self.trace_length);
target.write_u16(self.trace_meta.len() as u16);
target.write_u8_slice(&self.trace_meta);
assert!(self.field_modulus_bytes.len() < u8::MAX as usize);
target.write_u8(self.field_modulus_bytes.len() as u8);
target.write_u8_slice(&self.field_modulus_bytes);
self.options.write_into(target);
}
}
impl Deserializable for Context {
/// Reads proof context from the specified `source` and returns the result.
///
/// # Errors
    /// Returns an error if a valid Context struct could not be read from the specified `source`.
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
// read and validate trace width
let trace_width = source.read_u8()?;
if trace_width == 0 {
return Err(DeserializationError::InvalidValue(
"trace width must be greater than zero".to_string(),
));
}
if trace_width as usize >= TraceInfo::MAX_TRACE_WIDTH {
return Err(DeserializationError::InvalidValue(format!(
"Trace width cannot be greater than {}, but had {}",
TraceInfo::MAX_TRACE_WIDTH,
trace_width
)));
}
// read and validate trace length
let trace_length = source.read_u8()?;
if 2_usize.pow(trace_length as u32) < TraceInfo::MIN_TRACE_LENGTH {
return Err(DeserializationError::InvalidValue(format!(
"Trace length cannot be smaller than {}, but had {}",
TraceInfo::MIN_TRACE_LENGTH,
2_usize.pow(trace_length as u32)
)));
}
// read trace metadata
let num_meta_bytes = source.read_u16()? as usize;
let trace_meta = if num_meta_bytes != 0 {
source.read_u8_vec(num_meta_bytes)?
} else {
vec![]
};
// read and validate field modulus bytes
let num_modulus_bytes = source.read_u8()? as usize;
if num_modulus_bytes == 0 {
return Err(DeserializationError::InvalidValue(
"field modulus cannot be an empty value".to_string(),
));
}
let field_modulus_bytes = source.read_u8_vec(num_modulus_bytes)?;
// read options
let options = ProofOptions::read_from(source)?;
Ok(Context {
trace_width,
trace_length,
trace_meta,
field_modulus_bytes,
options,
})
}
}
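// Round-trip sketch (illustrative; assumes a concrete `StarkField` type such
// as `BaseElement`, valid `trace_info`/`options` values, and that the utils
// crate's `Serializable` provides `to_bytes` and a `SliceReader` exists):
//
//     let context = Context::new::<BaseElement>(&trace_info, options);
//     let bytes = context.to_bytes();
//     let parsed = Context::read_from(&mut SliceReader::new(&bytes))?;
//     assert_eq!(context, parsed);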
_brz_win.py | # Copied from https://github.com/downloadam/client/blob/master/client/registry/win.py
import logging
import sys
import os
from contextlib import contextmanager
import subprocess
import _winreg as winreg
from _winreg import HKEY_CLASSES_ROOT, HKEY_LOCAL_MACHINE, HKEY_CURRENT_USER, \
KEY_QUERY_VALUE, REG_SZ, KEY_ALL_ACCESS, KEY_WRITE, KEY_CREATE_SUB_KEY, KEY_SET_VALUE
log = logging.getLogger(__name__)
@contextmanager
def open_key(hkey, *args):
key = winreg.OpenKeyEx(hkey, *args)
yield key
winreg.CloseKey(key)
@contextmanager
def create_key(hkey, subkey):
key = winreg.CreateKey(hkey, subkey)
yield key
winreg.CloseKey(key)
def read_reg_key(hkey, subkey, name=""):
try:
with open_key(hkey, subkey, 0, KEY_QUERY_VALUE) as k:
return winreg.QueryValueEx(k, name)
except WindowsError as e:
errno, message = e.args
if errno != 2:
raise e
return (None, None)
def write_reg_key(hkey, subkey, name, value):
try:
with open_key(hkey, subkey, 0, KEY_ALL_ACCESS) as k:
winreg.SetValueEx(k, name, 0, value[0], value[1])
return True
except WindowsError as e:
errno, message = e.args
if errno != 2:
raise e
return False
def enum_reg_keys(hkey, subkey):
with open_key(hkey, subkey) as k:
i = 0
while True:
try:
name = winreg.EnumKey(k, i)
except:
break
yield name
i += 1
def _parse_browser_path(path):
try:
if path.startswith('"'):
path = path[1:].split('"', 1)[0]
return path
except:
return None
def get_default_browser():
result = _parse_browser_path(read_reg_key(HKEY_CURRENT_USER, 'Software\\Classes\\http\\shell\\open\\command')[0])
if result is None:
result = _parse_browser_path(read_reg_key(HKEY_CLASSES_ROOT, 'http\\shell\\open\\command')[0])
return result
def get_browser_path(key):
result = _parse_browser_path(read_reg_key(HKEY_CURRENT_USER, 'Software\\Clients\\StartMenuInternet\\{}\\shell\\open\\command'.format(key))[0])
if result is None:
result = _parse_browser_path(read_reg_key(HKEY_LOCAL_MACHINE, 'Software\\Clients\\StartMenuInternet\\{}\\shell\\open\\command'.format(key))[0])
return result
def iterate_browsers(default=None):
if default is None:
default = get_default_browser() or ''
default = default.lower()
ignore = set()
for hkey in (HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE):
try:
enum = list(enum_reg_keys(hkey, 'Software\\Clients\\StartMenuInternet'))
except WindowsError:
            # the registry key does not exist
continue
for key in enum:
if key in ignore:
continue
ignore.add(key)
path = get_browser_path(key)
if not path:
continue
if not os.path.exists(path):
continue
if key == 'IEXPLORE.EXE':
try:
version = int(read_reg_key(hkey, 'Software\\Microsoft\\Internet Explorer', 'Version')[0].split('.', 1)[0])
except AttributeError: # this maybe happens, don't know why. assume IE is outdated
version = 0
if version < 9:
outdated = True
else:
outdated = False
elif key == 'OperaStable':
outdated = True
else:
outdated = False
yield key.lower(), path, path.lower() == default, outdated
old_ie_settings = {}
def resume_ie_settings():
global old_ie_settings
key = HKEY_CURRENT_USER
subkey = 'Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings'
for (name, value) in old_ie_settings.items():
write_reg_key(key, subkey, name, value)
def launch_ie(executable, url, rootdir, proxy_type, proxy_ip, proxy_port):
global old_ie_settings
key = HKEY_CURRENT_USER
subkey = 'Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings'
new_values = {
'ProxyEnable' : (4, 1),
'ProxyOverride' : (1, u'*.local;<local>'),
'ProxyServer' : (1, u'%s:%d' % (proxy_ip, proxy_port)),
}
for (name, _) in new_values.items():
(reg_value, reg_type) = read_reg_key(key, subkey, name)
if reg_value is not None:
old_ie_settings[name] = (reg_type, reg_value)
write_reg_key(key, subkey, name, new_values[name])
cmdline = [
executable,
url,
]
cmdline = [s.encode(sys.getfilesystemencoding()) for s in cmdline]
return subprocess.Popen(cmdline)
def launch_ie_tab(executable, url, rootdir):
cmdline = [
executable,
url,
]
cmdline = [s.encode(sys.getfilesystemencoding()) for s in cmdline]
return subprocess.Popen(cmdline)
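# Usage sketch (Windows only; uses only functions defined in this module):
#   default = get_default_browser()  # path to the default browser, or None
#   for key, path, is_default, outdated in iterate_browsers():
#       print(key, path, is_default, outdated)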
continuous_mountaincar.py | """
Our modification of the OpenAI Gym Continuous Mountain Car by Olivier Sigaud:
https://github.com/openai/gym/blob/master/gym/envs/classic_control/continuous_mountain_car.py
which was (ultimately) based on Sutton's implementation:
http://incompleteideas.net/sutton/MountainCar/MountainCar1.cp
"""
from pilco.errors import EnvironmentError
import gym
from gym import spaces
from gym.utils import seeding
import numpy as np
class MountainCar(gym.Env):
metadata = {'render.modes': ['human', 'rgb_array'],
'video.frames_per_second': 30}
def __init__(self):
        # State and action bounds
        self.min_action = -1.0
        self.max_action = 1.0
        self.min_position = -3.0
        self.max_position = 3.0
        self.max_speed = 0.07
        self.goal_position = 0.5
        # Force per mass the car can output
        self.power = 0.0015
        self.low_state = np.array([self.min_position, -self.max_speed],
                                  dtype=np.float32)
        self.high_state = np.array([self.max_position, self.max_speed],
                                   dtype=np.float32)
        self.viewer = None
        # Allowed action space
        self.action_space = spaces.Box(low=self.min_action,
                                       high=self.max_action,
                                       shape=(1,),
                                       dtype=np.float32)
        self.seed()
        # Temporary hack to work with the rest of the library
        self.env = self
def seed(self, seed=None):
self.np_random, seed = seeding.np_random(seed)
return [seed]
def step(self, action):
# Check if action is in permissible space
if not self.action_space.contains(action):
raise EnvironmentError(f'Expected action in the range of [-1., 1.] '
f'got action {action}.')
        # Unpack position and velocity
position, velocity = self.state
# Increment position by velocity
position_ = position + velocity
# Increment velocity by Euler rule and clip
velocity_ = velocity + action * self.power - 0.0025 * np.cos(3 * position)
velocity_ = np.clip(velocity_, - self.max_speed, self.max_speed)
self.state = np.array([position_, velocity_])
return self.state, None, False, {}
def reset(self):
self.state = np.array([-0.5, 0.])
return np.array(self.state)
def _height(self, xs):
return 0.55 + 0.45 * np.sin(3 * xs)
def render(self, mode='human'):
# Set picture size
screen_width = 600
screen_height = 400
world_width = self.max_position - self.min_position
scale = screen_width/world_width
# Set car size
carwidth = 40
carheight = 20
if self.viewer is None:
from gym.envs.classic_control import rendering
# Car constants
clearance = 10
# Overall viewer
self.viewer = rendering.Viewer(screen_width, screen_height)
# Track on which the car moves
xs = np.linspace(self.min_position, self.max_position, 200)
ys = self._height(xs)
xys = list(zip((xs - self.min_position) * scale, ys * scale))
# Add car
self.track = rendering.make_polyline(xys)
self.track.set_linewidth(4)
self.viewer.add_geom(self.track)
self.cartrans = rendering.Transform()
# Car chasis
l, r, t, b = -carwidth / 2, carwidth / 2, carheight, 0
car = rendering.FilledPolygon([(l, b), (l, t), (r, t), (r, b)])
car.add_attr(rendering.Transform(translation=(0, clearance)))
car.add_attr(self.cartrans)
self.viewer.add_geom(car)
# Front wheel
frontwheel = rendering.make_circle(carheight / 2.5)
frontwheel.set_color(.5, .5, .5)
frontwheel.add_attr(rendering.Transform(translation=(carwidth / 4, clearance)))
frontwheel.add_attr(self.cartrans)
self.viewer.add_geom(frontwheel)
# Back wheel
backwheel = rendering.make_circle(carheight / 2.5)
backwheel.add_attr(rendering.Transform(translation=(-carwidth / 4, clearance)))
backwheel.add_attr(self.cartrans)
backwheel.set_color(.5, .5, .5)
self.viewer.add_geom(backwheel)
# Flagpole on mountain peak
flagx = scale * (0.5 - self.min_position)
flagy1 = scale * self._height(self.goal_position)
flagy2 = flagy1 + 50
flagpole = rendering.Line((flagx, flagy1),
(flagx, flagy2))
self.viewer.add_geom(flagpole)
# Flag on flagpole
flag = rendering.FilledPolygon([(flagx, flagy2),
(flagx, flagy2 - 10),
(flagx + 25, flagy2 - 5)])
flag.set_color(.8, .8, 0)
self.viewer.add_geom(flag)
# Translate and rotate car
self.cartrans.set_translation(scale * (self.state[0] - self.min_position),
scale * self._height(self.state[0]))
self.cartrans.set_rotation(np.cos(3 * self.state[0]))
return self.viewer.render(return_rgb_array=mode=='rgb_array')
def close(self):
if self.viewer:
self.viewer.close()
self.viewer = None
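# Interaction sketch (names from this file; step() returns a None reward and
# never signals termination by itself):
#   env = MountainCar()
#   state = env.reset()
#   state, _, done, _ = env.step(np.array([0.5], dtype=np.float32))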
query_NTNENA.py | from exceptions.exceptions import NGSIUsageError
from utils.jsondict import lookup_string_match
from flask import request
from reporter.reporter import _validate_query_params
from translators.crate import CrateTranslatorInstance
import logging
from .geo_query_handler import handle_geo_query
def query_NTNENA(id_=None,  # In Query
attrs=None,
type_=None,
aggr_method=None,
aggr_period=None,
aggr_scope=None,
options=None,
from_date=None,
to_date=None,
last_n=None,
limit=10000,
offset=0,
georel=None,
geometry=None,
coords=None):
"""
See /v2/attrs in API Specification
quantumleap.yml
"""
r, c = _validate_query_params(attrs, aggr_period, aggr_method, aggr_scope,
options)
if c != 200:
return r, c
r, c, geo_query = handle_geo_query(georel, geometry, coords)
if r:
return r, c
if attrs is not None:
attrs = attrs.split(',')
fiware_s = request.headers.get('fiware-service', None)
fiware_sp = request.headers.get('fiware-servicepath', None)
entities = None
entity_ids = None
if id_:
entity_ids = [s.strip() for s in id_.split(',') if s]
try:
with CrateTranslatorInstance() as trans:
entities = trans.query(attr_names=attrs,
entity_type=type_,
entity_ids=entity_ids,
aggr_method=aggr_method,
aggr_period=aggr_period,
aggr_scope=aggr_scope,
from_date=from_date,
to_date=to_date,
last_n=last_n,
limit=limit,
offset=offset,
fiware_service=fiware_s,
fiware_servicepath=fiware_sp,
geo_query=geo_query)
except NGSIUsageError as e:
msg = "Bad Request Error: {}".format(e)
logging.getLogger().error(msg, exc_info=True)
return msg, 400
except Exception as e:
msg = "Something went wrong with QL. Error: {}".format(e)
logging.getLogger().error(msg, exc_info=True)
return msg, 500
attributes = []
entries = []
attrs_names = []
attrs_values = []
ignore = ('id', 'index', 'type')
if entities:
for e in entities:
attrs = [at for at in sorted(e.keys()) if at not in ignore]
for at in attrs:
if at not in attrs_names:
attrs_names.append(at)
for at in attrs_names:
entity_type = []
entity_types = []
entity_value = []
for e in entities:
matched_attr = lookup_string_match(e, at)
if matched_attr is not None:
index = [from_date or '', to_date or ''] if aggr_method and not aggr_period else e['index']
entity = {
'entityId': e['id'],
'index': index,
'values': matched_attr['values'] if matched_attr else [],
}
if e['type'] not in entity_types:
entity_value = []
entity_value.append(entity)
entity_ty = {
'entityType': e['type'],
'entities': entity_value
}
entity_type.append(entity_ty)
entity_types.append(e['type'])
else:
entity_value.append(entity)
entity_type.pop()
entity_ty = {
'entityType': e['type'],
'entities': entity_value
}
entity_type.append(entity_ty)
attrs_value = {
'attrName': at,
'types': entity_type
}
attrs_values.append(attrs_value)
res = {
'attrs': attrs_values
}
return res
r = {
"error": "Not Found",
"description": "No records were found for such query."
}
return r, 404
def query_NTNENA_value(*args, **kwargs):
res = query_NTNENA(*args, **kwargs)
if isinstance(res, dict):
res['values'] = res['attrs']
res.pop('attrs', None)
return res
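# Illustrative request served via this handler (see the /v2/attrs section of
# the quantumleap.yml spec referenced above; parameter values are made up):
#   GET /v2/attrs?attrs=temperature,pressure&type=Room&limit=100
# with optional 'fiware-service' / 'fiware-servicepath' headers.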
paging.ts | import { describe } from "mocha";
const Paging = () => {
describe("Feature: Paging", () => {
// TODO: implement paging test
});
};
export default Paging;