| max_stars_count (int64, 301–224k) | text (string, lengths 6–1.05M) | token_count (int64, 3–727k) |
|---|---|---|
423 | import yaml
import csv
import torch
from collections import defaultdict
def get_ops_for_key(key):
# Needs modified PyTorch C++ code to work
if key is None:
ops = torch._C._dispatch_get_registrations_for_dispatch_key()
else:
ops = torch._C._dispatch_get_registrations_for_dispatch_key(key)
cleaned_ops = []
for i in ops:
if 'aten::' not in i:
continue
cleaned_ops.append(i[6:].strip())
return set(cleaned_ops)
def gen_data(special_op_lists, analysis_name):
all_ops = get_ops_for_key(None)
composite_ops = get_ops_for_key('CompositeImplicitAutograd')
noncomposite_ops = all_ops - composite_ops
ops = yaml.load(open('../../pytorch/aten/src/ATen/native/native_functions.yaml', 'r').read(), Loader=yaml.CLoader)
annotated_ops = {a.strip(): b.strip() for a, b in list(csv.reader(open('annotated_ops')))}
from collections import defaultdict
uniq_ops = []
uniq_names = set()
overload_types = defaultdict(list)
cnt = 0
for op in ops:
func_str = op['func']
name = func_str[:func_str.index('(')]
if '.' in name:
uniq_name = name[:name.index('.')]
overload_types[name[name.index('.') + 1:]].append(name)
else:
uniq_name = name
op['name'] = uniq_name
full_name = func_str[:func_str.index('(')]
op['full_name'] = full_name
ret_type = func_str[func_str.index('->') + 3:]
op['ret_type'] = ret_type
cnt += 1
if uniq_name in uniq_names:
continue
uniq_names.add(uniq_name)
uniq_ops.append(op)
def annotate_ops(ops, is_unique):
categorization = defaultdict(int)
for op in ops:
if op['name'][-1] == '_':
categorization['inplace'] += 1
op['meta'] = 'inplace'
continue
if not is_unique and 'a!' in op['func'].lower():
categorization['out'] += 1
op['meta'] = 'out'
continue
if 'conv' in op['name']:
categorization['conv'] += 1
op['meta'] = 'conv'
continue
if 'pool' in op['name']:
categorization['pool'] += 1
op['meta'] = 'pool'
continue
if 'backward' in op['name']:
categorization['backward'] += 1
op['meta'] = 'backward'
continue
if op['name'][0] == '_' and op['name'][1] != '_':
categorization['private'] += 1
op['meta'] = 'private'
continue
if 'batch_norm' in op['name']:
categorization['batch_norm'] += 1
op['meta'] = 'batch_norm'
continue
if 'Tensor' not in op['func'] or 'Tensor' not in op['ret_type']:
categorization['non_tensor'] += 1
op['meta'] = 'non_tensor'
continue
if 'cudnn' in op['name'] or 'mkldnn' in op['name'] or 'miopen' in op['name'] or \
'native' in op['name'] or 'thnn' in op['name'] or 'slow' in op['name']:
categorization['backend'] += 1
op['meta'] = 'backend'
continue
if op['name'] in annotated_ops:
categorization['core'] += 1
op['meta'] = 'core ' + annotated_ops[op['name']]
continue
categorization['core'] += 1
op['meta'] = 'core unknown'
return categorization
annotate_ops(ops, is_unique=False)
with open(f"{analysis_name}", 'w') as f:
for op in ops:
info = [
op['full_name'], op['meta'], not (op['full_name'] in noncomposite_ops)
] + [check(op) for check in special_op_lists]
f.write(','.join([str(i) for i in info]) + '\n')
def name_check(lst):
return lambda x: x['name'] in lst
def full_name_check(lst):
return lambda x: x['full_name'] in lst
# Generates batching rule data
gen_data([full_name_check(get_ops_for_key('FuncTorchBatched'))], 'vmap.txt')
def remove_suffix(input_string, suffix):
if suffix and input_string.endswith(suffix):
return input_string[:-len(suffix)]
return input_string
def remove_prefix(input_string, prefix):
if prefix and input_string.startswith(prefix):
return input_string[len(prefix):]
return input_string
if True:
with open('run_ops.txt', 'r') as f:
opinfo_ops = [remove_suffix(i.strip(), '.default') for i in f.readlines()]
with open('count_ops.txt', 'r') as f:
opinfo_counts = [i.strip() for i in f.readlines()]
opinfo_counts = defaultdict(int, {k: v for k, v in zip(opinfo_ops, opinfo_counts)})
def count_fn(x):
return opinfo_counts[x['full_name']]
with open('run_decompositions.txt', 'r') as f:
decomposed_ops = [remove_suffix(i.strip(), '.default') for i in f.readlines()]
with open('public_api', 'r') as f:
ref_api = [i.strip() for i in f.readlines()]
def has_ref_impl(x):
name = x['name']
for prefix in ["linalg_", "special_"]:
name = remove_prefix(name, prefix)
prefixes = ['nn.functional', 'fft', 'special', 'linalg']
return any(f"{prefix}.{name}" in ref_api for prefix in prefixes) or name in ref_api
gen_data([full_name_check(opinfo_ops), full_name_check(decomposed_ops), count_fn, has_ref_impl], 'decompositions.txt')
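# Illustrative output format (example values invented): each line written to
# decompositions.txt is a CSV row of
#   full_name, meta, is_composite, has_opinfo, has_decomposition, opinfo_count, has_ref_impl
# e.g. add.Tensor,core binary,True,True,True,412,True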
| 2,701 |
923 | import torch
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
from nets.se_module import SELayer
import os
from pdb import set_trace as br
__all__ = ['ResNet18s_Decode', 'resnet12_decode', 'resnet14_decode', 'resnet18_decode', 'resnet50_decode']
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
def conv1x1(in_planes, out_planes, stride=1):
"""1x1 convolution"""
return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)
class DShortCut(nn.Module):
def __init__(self, cin, cout, has_avg, has_BN, affine=True):
super(DShortCut, self).__init__()
self.conv = nn.Conv2d(cin, cout, kernel_size=1, stride=1, padding=0, bias=False)
if has_avg:
self.avg = nn.AvgPool2d(kernel_size=2, stride=2, padding=0)
else:
self.avg = None
if has_BN:
self.bn = nn.BatchNorm2d(cout, affine=affine)
else:
self.bn = None
def forward(self, x):
if self.avg:
out = self.avg(x)
else:
out = x
out = self.conv(out)
if self.bn:
out = self.bn(out)
return out
class BasicBlock(nn.Module):
def __init__(self, inplanes, cfg, stride=1, se=False, se_reduction=-1):
super(BasicBlock, self).__init__()
assert len(cfg) == 2, 'wrong cfg length!'
mid_planes, planes = cfg[0], cfg[1]
self.conv1 = conv3x3(inplanes, mid_planes, stride)
self.bn1 = nn.BatchNorm2d(mid_planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(mid_planes, planes)
self.bn2 = nn.BatchNorm2d(planes)
self.relu2 = nn.ReLU(inplace=True) # added by me
self.stride = stride
if stride == 2:
self.shortcut = DShortCut(inplanes, planes, has_avg=True, has_BN=False)
elif inplanes != planes:
self.shortcut = DShortCut(inplanes, planes, has_avg=False, has_BN=True)
else:
self.shortcut = nn.Sequential()
self.se = se
self.se_reduction = se_reduction
if self.se:
assert se_reduction > 0, "Must specify se reduction > 0"
self.se_module = SELayer(planes, se_reduction)
def forward(self, x):
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.se:
out = self.se_module(out)
out += self.shortcut(x)
out = self.relu2(out)
return out
class Bottleneck(nn.Module):
def __init__(self, inplanes, cfg, stride=1, se=False, se_reduction=-1):
# NOTE: there is no expansion=4 factor here; make sure it is already multiplied into cfgs
super(Bottleneck, self).__init__()
assert len(cfg) == 3, 'wrong cfg length'
assert cfg[0] == cfg[1], 'dw channels are not equal'
mid_planes, planes = cfg[0], cfg[-1]
self.conv1 = conv1x1(inplanes, mid_planes)
self.bn1 = nn.BatchNorm2d(mid_planes)
self.conv2 = conv3x3(mid_planes, mid_planes, stride)
self.bn2 = nn.BatchNorm2d(mid_planes)
self.conv3 = conv1x1(mid_planes, planes)
self.bn3 = nn.BatchNorm2d(planes)
self.relu = nn.ReLU(inplace=True)
self.stride = stride
if stride == 2:
self.shortcut = DShortCut(inplanes, planes, has_avg=True, has_BN=False)
elif inplanes != planes:
self.shortcut = DShortCut(inplanes, planes, has_avg=False, has_BN=True)
else:
self.shortcut = nn.Sequential()
self.se = se
self.se_reduction = se_reduction
if self.se:
assert se_reduction > 0, "Must specify se reduction > 0"
self.se_module = SELayer(planes, se_reduction)
def forward(self, x):
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.se:
out = self.se_module(out)
out += self.shortcut(x)
out = self.relu(out)
return out
class ResNet18s_Decode(nn.Module):
def __init__(self, block, num_blocks, cfgs, num_classes=1000, se=False, se_reduction=-1, zero_init_residual=False):
super(ResNet18s_Decode, self).__init__()
self.cfgs = cfgs
self.num_blocks = num_blocks # [3,4,6,3]
self.block_layer_num = 2 if block == BasicBlock else 3
assert len(self.cfgs) == self.block_layer_num*sum(self.num_blocks) + 1, 'cfg length and num_blocks do not match'
self.se = se
self.se_reduction = se_reduction
self.conv1 = nn.Conv2d(3, cfgs[0], kernel_size=7, stride=2, padding=3, bias=False)
self.bn1 = nn.BatchNorm2d(cfgs[0])
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
count = 1
self.layer1 = self._make_layer(block, self.cfgs[count-1 : count+num_blocks[0]*self.block_layer_num-1], \
self.cfgs[count : count+num_blocks[0]*self.block_layer_num], stride=1)
count += num_blocks[0]*self.block_layer_num
self.layer2 = self._make_layer(block, self.cfgs[count-1 : count+num_blocks[1]*self.block_layer_num-1], \
self.cfgs[count : count+num_blocks[1]*self.block_layer_num], stride=2)
count += num_blocks[1]*self.block_layer_num
self.layer3 = self._make_layer(block, self.cfgs[count-1 : count+num_blocks[2]*self.block_layer_num-1], \
self.cfgs[count : count+num_blocks[2]*self.block_layer_num], stride=2)
count += num_blocks[2]*self.block_layer_num
self.layer4 = self._make_layer(block, self.cfgs[count-1 : count+num_blocks[3]*self.block_layer_num-1], \
self.cfgs[count : count+num_blocks[3]*self.block_layer_num], stride=2)
count += num_blocks[3]*self.block_layer_num
assert count == len(cfgs)
self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
self.fc = nn.Linear(self.cfgs[-1], num_classes)
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
# Zero-initialize the last BN in each residual branch,
# so that the residual branch starts with zeros, and each residual block behaves like an identity.
# This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677
if zero_init_residual:
for m in self.modules():
if isinstance(m, Bottleneck):
nn.init.constant_(m.bn3.weight, 0)
elif isinstance(m, BasicBlock):
nn.init.constant_(m.bn2.weight, 0)
def _make_layer(self, block, inplanes, cfgs, stride):
assert len(cfgs) % self.block_layer_num == 0, 'cfgs must be divisible by block_layer_num'
print('Out channels:', cfgs)
num_block = len(cfgs) // self.block_layer_num
strides = [stride] + [1]*(num_block-1)
layers = nn.ModuleList()
count = 0
for idx, stride in enumerate(strides):
layers.append(block(inplanes[count], cfgs[count:count+self.block_layer_num], stride, \
se=self.se, se_reduction=self.se_reduction))
count += self.block_layer_num
assert count == len(cfgs)
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
x = x.view(x.size(0), -1)
x = self.fc(x)
return x
def resnet12_decode(cfg, num_classes, se=False, se_reduction=-1):
"""Constructs a ResNet-18 cfg configured model.
"""
return ResNet18s_Decode(BasicBlock, [1,1,1,2], cfg, num_classes=num_classes, se=se, se_reduction=se_reduction)
def resnet14_decode(cfg, num_classes, se=False, se_reduction=-1):
"""Constructs a ResNet-18 cfg configured model.
"""
return ResNet18s_Decode(BasicBlock, [1,1,2,2], cfg, num_classes=num_classes, se=se, se_reduction=se_reduction)
def resnet18_decode(cfg, num_classes, se=False, se_reduction=-1):
"""Constructs a ResNet-18 cfg configured model.
"""
return ResNet18s_Decode(BasicBlock, [2,2,2,2], cfg, num_classes=num_classes, se=se, se_reduction=se_reduction)
def resnet50_decode(cfg, num_classes, se=False, se_reduction=-1):
"""Constructs a ResNet-18 cfg configured model.
"""
return ResNet18s_Decode(Bottleneck, [3,4,6,3], cfg, num_classes=num_classes, se=se, se_reduction=se_reduction)
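# Minimal usage sketch. The channel widths below are illustrative only; any cfg
# with block_layer_num * sum(num_blocks) + 1 entries (17 for resnet18_decode)
# satisfies the constructor's length assertion.
if __name__ == "__main__":
    example_cfg = [64] + [64, 64] * 2 + [128, 128] * 2 + [256, 256] * 2 + [512, 512] * 2
    model = resnet18_decode(example_cfg, num_classes=1000)
    logits = model(torch.randn(1, 3, 224, 224))
    print(logits.shape)  # expected: torch.Size([1, 1000])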
| 4,388 |
892 | <gh_stars>100-1000
{
"schema_version": "1.2.0",
"id": "GHSA-q82w-r89w-5v2j",
"modified": "2022-05-02T03:25:43Z",
"published": "2022-05-02T03:25:43Z",
"aliases": [
"CVE-2009-1491"
],
"details": "McAfee GroupShield for Microsoft Exchange on Exchange Server 2000, and possibly other anti-virus or anti-spam products from McAfee or other vendors, does not scan X- headers for malicious content, which allows remote attackers to bypass virus detection via a crafted message, as demonstrated by a message with an X-Testing header and no message body.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2009-1491"
},
{
"type": "WEB",
"url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/50354"
},
{
"type": "WEB",
"url": "http://www.nmrc.org/~thegnome/blog/apr09/"
}
],
"database_specific": {
"cwe_ids": [
"CWE-20"
],
"severity": "HIGH",
"github_reviewed": false
}
} | 442 |
1,927 | /*
* Copyright (C) 2019 ByteDance Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.bytedance.scene;
import android.app.Activity;
import android.os.Bundle;
import android.view.ViewGroup;
import androidx.annotation.IdRes;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RestrictTo;
import static androidx.annotation.RestrictTo.Scope.LIBRARY_GROUP;
/**
* TODO merge SceneLifecycleDispatcher SceneLifecycleManager?
*
* @hide
*/
@RestrictTo(LIBRARY_GROUP)
public class SceneLifecycleDispatcher<T extends Scene & SceneParent> implements SceneContainerLifecycleCallback {
private static final String TAG = "SCENE";
private static final String TRACE_ACTIVITY_CREATED_TAG = "SceneLifecycleDispatcher#OnActivityCreated";
private static final String TRACE_START_TAG = "SceneLifecycleDispatcher#OnStart";
private static final String TRACE_RESUME_TAG = "SceneLifecycleDispatcher#OnResume";
private static final String TRACE_PAUSE_TAG = "SceneLifecycleDispatcher#OnPause";
private static final String TRACE_STOP_TAG = "SceneLifecycleDispatcher#OnStop";
private static final String TRACE_DESTROY_VIEW_TAG = "SceneLifecycleDispatcher#OnDestroyView";
private static final String TRACE_SAVE_INSTANCE_STATE_TAG = "SceneLifecycleDispatcher#OnSaveInstance";
@IdRes
private final int mSceneContainerViewId;
private final ViewFinder mViewFinder;
private final T mScene;
private final Scope.RootScopeFactory mRootScopeFactory;
private final boolean mSupportRestore;
private final SceneLifecycleManager<T> mLifecycleManager = new SceneLifecycleManager<>();
public SceneLifecycleDispatcher(@IdRes int sceneContainerViewId,
ViewFinder viewFinder,
T rootScene,
Scope.RootScopeFactory rootScopeFactory,
boolean supportRestore) {
this.mSceneContainerViewId = sceneContainerViewId;
this.mViewFinder = viewFinder;
this.mScene = rootScene;
this.mRootScopeFactory = rootScopeFactory;
this.mSupportRestore = supportRestore;
}
@Override
public void onActivityCreated(@NonNull Activity activity, @Nullable Bundle savedInstanceState) {
SceneTrace.beginSection(TRACE_ACTIVITY_CREATED_TAG);
ViewGroup viewGroup = this.mViewFinder.requireViewById(this.mSceneContainerViewId);
this.mLifecycleManager.onActivityCreated(activity, viewGroup, this.mScene, this.mRootScopeFactory,
this.mSupportRestore, this.mSupportRestore ? savedInstanceState : null);
SceneTrace.endSection();
}
@Override
public void onStarted() {
SceneTrace.beginSection(TRACE_START_TAG);
this.mLifecycleManager.onStart();
SceneTrace.endSection();
}
@Override
public void onResumed() {
SceneTrace.beginSection(TRACE_RESUME_TAG);
this.mLifecycleManager.onResume();
SceneTrace.endSection();
}
@Override
public void onPaused() {
SceneTrace.beginSection(TRACE_PAUSE_TAG);
this.mLifecycleManager.onPause();
SceneTrace.endSection();
}
@Override
public void onStopped() {
SceneTrace.beginSection(TRACE_STOP_TAG);
this.mLifecycleManager.onStop();
SceneTrace.endSection();
}
@Override
public void onViewDestroyed() {
SceneTrace.beginSection(TRACE_DESTROY_VIEW_TAG);
this.mLifecycleManager.onDestroyView();
SceneTrace.endSection();
}
@Override
public void onSaveInstanceState(@NonNull Bundle outState) {
if (this.mSupportRestore) {
outState.putString(TAG, this.mScene.getClass().getName());
SceneTrace.beginSection(TRACE_SAVE_INSTANCE_STATE_TAG);
this.mLifecycleManager.onSaveInstanceState(outState);
SceneTrace.endSection();
}
}
}
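// Illustrative wiring (the host class is an assumption, not part of this
// library): a component that owns the root Scene forwards its lifecycle
// callbacks to this dispatcher, e.g. onResume() -> onResumed(),
// onPause() -> onPaused(), onSaveInstanceState(Bundle) ->
// onSaveInstanceState(Bundle), so the SceneLifecycleManager sees the same
// ordering as the hosting Activity.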
| 1,693 |
935 | #pragma once
#include <thor-internal/ring-buffer.hpp>
namespace thor {
extern bool wantKernelProfile;
void initializeProfile();
LogRingBuffer *getGlobalProfileRing();
} // namespace thor
| 61 |
313 | <gh_stars>100-1000
// Copyright (c) 2015, <NAME>, <NAME>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#include "boundingmesh/VoxelSubset.h"
namespace boundingmesh {
VoxelSubset::VoxelSubset(std::shared_ptr<VoxelSet> voxels)
: voxels_(voxels), final_(false) {}
VoxelSubset::VoxelSubset(const VoxelSubset &subset, bool empty)
: voxels_(subset.voxels_),
splits_(subset.splits_),
convex_hull_(subset.convex_hull_),
final_(subset.final_) {
if (!empty)
indices_ = std::vector<Index>(subset.indices_);
else
indices_ = std::vector<Index>();
}
VoxelSubset::~VoxelSubset() {}
VoxelSubset &VoxelSubset::operator=(VoxelSubset other) {
swap(*this, other);
return *this;
}
void swap(VoxelSubset &first, VoxelSubset &second) {
first.voxels_.swap(second.voxels_);
first.indices_.swap(second.indices_);
first.splits_.swap(second.splits_);
std::swap(first.convex_hull_, second.convex_hull_);
std::swap(first.final_, second.final_);
}
void VoxelSubset::addVoxel(Index index) { indices_.push_back(index); }
void VoxelSubset::addSplit(AppliedSplit split) {
// Filter redundant splits:
// Each dimension can have at most 2 splits ....|->....<-|...
// AppliedSplit::mergeSplits determines which plane to keep/discard
bool add = true;
for (std::vector<AppliedSplit>::iterator it = splits_.begin();
it != splits_.end();) {
int result = AppliedSplit::mergeSplits(*it, split);
switch (result) {
case 0:
++it;
break;
case 1:
add = false;
++it;
break;
case 2:
it = splits_.erase(it);
break;
}
}
if (add) splits_.push_back(split);
}
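// Worked example (illustrative): if this subset already carries the split
// "x >= 3" and addSplit() receives "x >= 5", AppliedSplit::mergeSplits collapses
// the two same-direction planes so only one lower bound survives, while an
// upper bound such as "x <= 9" is kept separately, giving at most two planes
// per dimension as noted above.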
std::vector<VoxelSubset> VoxelSubset::partition(Split split) {
// Partition voxels of a set according to a plane
std::vector<VoxelSubset> subsets;
subsets.push_back(VoxelSubset(*this, true));
subsets.push_back(VoxelSubset(*this, true));
AppliedSplit above(split, true);
AppliedSplit below(split, false);
subsets[0].addSplit(above);
subsets[1].addSplit(below);
for (int i = 0; i < indices_.size(); ++i) {
const Voxel &voxel = voxels_->voxel(indices_[i]);
if (split.test(voxel)) {
subsets[0].addVoxel(indices_[i]);
} else {
subsets[1].addVoxel(indices_[i]);
}
}
// std::cout<<"split "<<split.dimension<<" "<<split.index<<" Values:
// "<<subsets[0].evaluate()<<" "<<subsets[1].evaluate()<<std::endl;
subsets[0].calculateConvexHull();
subsets[1].calculateConvexHull();
return subsets;
}
Real VoxelSubset::convexVolume() const { return convex_hull_.volume; }
Real VoxelSubset::volume() const {
return indices_.size() * voxels_->voxelSize() * voxels_->voxelSize() *
voxels_->voxelSize();
}
std::vector<Index> const &VoxelSubset::getIndices() const { return indices_; }
std::shared_ptr<VoxelSet> const &VoxelSubset::getVoxels() const {
return voxels_;
}
template <typename T>
int sgn(T val) {
return (T(0) < val) - (val < T(0));
}
void VoxelSubset::calculateConvexHull() {
std::vector<Vector3> points;
// Generate additional points within the volume
// Required at intersection of 3 planes, within the volume
for (int i = 0; i < splits_.size(); ++i)
for (int j = i + 1; j < splits_.size(); ++j)
for (int k = j + 1; k < splits_.size(); ++k) {
if (splits_[i].split.dimension != splits_[j].split.dimension &&
splits_[i].split.dimension != splits_[k].split.dimension &&
splits_[j].split.dimension != splits_[k].split.dimension) {
Vector3 voxel_pos;
voxel_pos(splits_[i].split.dimension) = splits_[i].split.index;
voxel_pos(splits_[j].split.dimension) = splits_[j].split.index;
voxel_pos(splits_[k].split.dimension) = splits_[k].split.index;
if (voxel_pos(0) < 0 || voxel_pos(0) > voxels_->resolution(0) ||
voxel_pos(1) < 0 || voxel_pos(1) > voxels_->resolution(1) ||
voxel_pos(2) < 0 || voxel_pos(2) > voxels_->resolution(2)) {
continue;
}
bool neighbour_filled =
voxels_->voxelAt(voxel_pos(0), voxel_pos(1), voxel_pos(2)) >= 0;
if (voxel_pos(2) + 1 < voxels_->resolution(2))
neighbour_filled = neighbour_filled ||
voxels_->voxelAt(voxel_pos(0), voxel_pos(1),
voxel_pos(2) + 1) >= 0;
if (voxel_pos(1) + 1 < voxels_->resolution(1))
neighbour_filled = neighbour_filled ||
voxels_->voxelAt(voxel_pos(0), voxel_pos(1) + 1,
voxel_pos(2)) >= 0;
if (voxel_pos(1) + 1 < voxels_->resolution(1) &&
voxel_pos(2) + 1 < voxels_->resolution(2))
neighbour_filled = neighbour_filled ||
voxels_->voxelAt(voxel_pos(0), voxel_pos(1) + 1,
voxel_pos(2) + 1) >= 0;
if (voxel_pos(0) + 1 < voxels_->resolution(0))
neighbour_filled = neighbour_filled ||
voxels_->voxelAt(voxel_pos(0) + 1, voxel_pos(1),
voxel_pos(2)) >= 0;
if (voxel_pos(0) + 1 < voxels_->resolution(0) &&
voxel_pos(2) + 1 < voxels_->resolution(2))
neighbour_filled = neighbour_filled ||
voxels_->voxelAt(voxel_pos(0) + 1, voxel_pos(1),
voxel_pos(2) + 1) >= 0;
if (voxel_pos(0) + 1 < voxels_->resolution(0) &&
voxel_pos(1) + 1 < voxels_->resolution(1))
neighbour_filled =
neighbour_filled ||
voxels_->voxelAt(voxel_pos(0) + 1, voxel_pos(1) + 1,
voxel_pos(2)) >= 0;
if (voxel_pos(0) + 1 < voxels_->resolution(0) &&
voxel_pos(1) + 1 < voxels_->resolution(1) &&
voxel_pos(2) + 1 < voxels_->resolution(2))
neighbour_filled =
neighbour_filled ||
voxels_->voxelAt(voxel_pos(0) + 1, voxel_pos(1) + 1,
voxel_pos(2) + 1) >= 0;
// Consider interior if at least one neighbouring voxel is filled
if (!neighbour_filled) {
continue;
}
Vector3 offset;
offset(splits_[i].split.dimension) =
(splits_[i].split.index + 0.5) * voxels_->voxelSize();
offset(splits_[j].split.dimension) =
(splits_[j].split.index + 0.5) * voxels_->voxelSize();
offset(splits_[k].split.dimension) =
(splits_[k].split.index + 0.5) * voxels_->voxelSize();
Vector3 new_point = voxels_->origin() + offset;
points.push_back(new_point);
}
}
std::set<Index> vertices;
Real epsilon = 0.00001;
// Collect points to generate convex hull
// Vertices of triangles on the surface
// Clip triangles against the split planes to remove overlap
// Points generated by clipping are directly added, vertices are added
// collectively by index to remove repetition
for (unsigned int i = 0; i < indices_.size(); ++i) {
const Voxel &voxel = voxels_->voxel(indices_[i]);
for (unsigned int j = 0; j < voxel.nTriangles(); ++j) {
const Triangle &triangle = voxels_->mesh()->triangle(voxel.triangle(j));
std::vector<Vector3> clipped_points;
std::vector<int> vertex_counter;
for (unsigned int k = 0; k < 3; ++k) {
clipped_points.push_back(
voxels_->mesh()->vertex(triangle.vertex(k)).position());
vertex_counter.push_back(triangle.vertex(k));
}
for (unsigned int k = 0; k < splits_.size(); ++k) {
Plane plane = splits_[k].getPlane(*voxels_);
for (unsigned int l = 0; l < clipped_points.size(); ++l) {
Vector3 current = clipped_points[l];
Real distance = plane.distance(current);
if (distance < -epsilon) {
Vector3 direction_forward =
clipped_points[(l + 1) % clipped_points.size()] - current;
Vector3 direction_backward =
clipped_points[(l + clipped_points.size() - 1) %
clipped_points.size()] -
current;
clipped_points.erase(clipped_points.begin() + l);
vertex_counter.erase(vertex_counter.begin() + l);
int inserted = 0;
if (direction_backward.dot(plane.normal) > epsilon) {
Real lambda_backward = (-current.dot(plane.normal) - plane.d) /
direction_backward.dot(plane.normal);
if (lambda_backward < 1 + epsilon && lambda_backward > 0) {
Vector3 backward_pos =
current + lambda_backward * direction_backward;
clipped_points.insert(clipped_points.begin() + l, backward_pos);
vertex_counter.insert(vertex_counter.begin() + l, -1);
inserted++;
}
}
if (direction_forward.dot(plane.normal) > epsilon) {
Real lambda_forward = (-current.dot(plane.normal) - plane.d) /
direction_forward.dot(plane.normal);
if (lambda_forward < 1 + epsilon && lambda_forward > 0) {
Vector3 forward_pos =
current + lambda_forward * direction_forward;
clipped_points.insert(clipped_points.begin() + l + inserted,
forward_pos);
vertex_counter.insert(vertex_counter.begin() + l + inserted,
-1);
inserted++;
}
}
l += inserted - 1;
}
}
}
for (unsigned int k = 0; k < vertex_counter.size(); ++k) {
if (vertex_counter[k] < 0) {
points.push_back(clipped_points[k]);
} else {
vertices.insert(vertex_counter[k]);
}
}
}
}
for (std::set<Index>::iterator it = vertices.begin(); it != vertices.end();
++it) {
points.push_back(voxels_->mesh()->vertex(*it).position());
}
if (points.size() > 3)
convex_hull_ = Convex(points);
else
convex_hull_ = Convex();
// Add split planes to hull for debugging
#if 0
for(unsigned int k = 0; k < splits_.size(); ++k)
{
Vector3 normal = Vector3::Zero();
normal(splits_[k].split.dimension) = 1;
Vector3 A = voxels_->origin() + ( ((Real)splits_[k].split.index+0.5) * voxels_->voxelSize()) * normal;
Vector3 direction1 = Vector3::Zero();
direction1((splits_[k].split.dimension+1)%3) = voxels_->voxelSize()*voxels_->resolution((splits_[k].split.dimension+1)%3);
Vector3 direction2 = Vector3::Zero();
direction2((splits_[k].split.dimension+2)%3) = voxels_->voxelSize()*voxels_->resolution((splits_[k].split.dimension+2)%3);
Index iA = result->addVertex(A);
Index iB = result->addVertex(A+direction1);
Index iC = result->addVertex(A+direction2);
Index iD = result->addVertex(A+direction1+direction2);
if(splits_[k].direction)
{
result->addTriangle(iA, iB, iD);
result->addTriangle(iA, iD, iC);
}
else
{
result->addTriangle(iA, iD, iB);
result->addTriangle(iA, iC, iD);
}
}
#endif
}
std::shared_ptr<Mesh> VoxelSubset::getConvexHull() {
if (!convex_hull_.mesh) {
return std::make_shared<Mesh>();
} else {
return convex_hull_.mesh;
}
}
bool VoxelSubset::isFinal() { return final_; }
void VoxelSubset::setFinal() { final_ = true; }
} | 6,164 |
1,334 | package org.mockserver.mock.action.http;
import org.mockserver.model.HttpResponse;
/**
* @author jamesdbloom
*/
public class HttpResponseActionHandler {
public HttpResponse handle(HttpResponse httpResponse) {
return httpResponse.clone();
}
}
| 89 |
190,993 | /* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_EXPERIMENTAL_ACCELERATION_MINI_BENCHMARK_FB_STORAGE_H_
#define TENSORFLOW_LITE_EXPERIMENTAL_ACCELERATION_MINI_BENCHMARK_FB_STORAGE_H_
#include <errno.h>
#include <cstring>
#include <string>
#include <vector>
#include "absl/base/attributes.h"
#include "absl/strings/string_view.h"
#include "flatbuffers/base.h" // from @flatbuffers
#include "flatbuffers/flatbuffers.h" // from @flatbuffers
#include "tensorflow/lite/c/c_api_types.h"
#include "tensorflow/lite/c/common.h"
#include "tensorflow/lite/core/api/error_reporter.h"
#include "tensorflow/lite/experimental/acceleration/mini_benchmark/status_codes.h"
#include "tensorflow/lite/stderr_reporter.h"
namespace tflite {
namespace acceleration {
// FileStorage wraps storage of data in a file with locking and error handling.
// Locking makes appends and reads atomic, using flock(2).
//
// The locking in this class is not meant for general purpose multiple
// reader/writer support, but primarily for the case where a previous instance
// of a program has not finished and we'd like to not corrupt the file
// unnecessarily.
class FileStorage {
public:
FileStorage(absl::string_view path, ErrorReporter* error_reporter);
// Read contents into buffer_. Returns an error if file exists but cannot be
// read.
MinibenchmarkStatus ReadFileIntoBuffer();
// Append data to file. Resets the in-memory items and returns an error if
// writing fails in any way.
//
// This calls fsync() on the file to guarantee persistence and is hence quite
// expensive. The assumption is that this is not done often or in a critical
// path.
MinibenchmarkStatus AppendDataToFile(absl::string_view data);
protected:
std::string path_;
ErrorReporter* error_reporter_;
std::string buffer_;
};
// FlatbufferStorage stores several flatbuffer objects in a file. The primary
// usage is for storing mini benchmark results.
//
// Flatbuffers are not designed for easy mutation. This class is append-only.
// The intended usage is to store a log of events like 'start benchmark with
// configuration X', 'benchmark results for X' / 'crash observed with X' that
// are then parsed to make decisions about how to configure TFLite.
//
// The data is stored as consecutive length-prefixed flatbuffers with identifier
// "STO1".
ABSL_CONST_INIT extern const char kFlatbufferStorageIdentifier[];
template <typename T>
class FlatbufferStorage : protected FileStorage {
public:
explicit FlatbufferStorage(
absl::string_view path,
ErrorReporter* error_reporter = DefaultErrorReporter())
: FileStorage(path, error_reporter) {}
// Reads current contents. Returns an error if file is inaccessible or
// contents are corrupt. The file not existing is not an error.
MinibenchmarkStatus Read();
// Get count of objects stored.
size_t Count() { return contents_.size(); }
// Get the object at index i, where i < Count().
const T* Get(size_t i) { return contents_[i]; }
// Append a new object to storage and write out to disk. Returns an error if
// disk write or re-read fails.
MinibenchmarkStatus Append(flatbuffers::FlatBufferBuilder* fbb,
flatbuffers::Offset<T> object);
private:
std::vector<const T*> contents_;
};
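// Usage sketch (the table type and file path are illustrative assumptions):
//
//   FlatbufferStorage<BenchmarkEvent> storage("/data/local/tmp/events.fb");
//   if (storage.Read() == kMinibenchmarkSuccess) {
//     for (size_t i = 0; i < storage.Count(); ++i) {
//       const BenchmarkEvent* event = storage.Get(i);
//       // inspect event ...
//     }
//   }
//   // Append() re-reads the file on success, so Count()/Get() then reflect
//   // the newly written record as well.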
template <typename T>
MinibenchmarkStatus FlatbufferStorage<T>::Read() {
contents_.clear();
MinibenchmarkStatus status = ReadFileIntoBuffer();
if (status != kMinibenchmarkSuccess) {
return status;
}
size_t remaining_size = buffer_.size();
const uint8_t* current_ptr =
reinterpret_cast<const uint8_t*>(buffer_.c_str());
while (remaining_size != 0) {
if (remaining_size < sizeof(flatbuffers::uoffset_t)) {
TF_LITE_REPORT_ERROR(
error_reporter_,
"Corrupt size-prefixed flatbuffer file %s (remaining size less than "
"size of uoffset_t)",
path_.c_str());
return kMinibenchmarkCorruptSizePrefixedFlatbufferFile;
}
flatbuffers::uoffset_t current_size =
flatbuffers::ReadScalar<flatbuffers::uoffset_t>(current_ptr);
flatbuffers::Verifier verifier(
current_ptr, sizeof(flatbuffers::uoffset_t) + current_size);
if (!verifier.VerifySizePrefixedBuffer<T>(kFlatbufferStorageIdentifier)) {
TF_LITE_REPORT_ERROR(
error_reporter_,
"Corrupt size-prefixed flatbuffer file %s (verifier returned false)",
path_.c_str());
return kMinibenchmarkCorruptSizePrefixedFlatbufferFile;
}
contents_.push_back(flatbuffers::GetSizePrefixedRoot<T>(current_ptr));
size_t consumed = sizeof(flatbuffers::uoffset_t) + current_size;
if (remaining_size < consumed) {
TF_LITE_REPORT_ERROR(
error_reporter_,
"Corrupt size-prefixed flatbuffer file %s (mismatched size "
"calculation)",
path_.c_str());
return kMinibenchmarkCorruptSizePrefixedFlatbufferFile;
}
remaining_size -= consumed;
current_ptr += consumed;
}
return kMinibenchmarkSuccess;
}
template <typename T>
MinibenchmarkStatus FlatbufferStorage<T>::Append(
flatbuffers::FlatBufferBuilder* fbb, flatbuffers::Offset<T> object) {
contents_.clear();
fbb->FinishSizePrefixed(object, kFlatbufferStorageIdentifier);
const char* data = reinterpret_cast<const char*>(fbb->GetBufferPointer());
size_t size = fbb->GetSize();
MinibenchmarkStatus status = AppendDataToFile({data, size});
if (status != kMinibenchmarkSuccess) {
return status;
}
return Read();
}
} // namespace acceleration
} // namespace tflite
#endif // TENSORFLOW_LITE_EXPERIMENTAL_ACCELERATION_MINI_BENCHMARK_FB_STORAGE_H_
| 2,101 |
2,293 | /*
* Copyright (c) 1999 <NAME> <EMAIL>
*
* You may distribute under the terms of either the GNU General Public
* License or the Artistic License, as specified in the README file.
*
*/
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <sys/unistd.h>
#include <process.h>
#include "EXTERN.h"
#include "perl.h"
#include "XSUB.h"
int
do_spawn( char *cmd) {
dTHX;
return system( cmd);
}
int
do_aspawn ( void *vreally, void **vmark, void **vsp) {
dTHX;
SV *really = (SV*)vreally;
SV **mark = (SV**)vmark;
SV **sp = (SV**)vsp;
char **argv;
char *str;
char *p2, **ptr;
char *cmd;
int rc;
int index = 0;
if (sp<=mark)
return -1;
ptr = argv =(char**) malloc ((sp-mark+3)*sizeof (char*));
while (++mark <= sp) {
if (*mark && (str = SvPV_nolen(*mark)))
argv[index++] = str;
else
argv[index++] = "";
}
argv[index++] = 0;
cmd = strdup((const char*)(really ? SvPV_nolen(really) : argv[0]));
rc = spawnvp( P_WAIT, cmd, argv);
free( argv);
free( cmd);
return rc;
}
static
XS(epoc_getcwd) /* more or less stolen from win32.c */
{
dXSARGS;
/* Make the host for current directory */
char *buffer;
int buflen = 256;
char *ptr;
buffer = (char *) malloc( buflen);
if (buffer == NULL) {
XSRETURN_UNDEF;
}
while ((NULL == ( ptr = getcwd( buffer, buflen))) && (errno == ERANGE)) {
buflen *= 2;
/* realloc may move the block, so keep the returned pointer */
ptr = (char *) realloc( buffer, buflen);
if (ptr == NULL) {
free( buffer);
XSRETURN_UNDEF;
}
buffer = ptr;
}
/*
* If ptr != NULL
* then it worked, set PV valid,
* else return 'undef'
*/
if (ptr) {
SV *sv = sv_newmortal();
char *tptr;
for (tptr = ptr; *tptr != '\0'; tptr++) {
if (*tptr == '\\') {
*tptr = '/';
}
}
sv_setpv(sv, ptr);
free( buffer);
EXTEND(SP,1);
SvPOK_on(sv);
ST(0) = sv;
#ifndef INCOMPLETE_TAINTS
SvTAINTED_on(ST(0));
#endif
XSRETURN(1);
}
free( buffer);
XSRETURN_UNDEF;
}
void
Perl_init_os_extras(void)
{
dTHX;
char *file = __FILE__;
newXS("EPOC::getcwd", epoc_getcwd, file);
}
| 1,049 |
852 | #ifndef ErrorMatrixPropagator_h
#define ErrorMatrixPropagator_h
/* From SimpleFits Package
* Designed and written by
* author: <NAME>
* Humboldt Foundations
*/
#include <functional>
#include "TMatrixT.h"
#include "TMatrixTSym.h"
#include "TVectorT.h"
namespace tauImpactParameter {
class ErrorMatrixPropagator {
public:
ErrorMatrixPropagator(){};
virtual ~ErrorMatrixPropagator(){};
static TMatrixTSym<double> propagateError(std::function<TVectorT<double>(const TVectorT<double>&)> f,
const TVectorT<double>& inPar,
TMatrixTSym<double>& inCov,
double epsilon = 0.001,
double errorEpsilonRatio = 1000);
};
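// Usage sketch (the mapping and matrix contents are illustrative): propagate a
// parameter covariance through a user-supplied function f by numerical
// differentiation.
//
//   TVectorT<double> par(3);      // fitted parameters
//   TMatrixTSym<double> cov(3);   // their covariance
//   auto f = [](const TVectorT<double>& p) { return p; };  // placeholder mapping
//   TMatrixTSym<double> outCov =
//       ErrorMatrixPropagator::propagateError(f, par, cov);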
} // namespace tauImpactParameter
#endif
| 420 |
2,151 | // Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/renderer/media/audio_input_ipc_factory.h"
#include <utility>
#include "base/logging.h"
#include "base/sequenced_task_runner.h"
#include "base/single_thread_task_runner.h"
#include "content/common/media/renderer_audio_input_stream_factory.mojom.h"
#include "content/renderer/media/mojo_audio_input_ipc.h"
#include "content/renderer/render_frame_impl.h"
#include "services/service_manager/public/cpp/interface_provider.h"
namespace content {
namespace {
void CreateMojoAudioInputStreamOnMainThread(
int frame_id,
int32_t session_id,
mojom::RendererAudioInputStreamFactoryClientPtr client,
const media::AudioParameters& params,
bool automatic_gain_control,
uint32_t total_segments) {
RenderFrameImpl* frame = RenderFrameImpl::FromRoutingID(frame_id);
if (frame) {
frame->GetAudioInputStreamFactory()->CreateStream(
std::move(client), session_id, params, automatic_gain_control,
total_segments);
}
}
void CreateMojoAudioInputStream(
scoped_refptr<base::SequencedTaskRunner> main_task_runner,
int frame_id,
int32_t session_id,
mojom::RendererAudioInputStreamFactoryClientPtr client,
const media::AudioParameters& params,
bool automatic_gain_control,
uint32_t total_segments) {
main_task_runner->PostTask(
FROM_HERE, base::BindOnce(&CreateMojoAudioInputStreamOnMainThread,
frame_id, session_id, std::move(client), params,
automatic_gain_control, total_segments));
}
void AssociateInputAndOutputForAec(
scoped_refptr<base::SequencedTaskRunner> main_task_runner,
int frame_id,
const base::UnguessableToken& input_stream_id,
const std::string& output_device_id) {
main_task_runner->PostTask(
FROM_HERE,
base::BindOnce(
[](int frame_id, const base::UnguessableToken& input_stream_id,
const std::string& output_device_id) {
RenderFrameImpl* frame = RenderFrameImpl::FromRoutingID(frame_id);
if (frame) {
frame->GetAudioInputStreamFactory()
->AssociateInputAndOutputForAec(input_stream_id,
output_device_id);
}
},
frame_id, input_stream_id, output_device_id));
}
} // namespace
AudioInputIPCFactory* AudioInputIPCFactory::instance_ = nullptr;
AudioInputIPCFactory::AudioInputIPCFactory(
scoped_refptr<base::SequencedTaskRunner> main_task_runner,
scoped_refptr<base::SingleThreadTaskRunner> io_task_runner)
: main_task_runner_(std::move(main_task_runner)),
io_task_runner_(std::move(io_task_runner)) {
DCHECK(!instance_);
instance_ = this;
}
AudioInputIPCFactory::~AudioInputIPCFactory() {
DCHECK_EQ(instance_, this);
instance_ = nullptr;
}
std::unique_ptr<media::AudioInputIPC> AudioInputIPCFactory::CreateAudioInputIPC(
int frame_id,
int session_id) const {
DCHECK_NE(0, session_id);
return std::make_unique<MojoAudioInputIPC>(
base::BindRepeating(&CreateMojoAudioInputStream, main_task_runner_,
frame_id, session_id),
base::BindRepeating(&AssociateInputAndOutputForAec, main_task_runner_,
frame_id));
}
} // namespace content
| 1,405 |
892 | <filename>advisories/unreviewed/2022/05/GHSA-jm3x-f377-5m2h/GHSA-jm3x-f377-5m2h.json
{
"schema_version": "1.2.0",
"id": "GHSA-jm3x-f377-5m2h",
"modified": "2022-05-05T02:48:14Z",
"published": "2022-05-05T02:48:14Z",
"aliases": [
"CVE-2013-0011"
],
"details": "The Print Spooler in Microsoft Windows Server 2008 R2 and R2 SP1 and Windows 7 Gold and SP1 allows remote attackers to execute arbitrary code or cause a denial of service (memory corruption) via a crafted print job, aka \"Windows Print Spooler Components Vulnerability.\"",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2013-0011"
},
{
"type": "WEB",
"url": "https://docs.microsoft.com/en-us/security-updates/securitybulletins/2013/ms13-001"
},
{
"type": "WEB",
"url": "https://oval.cisecurity.org/repository/search/definition/oval%3Aorg.mitre.oval%3Adef%3A16357"
},
{
"type": "WEB",
"url": "http://www.us-cert.gov/cas/techalerts/TA13-008A.html"
}
],
"database_specific": {
"cwe_ids": [
"CWE-119"
],
"severity": "HIGH",
"github_reviewed": false
}
} | 554 |
335 | package com.russelarms.offsetanimatorsample;
import android.content.Context;
import android.graphics.Point;
import android.util.AttributeSet;
import android.view.View;
import android.view.animation.DecelerateInterpolator;
import android.widget.RelativeLayout;
import com.russelarms.offsetanimator.AnimatorFactory;
import com.russelarms.offsetanimator.Scene;
import com.russelarms.offsetanimator.util.ArcSide;
import com.russelarms.offsetanimator.util.ArcUtils;
import com.russelarms.offsetanimator.util.SpringInterpolator;
import butterknife.BindView;
import butterknife.ButterKnife;
import static com.russelarms.offsetanimatorsample.ScreenUtils.convertDIPToPixels;
/**
* Custom view that contains a {@link Scene} and the animation script.
*/
public class AnimatorScene extends RelativeLayout {
private Point screenDimensions;
@BindView(R.id.ocean)
View ocean;
@BindView(R.id.fish_left)
View fishLeft;
@BindView(R.id.fish_left_bottom)
View fishLeftBottom;
@BindView(R.id.fish_right)
View fishRight;
@BindView(R.id.submarine)
View submarine;
private Scene scene;
public Scene getScene() {
return scene;
}
public AnimatorScene(Context context) {
super(context);
init();
}
public AnimatorScene(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
private void init() {
inflate(getContext(), R.layout.scene_layout, this);
ButterKnife.bind(this);
screenDimensions = ScreenUtils.getScreenDimensions(getContext());
scene = Scene.create(getRootView(), () -> {
prepare();
initSteps();
});
}
public void prepare() {
submarine.setY(screenDimensions.y + dipToPx(50));
submarine.setX(screenDimensions.x);
fishLeftBottom.setY(screenDimensions.y);
}
private void initSteps() {
scene.page(0).step(0)
.createAnimation(ocean.getY(), ocean.getY() - dipToPx(120))
.setDuration(0.8f)
.setListener(value -> ocean.setY(value));
scene.page(0).step(1)
.createAnimation(fishLeft.getX(), fishLeft.getX() + dipToPx(160))
.setDuration(0.3f)
.setStartThreshold(0.5f)
.setListener(value -> fishLeft.setX(value));
scene.page(0).step(2)
.createAnimation(fishRight.getX(), fishRight.getX() - dipToPx(160))
.setDuration(0.5f)
.setStartThreshold(0.3f)
.setListener(value -> fishRight.setX(value));
scene.page(0).step(3)
.createAnimation(fishLeftBottom.getY(), fishLeftBottom.getY() - screenDimensions.y / 2)
.setListener(value -> fishLeftBottom.setY(value));
scene.page(1).step(0)
.createAnimation(1926, 1032)
.setInterpolator(new SpringInterpolator(0.8f))
.setListener(value -> submarine.setY(value));
scene.page(1).step(1)
.createAnimation(submarine.getX(), screenDimensions.x / 2 - submarine.getWidth() / 2)
.setListener(value -> submarine.setX(value));
scene.page(2).step(0)
.createAnimation(() -> AnimatorFactory.createArcAnimator(submarine,
ArcUtils.centerX(submarine),
ArcUtils.centerY(submarine),
ArcUtils.centerX(submarine),
dipToPx(48),
180f, ArcSide.LEFT))
.setDuration(0.5f);
scene.page(2).step(1)
.createAnimation(() -> AnimatorFactory.createArcAnimator(submarine,
ArcUtils.centerX(submarine),
dipToPx(48),
submarine.getX() + submarine.getWidth() / 2,
ArcUtils.centerY(submarine),
180f, ArcSide.RIGHT))
.setStartThreshold(0.5f)
.setDuration(0.5f);
scene.page(2).step(2)
.createAnimation(0, 90)
.setDuration(0.25f)
.setListener(value -> submarine.setRotation(value));
scene.page(2).step(3)
.createAnimation(90, 180)
.setStartThreshold(0.25f)
.setDuration(0.25f)
.setListener(value -> submarine.setRotation(value));
scene.page(2).step(4)
.createAnimation(180, 270)
.setStartThreshold(0.5f)
.setListener(value -> submarine.setRotation(value))
.setDuration(0.25f);
scene.page(2).step(5)
.createAnimation(270, 360)
.setStartThreshold(0.75f)
.setDuration(0.25f)
.setListener(value -> submarine.setRotation(value));
scene.page(3).step(0)
.createAnimation(() -> AnimatorFactory.createAnimator(fishLeft.getX(), fishLeft.getX() - dipToPx(160)))
.setDuration(0.5f)
.setListener(value -> fishLeft.setX(value));
scene.page(3).step(1)
.createAnimation(() -> new AnotherOffsetAnimator(fishRight.getX(), fishRight.getX() + dipToPx(160)))
.setDuration(0.5f)
.setListener(value -> fishRight.setX(value));
scene.page(3).step(2)
.createAnimation(() -> AnimatorFactory.createAnimator(fishLeftBottom.getY(), fishLeftBottom.getY() + screenDimensions.y / 2))
.setDuration(0.5f)
.setListener(value -> fishLeftBottom.setY(value));
scene.page(3).step(3)
.createAnimation(() -> AnimatorFactory.createAnimator(fishLeftBottom.getY(), fishLeftBottom.getY() + screenDimensions.y / 2))
.setDuration(0.5f)
.setListener(value -> fishLeftBottom.setY(value));
scene.page(3).step(4)
.createAnimation(() -> AnimatorFactory.createAnimator(submarine.getScaleX(), submarine.getScaleX() * 2))
.setListener(value -> {
submarine.setScaleX(value);
submarine.setScaleY(value);
});
scene.page(3).step(5)
.createAnimation(1, 0.5f)
.setListener(value -> {
fishLeftBottom.setScaleY(value);
fishLeftBottom.setScaleX(value);
fishLeft.setScaleX(value);
fishLeft.setScaleY(value);
fishRight.setScaleX(value);
fishRight.setScaleY(value);
});
scene.page(3).step(6)
.createAnimation(() -> AnimatorFactory.createAnimator(submarine.getY(), screenDimensions.y / 2 - submarine.getHeight()))
.setInterpolator(new DecelerateInterpolator())
.setListener(value -> submarine.setY(value));
}
private int dipToPx(int dip) {
return convertDIPToPixels(getContext(), dip);
}
}
| 3,476 |
563 | package com.gentics.mesh.core.rest.node.field.list.impl;
import com.gentics.mesh.core.rest.node.FieldMap;
import com.gentics.mesh.core.rest.node.field.MicronodeField;
import com.gentics.mesh.core.rest.node.field.list.MicronodeFieldList;
/**
* REST model for a micronode list field. Please note that {@link FieldMap} will handle the actual JSON format building.
*/
public class MicronodeFieldListImpl extends AbstractFieldList<MicronodeField> implements MicronodeFieldList {
}
| 153 |
743 | <gh_stars>100-1000
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.guacamole.auth.saml;
import com.google.inject.Singleton;
import com.onelogin.saml2.authn.SamlResponse;
import com.onelogin.saml2.exception.ValidationError;
import java.util.Collection;
import java.util.Iterator;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
* A class that handles mapping of hashes to SAMLResponse objects.
*/
@Singleton
public class SAMLResponseMap {
/**
* The internal data structure that holds a map of SHA-256 hashes to
* SAML responses.
*/
private final ConcurrentMap<String, SamlResponse> samlResponseMap =
new ConcurrentHashMap<>();
/**
* Executor service which runs the periodic cleanup task
*/
private final ScheduledExecutorService executor =
Executors.newScheduledThreadPool(1);
/**
* Create a new instance of this response map and kick off the executor
* that schedules the response cleanup task to run every five minutes.
*/
public SAMLResponseMap() {
// Cleanup unclaimed responses every five minutes
executor.scheduleAtFixedRate(new SAMLResponseCleanupTask(), 5, 5, TimeUnit.MINUTES);
}
/**
* Retrieve the SamlResponse from the map that is represented by the
* provided hash, or null if no such object exists.
*
* @param hash
* The SHA-256 hash of the SamlResponse.
*
* @return
* The SamlResponse object matching the hash provided.
*/
protected SamlResponse getSamlResponse(String hash) {
return samlResponseMap.remove(hash);
}
/**
* Place the provided mapping of hash to SamlResponse into the map.
*
* @param hash
* The hash that will be the lookup key for this SamlResponse.
*
* @param samlResponse
* The SamlResponse object.
*/
protected void putSamlResponse(String hash, SamlResponse samlResponse) {
samlResponseMap.put(hash, samlResponse);
}
/**
* Return true if the provided hash key exists in the map, otherwise false.
*
* @param hash
* The hash key to look for in the map.
*
* @return
* true if the provided hash is present, otherwise false.
*/
protected boolean hasSamlResponse(String hash) {
return samlResponseMap.containsKey(hash);
}
/**
* Task which runs every five minutes and cleans up any expired SAML
* responses that haven't been claimed and removed from the map.
*/
private class SAMLResponseCleanupTask implements Runnable {
@Override
public void run() {
// Loop through responses in map and remove ones that are no longer valid.
Iterator<SamlResponse> responseIterator = samlResponseMap.values().iterator();
while (responseIterator.hasNext()) {
try {
responseIterator.next().validateTimestamps();
}
catch (ValidationError e) {
responseIterator.remove();
}
}
}
}
/**
* Shut down the executor service that periodically cleans out the
* SamlResponse Map. This must be invoked during webapp shutdown in order
* to avoid resource leaks.
*/
public void shutdown() {
executor.shutdownNow();
}
}
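// Illustrative flow (the call sites are assumptions, not part of this class):
// the SSO callback validates a SamlResponse, stores it under its SHA-256 hash,
// and hands the hash to the client; the login-resume step later claims it
// exactly once, because getSamlResponse() removes the entry it returns.
//
//   responseMap.putSamlResponse(hash, samlResponse);
//   ...
//   SamlResponse response = responseMap.getSamlResponse(hash);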
| 1,582 |
405 | <filename>convlab/modules/policy/system/policy.py
"""
The policy base class for system bot.
"""
class SysPolicy:
"""Base class for system policy model."""
def __init__(self):
""" Constructor for SysPolicy class. """
pass
def predict(self, state):
"""
Predict the system action (dialog act) given state.
Args:
state (dict): Dialog state. For more details about the each field of the dialog state, please refer to
the init_state method in convlab/dst/dst_util.py
Returns:
action (dict): The dialog act of the current turn system response, which is then passed to NLG module to
generate a NL utterance.
"""
pass
def init_session(self):
"""Init the SysPolicy module to start a new session."""
pass
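# Minimal illustrative subclass (the dialog-act format in the return value is an
# assumption, not taken from the ConvLab codebase):
class GreetingPolicy(SysPolicy):
    def predict(self, state):
        # Ignore the tracked state and always answer with a greeting act.
        return {"general-welcome": [["none", "none"]]}

    def init_session(self):
        pass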
| 348 |
1,306 | <filename>app/src/main/java/com/gh4a/fragment/ReleaseListFragment.java
package com.gh4a.fragment;
import android.os.Bundle;
import androidx.annotation.Nullable;
import androidx.recyclerview.widget.RecyclerView;
import com.gh4a.R;
import com.gh4a.ServiceFactory;
import com.gh4a.activities.ReleaseInfoActivity;
import com.gh4a.adapter.ReleaseAdapter;
import com.gh4a.adapter.RootAdapter;
import com.meisolsson.githubsdk.model.Page;
import com.meisolsson.githubsdk.model.Release;
import com.meisolsson.githubsdk.service.repositories.RepositoryReleaseService;
import java.util.Collections;
import java.util.Comparator;
import io.reactivex.Single;
import retrofit2.Response;
import static java.util.Comparator.reverseOrder;
import static java.util.Comparator.nullsFirst;
public class ReleaseListFragment extends PagedDataBaseFragment<Release> implements
RootAdapter.OnItemClickListener<Release> {
private String mUserLogin;
private String mRepoName;
private static final Comparator<Release> MOST_RECENT_RELEASES_AND_DRAFTS_FIRST =
Comparator.comparing(Release::publishedAt, nullsFirst(reverseOrder()));
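// Ordering example (dates are illustrative): for publishedAt values
// {null, 2021-04-01, 2021-05-01} the comparator yields
// [draft (null), 2021-05-01, 2021-04-01], i.e. drafts first, then newest to oldest.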
public static ReleaseListFragment newInstance(String owner, String repo) {
ReleaseListFragment f = new ReleaseListFragment();
Bundle args = new Bundle();
args.putString("owner", owner);
args.putString("repo", repo);
f.setArguments(args);
return f;
}
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mUserLogin = getArguments().getString("owner");
mRepoName = getArguments().getString("repo");
}
@Override
protected Single<Response<Page<Release>>> loadPage(int page, boolean bypassCache) {
final RepositoryReleaseService service = ServiceFactory.get(RepositoryReleaseService.class, bypassCache);
return service.getReleases(mUserLogin, mRepoName, page)
// Sometimes the API returns releases in a slightly wrong order (see TeamNewPipe/NewPipe repo
// for an example), so we need to fix the sorting locally
.map(response -> {
if (response.body() != null) {
Collections.sort(response.body().items(), MOST_RECENT_RELEASES_AND_DRAFTS_FIRST);
}
return response;
});
}
@Override
protected RootAdapter<Release, ? extends RecyclerView.ViewHolder> onCreateAdapter() {
ReleaseAdapter adapter = new ReleaseAdapter(getActivity());
adapter.setOnItemClickListener(this);
return adapter;
}
@Override
protected int getEmptyTextResId() {
return R.string.no_releases_found;
}
@Override
public void onItemClick(Release release) {
startActivity(ReleaseInfoActivity.makeIntent(getActivity(), mUserLogin, mRepoName, release));
}
}
| 1,115 |
606 | <gh_stars>100-1000
package org.arend.ext.userData;
public class Key<T> {
protected final String name;
public Key(String name) {
this.name = name;
}
public Key() {
this.name = null;
}
public T copy(T value) {
return value;
}
@Override
public String toString() {
return name != null ? name : super.toString();
}
}
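// Illustrative usage (the user-data holder below is hypothetical; this class
// only supplies the key type). Keys compare by object identity, since
// equals()/hashCode() are not overridden, so the same Key instance must be
// reused for lookups:
//
//   public static final Key<Integer> DEPTH = new Key<>("depth");
//   holder.putUserData(DEPTH, 3);
//   Integer depth = holder.getUserData(DEPTH);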
| 131 |
4,036 | <reponame>vadi2/codeql
package com.semmle.js.ast.regexp;
/** Visitor interface for {@link RegExpTerm}. */
public interface Visitor {
public void visit(Caret nd);
public void visit(Constant nd);
public void visit(Dollar nd);
public void visit(Group nd);
public void visit(NonWordBoundary nd);
public void visit(Opt nd);
public void visit(Plus nd);
public void visit(Range nd);
public void visit(Sequence nd);
public void visit(Star nd);
public void visit(WordBoundary nd);
public void visit(Disjunction nd);
public void visit(ZeroWidthPositiveLookahead nd);
public void visit(ZeroWidthNegativeLookahead nd);
public void visit(Dot nd);
public void visit(DecimalEscape nd);
public void visit(HexEscapeSequence nd);
public void visit(OctalEscape nd);
public void visit(UnicodeEscapeSequence nd);
public void visit(BackReference nd);
public void visit(ControlEscape nd);
public void visit(IdentityEscape nd);
public void visit(ControlLetter nd);
public void visit(CharacterClassEscape nd);
public void visit(CharacterClass nd);
public void visit(CharacterClassRange nd);
public void visit(NamedBackReference nd);
public void visit(ZeroWidthPositiveLookbehind nd);
public void visit(ZeroWidthNegativeLookbehind nd);
public void visit(UnicodePropertyEscape nd);
}
| 421 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-2wxx-6f5h-cwcg",
"modified": "2022-05-04T00:31:05Z",
"published": "2022-05-04T00:31:05Z",
"aliases": [
"CVE-2012-0536"
],
"details": "Unspecified vulnerability in the PeopleSoft Enterprise HRMS component in Oracle PeopleSoft Products 8.9 through Bundle #26 allows remote authenticated users to affect confidentiality via unknown vectors related to eCompensation.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2012-0536"
},
{
"type": "WEB",
"url": "http://secunia.com/advisories/48878"
},
{
"type": "WEB",
"url": "http://www.mandriva.com/security/advisories?name=MDVSA-2013:150"
},
{
"type": "WEB",
"url": "http://www.oracle.com/technetwork/topics/security/cpuapr2012-366314.html"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/bid/53099"
},
{
"type": "WEB",
"url": "http://www.securitytracker.com/id?1026954"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": "MODERATE",
"github_reviewed": false
}
} | 548 |
347 | <filename>http/src/main/java/com/cjj/listener/BaseListener.java
package com.cjj.listener;
import com.squareup.okhttp.Call;
/**
* Created by Administrator on 2015/8/25.
*/
public abstract class BaseListener<T> {
public abstract void onError(Exception e);
public abstract void onSuccess(T result);
public abstract void onStringResult(String result);
public abstract void onDownloadFinish(String path); // download finished
public abstract void onDownloadProgress(int progress); // download progress
}
| 169 |
310 | {
"name": "tcpdump",
"description": "A command-line tool for analysing packets.",
"url": "http://www.tcpdump.org/"
} | 45 |
348 | <reponame>armcha/Vertical-Intro
package com.luseen.sample;
import android.graphics.Typeface;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Toast;
import com.luseen.verticalintrolibrary.VerticalIntro;
import com.luseen.verticalintrolibrary.VerticalIntroItem;
public class TestActivity extends VerticalIntro {
@Override
protected void init() {
addIntroItem(new VerticalIntroItem.Builder()
.backgroundColor(R.color.colorAccent)
.image(R.drawable.intro_second_vector)
.title("Lorem Ipsum Lorem Ipsum")
.text("Lorem Ipsum is simply dummy text of the printing and typesetting industry." +
"Lorem Ipsum is simply dummy text of the printing and typesetting industry." +
"Lorem Ipsum is simply dummy text of the printing and typesetting industry.")
.textSize(14)
.titleSize(17)
.build());
addIntroItem(new VerticalIntroItem.Builder()
.backgroundColor(R.color.color2)
.image(R.drawable.four)
.title("Lorem Ipsum Lorem Ipsum ")
.text("Lorem Ipsum is simply dummy text of the printing and typesetting industry." +
"Lorem Ipsum is simply dummy text of the printing and typesetting industry.")
.build());
addIntroItem(new VerticalIntroItem.Builder()
.backgroundColor(R.color.colorPrimary)
.image(R.drawable.android)
.title("Lorem Ipsum")
.text("Lorem Ipsum is simply dummy text of the printing and typesetting industry.")
.textColor(R.color.black)
.titleColor(R.color.black)
.build());
addIntroItem(new VerticalIntroItem.Builder()
.backgroundColor(R.color.color3)
.image(R.drawable.new_intro)
.title("Lorem Ipsum")
.text("Lorem Ipsum is simply dummy text of the printing and typesetting industry." +
"Lorem Ipsum is simply dummy text of the printing and typesetting industry." +
"Lorem Ipsum is simply dummy text of the printing and typesetting industry.")
.build());
setSkipEnabled(true);
setVibrateEnabled(true);
setSkipColor(R.color.black);
// setNextText("OK");
// setDoneText("FINISH HIM");
// setSkipText("GO GO");
setVibrateIntensity(20);
setCustomTypeFace(Typeface.createFromAsset(getAssets(), "fonts/NotoSans-Regular.ttf"));
}
@Override
protected Integer setLastItemBottomViewColor() {
return R.color.color2;
}
@Override
protected void onSkipPressed(View view) {
Log.e("onSkipPressed ", "onSkipPressed");
}
@Override
protected void onFragmentChanged(int position) {
Log.e("onFragmentChanged ", "" + position);
}
@Override
protected void onDonePressed() {
Toast.makeText(this, "Done", Toast.LENGTH_SHORT).show();
}
}
| 1,414 |
3,102 | <gh_stars>1000+
// RUN: %clang_cc1 -triple=x86_64-linux-gnu -emit-llvm -o - %s
// Don't crash if the argument to __builtin_constant_p isn't scalar.
template <typename T>
constexpr bool is_constant(const T v) {
return __builtin_constant_p(v);
}
template <typename T>
class numeric {
public:
using type = T;
template <typename S>
constexpr numeric(S value)
: value_(static_cast<T>(value)) {}
private:
const T value_;
};
bool bcp() {
return is_constant(numeric<int>(1));
}
| 197 |
609 | package org.consenlabs.tokencore.foundation.rlp;
/**
* Base RLP type.
*/
public interface RlpType {
}
| 38 |
1,405 | <filename>sample4/recompiled_java/sources/org/apache/commons/httpclient/ChunkedInputStream.java
package org.apache.commons.httpclient;
import com.tencent.tmsecure.module.wupsession.WupConfig;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.commons.httpclient.cookie.CookieSpec;
import org.apache.commons.httpclient.util.EncodingUtil;
import org.apache.commons.httpclient.util.ExceptionUtil;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class ChunkedInputStream extends InputStream {
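    // Decompiled source: the original field names were lost. Inferred roles of the obfuscated members:
    // a = cached Class used to look up the logger, i = Log instance,
    // b = wrapped input stream, c = size of the current chunk, d = read position within that chunk,
    // e = "before first chunk" flag, f = end of chunked stream reached, g = stream closed, h = owning HttpMethod.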
static Class a;
private static final Log i;
private InputStream b;
private int c;
private int d;
private boolean e;
private boolean f;
private boolean g;
private HttpMethod h;
static {
Class cls;
if (a == null) {
cls = a("org.apache.commons.httpclient.ChunkedInputStream");
a = cls;
} else {
cls = a;
}
i = LogFactory.getLog(cls);
}
private static Class a(String x0) {
try {
return Class.forName(x0);
} catch (ClassNotFoundException x1) {
throw new NoClassDefFoundError(x1.getMessage());
}
}
public ChunkedInputStream(InputStream in, HttpMethod method) throws IOException {
this.e = true;
this.f = false;
this.g = false;
this.h = null;
if (in == null) {
throw new IllegalArgumentException("InputStream parameter may not be null");
}
this.b = in;
this.h = method;
this.d = 0;
}
public ChunkedInputStream(InputStream in) throws IOException {
this(in, null);
}
@Override // java.io.InputStream
public int read() throws IOException {
if (this.g) {
throw new IOException("Attempted read from closed stream.");
} else if (this.f) {
return -1;
} else {
if (this.d >= this.c) {
a();
if (this.f) {
return -1;
}
}
this.d++;
return this.b.read();
}
}
@Override // java.io.InputStream
public int read(byte[] b2, int off, int len) throws IOException {
if (this.g) {
throw new IOException("Attempted read from closed stream.");
} else if (this.f) {
return -1;
} else {
if (this.d >= this.c) {
a();
if (this.f) {
return -1;
}
}
int count = this.b.read(b2, off, Math.min(len, this.c - this.d));
this.d += count;
return count;
}
}
@Override // java.io.InputStream
public int read(byte[] b2) throws IOException {
return read(b2, 0, b2.length);
}
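    // Inferred: consumes the CRLF that terminates the previous chunk and reads the next chunk header.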
private void a() throws IOException {
if (!this.e) {
int read = this.b.read();
int read2 = this.b.read();
if (!(read == 13 && read2 == 10)) {
throw new IOException(new StringBuffer().append("CRLF expected at end of chunk: ").append(read).append(CookieSpec.PATH_DELIM).append(read2).toString());
}
}
this.c = b(this.b);
this.e = false;
this.d = 0;
if (this.c == 0) {
this.f = true;
b();
}
}
/* JADX INFO: Can't fix incorrect switch cases order, some code will duplicate */
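    // Inferred: parses the chunk-size line (hex size plus optional ";extension"), handling quoted extension values.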
private static int b(InputStream in) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
int state = 0;
while (state != -1) {
int b2 = in.read();
if (b2 == -1) {
throw new IOException("chunked stream ended unexpectedly");
}
switch (state) {
case 0:
switch (b2) {
case 13:
state = 1;
break;
case WupConfig.RQ_GET_TRAFFIC_CMD:
state = 2;
default:
baos.write(b2);
break;
                    }
                    break;
case 1:
if (b2 == 10) {
state = -1;
break;
} else {
throw new IOException("Protocol violation: Unexpected single newline character in chunk size");
}
case 2:
switch (b2) {
case WupConfig.RQ_GET_TRAFFIC_CMD:
state = 0;
baos.write(b2);
break;
case 92:
baos.write(in.read());
break;
default:
baos.write(b2);
break;
                    }
                    break;
default:
throw new RuntimeException("assertion failed");
}
}
String dataString = EncodingUtil.getAsciiString(baos.toByteArray());
int separator = dataString.indexOf(59);
String dataString2 = separator > 0 ? dataString.substring(0, separator).trim() : dataString.trim();
try {
return Integer.parseInt(dataString2.trim(), 16);
} catch (NumberFormatException e2) {
throw new IOException(new StringBuffer().append("Bad chunk size: ").append(dataString2).toString());
}
}
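    // Inferred: reads the trailer headers that follow the final zero-length chunk and attaches them as response footers.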
private void b() throws IOException {
String charset = "US-ASCII";
try {
if (this.h != null) {
charset = this.h.getParams().getHttpElementCharset();
}
Header[] footers = HttpParser.parseHeaders(this.b, charset);
if (this.h != null) {
for (Header header : footers) {
this.h.addResponseFooter(header);
}
}
} catch (HttpException e2) {
i.error("Error parsing trailer headers", e2);
IOException ioe = new IOException(e2.getMessage());
ExceptionUtil.initCause(ioe, e2);
throw ioe;
}
}
@Override // java.io.Closeable, java.lang.AutoCloseable, java.io.InputStream
public void close() throws IOException {
if (!this.g) {
try {
if (!this.f) {
a(this);
}
} finally {
this.f = true;
this.g = true;
}
}
}
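    // Inferred: reads and discards any remaining data from the stream.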
static void a(InputStream inStream) throws IOException {
do {
} while (inStream.read(new byte[1024]) >= 0);
}
}
| 3,681 |
1,256 | <reponame>MKadaner/FarManager
#include <all_lib.h>
#pragma hdrstop
#if defined(__MYPACKAGE__)
#pragma package(smart_init)
#endif
#define MSG_EOL "\n"
#define ERR_EXPCOLOR "Expected color value"
#define ERR_EXPCOMMEND "End of file reached, but \"*/\" expected"
#define ERR_EXPINT "Expected integer value"
#define ERR_EXPNAME "Expected name token"
#define ERR_EXPSTR "Expected \"%s\""
#define ERR_INTCHAR "Wrong digit character \'%c\'"
#define ERR_INTDOT "Can not use more then one dot"
#define ERR_INTSIGN "Can not use more then one sign character"
#define ERR_NAMEBEGIN "Name can not begin with \'%c\'"
/***************************************
CTParserBase
***************************************/
CTParserBase::CTParserBase( void )
{
curX = curY = 0; lastLen = 0;
CLineComments = CBlockComments = TRUE;
}
int CTParserBase::GetX( void ) { return curX+1; }
int CTParserBase::GetY( void ) { return curY+1; }
BOOL CTParserBase::Assign( CONSTSTR /*data*/ )
{
curX = curY = 0;
lastLen = 0;
return TRUE;
}
float CTParserBase::GetFloatToken( void )
{
return (float)GetDoubleToken();
}
char CTParserBase::GetChar( void )
{ char ch = GetNextChar();
if ( !ch )
return 0;
if ( ch != '\n' && ch != '\r' ) {
curX++;
return ch;
}
curX = 0;
curY++;
char eol = GetNextChar();
if ( (ch == '\n' && eol == '\r') ||
(ch == '\r' && eol == '\n') )
return '\n';
if (eol) UnGetChar();
return '\n';
}
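// Steps the read position back one character; when the step crosses a line break, curX is recomputed by rescanning the previous line.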
void CTParserBase::UnGet( void )
{ char ch;
if ( !UnGetChar() ) return;
curX--;
ch = CurChar();
if ( ch != '\n' && ch != '\r' )
return;
curY = Max( 0, curY-1 );
curX = 0;
if ( !UnGetChar() ) return;
char eol = CurChar();
if ( (ch == '\n' && eol == '\r') ||
(ch == '\r' && eol == '\n') )
UnGetChar();
while( 1 ) {
char skip = CurChar();
if ( skip == '\n' || skip == '\r' )
break;
if ( !UnGetChar() )
break;
curX++;
}
while( (ch=GetNextChar()) != 0 && ch != eol )
/**/;
}
void CTParserBase::UnGet( CONSTSTR s )
{ char ch;
int n;
if ( !s || !s[0] ) return;
while( (ch=CurChar()) != 0 && StrChr(CT_STDSKIPS,ch) != 0 ) UnGet();
if (!ch) return;
for ( n = strLen(s)-1; n >= 0; n-- )
for( ch = CurChar(); ch && ch == s[n]; ch = CurChar() ) UnGet();
}
char CTParserBase::NextGet( void )
{ char ch = Get();
if ( ch != 0 ) UnGet();
return ch;
}
char CTParserBase::Get( void )
{ char ch;
int n;
GetBegin:
while( 1 ) {
if ( (ch=GetChar()) == 0 )
return 0;
if ( StrChr( CT_STDSKIPS,ch ) != 0 )
continue;
break;
}
if ( ch == '/' ) {
switch( GetChar() ) {
case '*': if (CBlockComments) {
n = 1;
while( n > 0 ){
ch = GetChar();
if ( !ch ) THROW( ERR_EXPCOMMEND )
if ( ch == '/' ) { if ( GetChar() == '*' ) n++; else UnGet(); }
if ( ch == '*' ) { if ( GetChar() == '/' ) n--; else UnGet(); }
}
goto GetBegin;
} else
UnGet();
break;
case '/': if ( CLineComments ) {
while( (ch=GetChar()) != 0 && ch != '\n' )
/**/;
goto GetBegin;
} else
UnGet();
break;
default: UnGet();
}
}
return ch;
}
MyString CTParserBase::GetToken( CONSTSTR delimiter )
{ MyString str;
char ch;
if (!delimiter)
delimiter = CT_STDDELIMITERS;
if ( (ch=Get()) == 0 )
return str;
for( str.Add(ch); (ch=GetChar()) != 0 && StrChr( delimiter,ch ) == NULL; )
str.Add( ch );
if ( ch )
UnGet();
return str;
}
MyString CTParserBase::GetBefore( const MyString& token,CONSTSTR delimiter )
{ MyString s,tmp;
while( (tmp=GetToken(delimiter)) != token )
s.Add( tmp );
UnGet( tmp.Text() );
return s;
}
MyString CTParserBase::GetExact( CONSTSTR token )
{ MyString s;
char ch;
for( int n = 0; token[n]; n++ )
if ( token[n] == (ch=Get()) )
s.Add( ch );
else
THROW( Message(ERR_EXPSTR,token) )
return s;
}
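// Consumes consecutive characters found in `chars` and accumulates them as a number in the given base.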
int GetPDigit( CTParserBase& p,CONSTSTR chars,int base,char /*end*/ )
{ char ch;
int val = 0;
while( (ch=p.NextGet()) != 0 && strchr(chars,ch) ) {
p.GetChar();
val = val*base + (ch>'a'?(ch-'a'+10):((ch>'A')?(ch-'A'+10):(ch-'0')));
}
return val;
}
char CTParserBase::GetCharToken( void )
{ char ch;
if ( NextGet() != '\'' ) return (char)GetIntToken();
GetExact( "\'" );
ch = GetChar();
if ( ch == '\\' )
switch( GetChar() )
{
case 'n': ch = '\n'; break;
case 'r': ch = '\r'; break;
case 'b': ch = '\b'; break;
case 't': ch = '\t'; break;
case '\\': ch = '\\'; break;
case '\'': ch = '\''; break;
case '\"': ch = '\"'; break;
case 'x': ch = (char)GetPDigit(*this,"0123456789ABCDEFabcdef",16,'\''); break;
case '0': if ( NextGet() == 'x' ) {
GetChar();
ch = (char)GetPDigit(*this,"0123456789ABCDEFabcdef",16,'\''); break;
} else {
ch = (char)GetPDigit(*this,"01234567",8,'\''); break;
}
}
GetExact( "\'");
return ch;
}
MyString CTParserBase::GetStringToken( void )
{ MyString str;
char ch;
QuotedString:
GetExact( "\"");
while( (ch=GetChar()) != '\"' && ch )
if ( ch == '\\' )
switch( GetChar() )
{
case 'n': str.Add( '\n' ); break;
case 'r': str.Add( '\r' ); break;
case 'b': str.Add( '\b' ); break;
case 't': str.Add( '\t' ); break;
case '\\': str.Add( '\\' ); break;
case '\'': str.Add( '\'' ); break;
case '\"': str.Add( '\"' ); break;
case 'x': str.Add( (char)GetPDigit(*this,"0123456789ABCDEFabcdef",16,'\'') ); break;
case '0': if ( NextGet() == 'x' ) {
GetChar();
str.Add( (char)GetPDigit(*this,"0123456789ABCDEFabcdef",16,'\'') ); break;
} else {
str.Add( (char)GetPDigit(*this,"01234567",8,'\'') ); break;
}
default: str.Add( CurChar() );
}
else
str.Add( ch );
ch = NextGet();
if ( ch == '\"' ) goto QuotedString;
if ( ch == '\\' ) {
GetChar();
if ( NextGet() != '\"' ) THROW( "Bad string expend" )
goto QuotedString;
}
return str;
}
DWORD CTParserBase::GetIntToken( void )
{ MyString str;
if ( NextGet() == '\'' ) return GetCharToken();
str = GetToken( CT_STDDELIMITERSWOSIGN );
if ( str.Length() == 0 )
THROW( ERR_EXPINT )
if ( str == "TRUE" || str == "TRUE" ) return TRUE;
if ( str == "FALSE" || str == "FALSE" ) return FALSE;
if ( str == "NULL" ) return 0;
return Str2DigitDetect( str.Text(), 10, 0 );
}
double CTParserBase::GetDoubleToken( void )
{ MyString str;
BOOL sign = FALSE,
dot = FALSE;
int n;
str = GetToken( CT_STDFLOATDELIMITERSWOSIGN );
if ( str.Length() == 0 ) THROW( ERR_EXPINT )
for ( n = 0; n < str.Length(); n++ )
switch( str[n] )
{
case '.': if ( dot ) THROW( ERR_INTDOT )
dot = TRUE;
break;
case '-': if ( sign ) THROW( ERR_INTSIGN )
sign = TRUE;
break;
default: if ( !isdigit(str[n]) )
THROW( Message( ERR_INTCHAR,str[n] ) )
}
return atof( str.Text() );
}
MyString CTParserBase::GetNameToken( void )
{ MyString name;
name = GetToken(CT_STDDELIMITERS);
if ( name.Length() == 0 ) THROW( ERR_EXPNAME )
if ( name[0] != '_' && !isalpha(name[0]) ) THROW( Message( ERR_NAMEBEGIN,name[0] ) )
return name;
}
int CTParserBase::GetTypeToken( CONSTSTR *str,BOOL isCase,CONSTSTR dels,MyString *token )
{ MyString s( GetToken( dels ) );
if ( token ) *token = s;
for ( int n = 0; str[n]; n++ )
if ( StrCmp(s.Text(),str[n],-1,isCase) == 0 )
return n;
return -1;
}
CONSTSTR stdColorTokens[] = {
"clBlack", "clBlue", "clGreen", "clCyan",
"clRed", "clMagenta", "clYellow", "clWhite"
};
WORD CTParserBase::GetColorToken( void )
{ int fg,bk;
fg = GetTypeToken( stdColorTokens,TRUE,CT_STDDELIMITERS );
if ( fg < 0 || fg > 7 ) THROW( ERR_EXPCOLOR )
GetExact( "/" );
bk = GetTypeToken( stdColorTokens,TRUE,CT_STDDELIMITERS );
if ( bk < 0 || bk > 7 ) THROW( ERR_EXPCOLOR )
if ( NextGet() == '+' ) {
GetChar();
return (WORD)CLRH( fg,bk );
} else
return (WORD)CLR( fg,bk );
}
char CTParserBase::GetTo( char ender,char beginer,MyString *place )
{ char ch;
int level = 1;
if ( place ) *place = "";
while( (ch=GetChar()) != 0 && level > 0 ) {
if ( beginer && ch == beginer ) level++;
if ( ch == ender ) level--;
if (place && level) place->Add(ch);
}
return ch;
}
char CTParserBase::GetTo( CONSTSTR ender, CONSTSTR beginer, MyString *place )
{ char ch;
int level = 1;
if (place) *place = "";
while( (ch=GetChar()) != 0 && level > 0 ) {
if ( beginer && StrChr(beginer,ch) ) level++;
if ( StrChr(ender,ch) ) level--;
if ( place && level ) place->Add(ch);
}
return ch;
}
void CTParserBase::GetToEOL( PMyString s )
{ char ch;
int n = curY;
if ( s ) *s = "";
while( (ch=GetChar()) != 0 && n == curY ) {
if ( s ) s->Add( ch );
}
if (ch) UnGet();
}
| 5,036 |
435 | <filename>src/parameter/parameter.h
#pragma once
#include "system/customer.h"
#include "parameter/proto/param.pb.h"
namespace PS {
/// The base class of shared parameters
class Parameter : public Customer {
public:
Parameter(int id) : Customer(id) { }
virtual ~Parameter() { }
typedef std::initializer_list<int> Timestamps;
typedef ::google::protobuf::RepeatedPtrField<FilterConfig> Filters;
/**
   * @brief Creates a request task
*
* @param channel communication channel
* @param ts the timestamp of this request
* @param wait a list of timestamp this request should wait
* @param filters a list of filters to compress the request message
* @param key_range the key range of this request
*
* @return A Task
*/
static Task Request(int channel,
int ts = Message::kInvalidTime,
const Timestamps& wait = {},
const Filters& filters = Filters(),
const Range<Key>& key_range = Range<Key>::All()) {
Task req; req.set_request(true);
req.set_key_channel(channel);
if (ts > Message::kInvalidTime) req.set_time(ts);
for (int t : wait) req.add_wait_time(t);
for (const auto& f : filters) req.add_filter()->CopyFrom(f);
key_range.To(req.mutable_key_range());
return req;
}
/// @brief Submit a push message to msg->recver
inline int Push(Message* msg) {
msg->task.mutable_param()->set_push(true);
return Submit(msg);
}
/// @brief Submit a pull message to msg->recver
inline int Pull(Message* msg) {
msg->task.mutable_param()->set_push(false);
return Submit(msg);
}
virtual void WriteToFile(std::string file) { }
virtual void ProcessRequest(Message* request);
virtual void ProcessResponse(Message* response);
protected:
/// @brief Fill "msg" with the values it requests, e.g.,
/// msg->value(0)[0] = my_val_[msg->key[0]];
virtual void GetValue(Message* msg) = 0;
  /// @brief Set the values in "msg" into my data structure, e.g.,
/// my_val_[msg->key[0]] = msg->value(0)[0];
virtual void SetValue(const Message* msg) = 0;
  /// @brief The message contains the backup KV pairs sent by the master node of the key
  /// segment to its replica node. Merge these pairs into my replica, e.g.,
  /// replica_[msg->sender] = ...
virtual void SetReplica(const Message* msg) { }
  /// @brief Retrieve the replica. A new server node replacing a dead server will first
  /// ask the dead server's replica node for the data
virtual void GetReplica(Message* msg) { }
  /// @brief A new server node fills its own data structure using the replica data from
  /// the dead server's replica node
virtual void Recover(Message* msg) { }
};
} // namespace PS
| 949 |
2,591 | package liquibase.diff;
import org.junit.Test;
public class DiffResultTest {
@Test
public void dummy() {
}
}
| 45 |
3,301 | <reponame>zhangjun0x01/Alink
package com.alibaba.alink.params.nlp;
import org.apache.flink.ml.api.misc.param.ParamInfo;
import org.apache.flink.ml.api.misc.param.ParamInfoFactory;
import org.apache.flink.ml.api.misc.param.WithParams;
public interface HasNegative<T> extends WithParams <T> {
ParamInfo <Integer> NEGATIVE = ParamInfoFactory
.createParamInfo("negative", Integer.class)
.setDescription("The negative sampling size")
.setHasDefaultValue(5)
.build();
default Integer getNegative() {
return get(NEGATIVE);
}
default T setNegative(Integer value) {
return set(NEGATIVE, value);
}
}
| 219 |
6,717 | <reponame>crossmob/WinObjC<filename>include/UIKit/UIPickerViewDelegate.h
//******************************************************************************
//
// Copyright (c) Microsoft. All rights reserved.
//
// This code is licensed under the MIT License (MIT).
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
//******************************************************************************
#pragma once
#import <UIKit/UIKitExport.h>
#import <CoreGraphics/CGBase.h>
#import <Foundation/Foundation.h>
@class UIPickerView;
@class NSString;
@class NSAttributedString;
@class UIView;
@protocol UIPickerViewDelegate <NSObject>
@optional
- (CGFloat)pickerView:(UIPickerView*)pickerView rowHeightForComponent:(NSInteger)component;
- (CGFloat)pickerView:(UIPickerView*)pickerView widthForComponent:(NSInteger)component;
- (NSString*)pickerView:(UIPickerView*)pickerView titleForRow:(NSInteger)row forComponent:(NSInteger)component;
- (NSAttributedString*)pickerView:(UIPickerView*)pickerView attributedTitleForRow:(NSInteger)row forComponent:(NSInteger)component;
- (UIView*)pickerView:(UIPickerView*)pickerView viewForRow:(NSInteger)row forComponent:(NSInteger)component reusingView:(UIView*)view;
- (void)pickerView:(UIPickerView*)pickerView didSelectRow:(NSInteger)row inComponent:(NSInteger)component;
@end
| 520 |
1,921 | <reponame>ksvr444/daily-coding-problem<gh_stars>1000+
def reverse_words(string, delimiters):
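    # Reverse the order of the words while keeping every delimiter at its original position.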
words = list()
delims = list()
delim_positions = list() # stores positions of the delimiters seen
start = 0
i = 0
while i < len(string):
char = string[i]
if char in delimiters:
word = string[start:i]
if i - start > 1:
words.append(word)
delims.append(char)
delim_positions.append(len(words) + len(delims) - 1)
start = i + 1
i += 1
# get last word if present
if i - start > 1:
words.append(string[start:i])
words.reverse() # reverse just the words
reversed_order = list()
word_index = 0
delim_index = 0
# merging the reversed words and the delimiters
for i in range(len(words) + len(delims)):
if delim_index < len(delim_positions) and delim_positions[delim_index] == i:
# insert next delimiter if the position is saved for a delimiter
reversed_order.append(delims[delim_index])
delim_index += 1
else:
reversed_order.append(words[word_index])
word_index += 1
reversed_string = "".join(reversed_order)
return reversed_string
assert reverse_words("hello/world:here/",
set([':', '/'])) == "here/world:hello/"
assert reverse_words(":hello//world:here/",
set([':', '/'])) == ":here//world:hello/"
assert reverse_words("hello//world:here",
set([':', '/'])) == "here//world:hello"
assert reverse_words("hello/world:here",
set([':', '/'])) == "here/world:hello"
| 760 |
504 | package org.dayatang.security.api;
import java.util.Date;
/**
* Created by yyang on 2016/11/29.
*/
public class UserInfo {
private String id;
private int version;
private String username;
private String remark;
private Date created;
private Date lastModified;
private Date expired;
private boolean locked;
private boolean disabled;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public int getVersion() {
return version;
}
public void setVersion(int version) {
this.version = version;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getRemark() {
return remark;
}
public void setRemark(String remark) {
this.remark = remark;
}
public Date getCreated() {
return created;
}
public void setCreated(Date created) {
this.created = created;
}
public Date getLastModified() {
return lastModified;
}
public void setLastModified(Date lastModified) {
this.lastModified = lastModified;
}
public Date getExpired() {
return expired;
}
public void setExpired(Date expired) {
this.expired = expired;
}
public boolean isLocked() {
return locked;
}
public void setLocked(boolean locked) {
this.locked = locked;
}
public boolean isDisabled() {
return disabled;
}
public void setDisabled(boolean disabled) {
this.disabled = disabled;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof UserInfo)) {
return false;
}
UserInfo userInfo = (UserInfo) o;
return getUsername().equals(userInfo.getUsername());
}
@Override
public int hashCode() {
return getUsername().hashCode();
}
@Override
public String toString() {
return "UserInfo{" +
"username='" + username + '\'' +
'}';
}
}
| 933 |
635 | import os
import sys
def add_path(path):
if path not in sys.path:
sys.path.insert(0, path)
# add `./src` dir to system path
src_dir = os.path.abspath(os.path.join(os.getcwd(), "../"))
add_path(src_dir) | 94 |
1,346 | from trakt_sync.differ.core.helpers import dict_path
class Result(object):
def __init__(self, differ):
self.changes = {}
self._differ = differ
def add(self, current):
for handler in self._differ.handlers:
items = dict_path(self.changes, (
handler.name, 'added'
))
for key, item in current.items():
items[key] = handler.properties(item)
| 202 |
12,252 | package org.keycloak.quarkus.runtime.themes;
import org.keycloak.Config;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.KeycloakSessionFactory;
import org.keycloak.quarkus.runtime.Environment;
import org.keycloak.theme.FolderThemeProvider;
import org.keycloak.theme.ThemeProvider;
import org.keycloak.theme.ThemeProviderFactory;
import java.io.File;
import java.util.Objects;
public class QuarkusFolderThemeProviderFactory implements ThemeProviderFactory {
private static final String CONFIG_DIR_KEY = "dir";
private FolderThemeProvider themeProvider;
@Override
public ThemeProvider create(KeycloakSession sessions) {
return themeProvider;
}
@Override
public void init(Config.Scope config) {
String configDir = config.get(CONFIG_DIR_KEY);
File rootDir = getThemeRootDirWithFallback(configDir);
themeProvider = new FolderThemeProvider(rootDir);
}
@Override
public void postInit(KeycloakSessionFactory factory) {
}
@Override
public void close() {
}
@Override
public String getId() {
return "folder";
}
    /**
     * Resolves the theme root directory: uses the value from
     * {@link Config} when present, otherwise falls back to the
     * default theme directory.
     *
     * @param rootDirFromConfig string value from {@link Config}, may be null
     * @return Directory to use as theme root directory in {@link File} format, either from config or from default. Null if that directory does not exist.
     * @throws RuntimeException when filesystem path is not accessible
     */
private File getThemeRootDirWithFallback(String rootDirFromConfig) {
File themeRootDir;
themeRootDir = new File(Objects.requireNonNullElseGet(rootDirFromConfig, Environment::getDefaultThemeRootDir));
if (!themeRootDir.exists()) {
return null;
}
return themeRootDir;
}
}
| 660 |
453 | <filename>src/ru/krlvm/powertunnel/frames/AdvancedMainFrame.java
package ru.krlvm.powertunnel.frames;
import ru.krlvm.powertunnel.PowerTunnel;
import ru.krlvm.powertunnel.data.Settings;
import ru.krlvm.powertunnel.enums.ServerStatus;
import ru.krlvm.powertunnel.ui.TextRightClickPopup;
import ru.krlvm.powertunnel.ui.Tooltipped;
import ru.krlvm.powertunnel.utilities.UIUtility;
import ru.krlvm.swingdpi.SwingDPI;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.io.IOException;
public class AdvancedMainFrame extends MainFrame {
private static final String ABOUT_MESSAGE =
"Simple, scalable, cross-platform and effective solution against government censorship<br><br>" +
"<a href=\"https://github.com/krlvm/PowerTunnel\">PowerTunnel</a> is made possible by these open-source projects:" +
"<br><br>" +
" • <a href=\"https://github.com/adamfisk/LittleProxy\">LittleProxy</a> - proxy server, <a href=\"https://github.com/mrog/LittleProxy\">forked</a> version<br>" +
" • <a href=\"https://github.com/ganskef/LittleProxy-mitm\">LittleProxy-MITM</a> - LittleProxy SSL extension<br>" +
" • <a href=\"https://github.com/dnsjava/dnsjava\">dnsjava</a> - DNS library, DoH realization<br>" +
" • <a href=\"https://github.com/ibauersachs/dnssecjava\">dnssecjava</a> - DNSSec realization for dnsjava<br>" +
" • <a href=\"https://github.com/adamfisk/DNSSEC4J\">DNSSEC4J</a> - DNSSec realization for LittleProxy<br>" +
" • <a href=\"https://github.com/java-native-access/jna\">Java Native Access</a> - library for accessing system native API<br>" +
" • <a href=\"https://github.com/krlvm/SwingDPI\">SwingDPI</a> - High DPI scaling" +
"<br><br>" +
"Get <a href=\"https://github.com/krlvm/PowerTunnel-Android\">version for Android</a>" +
"<br><br>" +
"Licensed under the<br>" +
"<a href=\"https://raw.githubusercontent.com/krlvm/PowerTunnel/master/LICENSE\">MIT License</a>" +
"<br><br>" +
"(c) krlvm, 2019-2021";
private final JLabel header;
private final JButton stateButton;
private final JTextField[] inputs;
private final boolean[] inputsDisabled;
public AdvancedMainFrame() {
JRootPane root = getRootPane();
root.setLayout(new BorderLayout());
root.setBorder(BorderFactory.createEmptyBorder(0, 8, 0, 8));
JPanel pane = new JPanel(new GridBagLayout());
GridBagConstraints gbc = new GridBagConstraints();
gbc.gridwidth = GridBagConstraints.REMAINDER;
gbc.anchor = GridBagConstraints.CENTER;
gbc.insets = new Insets(8,0,0,0);
header = new JLabel(getHeaderText());
inputsDisabled = new boolean[]{
PowerTunnel.SETTINGS.isTemporary(Settings.SERVER_IP_ADDRESS),
PowerTunnel.SETTINGS.isTemporary(Settings.SERVER_PORT)
};
final JTextField ipInput = new Tooltipped.TextField("IP Address");
TextRightClickPopup.register(ipInput);
Insets insets = ipInput.getInsets();
ipInput.setPreferredSize(new Dimension(SwingDPI.scale(200)+insets.left+insets.right,
SwingDPI.scale(22)+insets.top+insets.bottom));
//ipInput.setHorizontalAlignment(SwingConstants.CENTER);
ipInput.setText(String.valueOf(PowerTunnel.SERVER_IP_ADDRESS));
ipInput.setEnabled(!inputsDisabled[0]);
final JTextField portInput = new Tooltipped.TextField("Port");
TextRightClickPopup.register(portInput);
insets = portInput.getInsets();
portInput.setPreferredSize(SwingDPI.scale(75+insets.left+insets.right,
22+insets.top+insets.bottom));
//portInput.setHorizontalAlignment(SwingConstants.CENTER);
portInput.setText(String.valueOf(PowerTunnel.SERVER_PORT));
portInput.setEnabled(!inputsDisabled[1]);
inputs = new JTextField[]{ipInput, portInput};
stateButton = new JButton("Start server");
stateButton.setPreferredSize(new Dimension((int)stateButton.getPreferredSize().getWidth(), (int)portInput.getPreferredSize().getHeight()));
stateButton.addActionListener(e -> new Thread(() -> {
if (PowerTunnel.getStatus() == ServerStatus.RUNNING) {
PowerTunnel.stopServer();
} else {
try {
PowerTunnel.SERVER_IP_ADDRESS = ipInput.getText();
PowerTunnel.SERVER_PORT = Integer.parseInt(portInput.getText());
String error = PowerTunnel.safeBootstrap();
if (error != null) {
JOptionPane.showMessageDialog(AdvancedMainFrame.this, error,
"Error", JOptionPane.ERROR_MESSAGE);
}
} catch (NumberFormatException ex) {
JOptionPane.showMessageDialog(AdvancedMainFrame.this, "Invalid port",
"Error", JOptionPane.ERROR_MESSAGE);
}
}
}).start());
JButton logButton = new JButton("Logs");
logButton.addActionListener(e -> PowerTunnel.logFrame.showFrame());
logButton.setEnabled(PowerTunnel.ENABLE_LOGS);
JButton journalButton = new JButton("Journal");
journalButton.addActionListener(e -> PowerTunnel.journalFrame.showFrame());
journalButton.setEnabled(PowerTunnel.ENABLE_JOURNAL);
JButton userBlacklist = new JButton("Blacklist");
userBlacklist.addActionListener(e -> PowerTunnel.USER_FRAMES[0].showFrame());
JButton userWhitelist = new JButton("Whitelist");
userWhitelist.addActionListener(e -> PowerTunnel.USER_FRAMES[1].showFrame());
JButton options = new JButton("Options");
options.addActionListener(e -> PowerTunnel.optionsFrame.showFrame());
JButton reload = new JButton("Reload");
reload.addActionListener(e -> {
try {
PowerTunnel.loadLists();
JOptionPane.showMessageDialog(AdvancedMainFrame.this,
"Government blacklist and user lists have been reloaded",
PowerTunnel.NAME, JOptionPane.INFORMATION_MESSAGE);
} catch (IOException ex) {
JOptionPane.showMessageDialog(AdvancedMainFrame.this,
"An error occurred while reloading lists: " + ex.getMessage(),
PowerTunnel.NAME, JOptionPane.INFORMATION_MESSAGE);
ex.printStackTrace();
}
});
JButton about = new JButton("About");
about.addActionListener(e -> {
JEditorPane message = UIUtility.getLabelWithHyperlinkSupport(ABOUT_MESSAGE, null, true);
JOptionPane.showMessageDialog(AdvancedMainFrame.this, message, "About " + PowerTunnel.NAME, JOptionPane.INFORMATION_MESSAGE);
});
JPanel panel = new JPanel(new FlowLayout(FlowLayout.CENTER));
//panel.add(new JLabel("IP Address:"));
panel.add(ipInput);
//panel.add(new JLabel("Port:"));
panel.add(portInput);
panel.add(stateButton);
pane.add(header, gbc);
pane.add(panel, gbc);
root.add(pane, BorderLayout.NORTH);
JPanel generalButtonsPane = new JPanel(new GridLayout(2, 1));
JPanel firstButtonsRow = new JPanel();
firstButtonsRow.add(logButton);
firstButtonsRow.add(journalButton);
firstButtonsRow.add(userBlacklist);
firstButtonsRow.add(userWhitelist);
JPanel secondButtonsRow = new JPanel();
secondButtonsRow.add(reload);
secondButtonsRow.add(options);
secondButtonsRow.add(about);
generalButtonsPane.add(firstButtonsRow);
generalButtonsPane.add(secondButtonsRow);
pane.add(generalButtonsPane, gbc);
pane.add(UIUtility.getLabelWithHyperlinkSupport("<a href=\"" + PowerTunnel.REPOSITORY_URL + "/issues\">Submit a bug</a> | <a href=\"https://t.me/powertunnel_dpi\">Telegram Channel</a> | " + "<a href=\"" + PowerTunnel.REPOSITORY_URL + "/wiki\">Help</a><br>" +
"<b><a style=\"color: black\" href=\"" + PowerTunnel.REPOSITORY_URL + "\">" + PowerTunnel.REPOSITORY_URL + "</a>" +
"</b><br><br>(c) krlvm, 2019-2021", "text-align: center"), gbc);
pack();
setResizable(false);
controlFrameInitialized();
setVisible(!PowerTunnel.RUN_MINIMIZED);
stateButton.requestFocus();
stateButton.requestFocusInWindow();
root.setDefaultButton(stateButton);
//save data
setDefaultCloseOperation(WindowConstants.HIDE_ON_CLOSE);
addWindowListener(new WindowAdapter() {
@Override
public void windowClosing(WindowEvent e) {
if(PowerTunnel.getStatus() != ServerStatus.NOT_RUNNING && PowerTunnel.getTray().isLoaded()) {
PowerTunnel.getTray().showNotification(PowerTunnel.NAME + " is still running in tray mode");
return;
}
PowerTunnel.handleClosing();
}
});
}
@Override
public void update() {
SwingUtilities.invokeLater(() -> {
boolean running = PowerTunnel.getStatus() == ServerStatus.RUNNING;
stateButton.setText((running ? "Stop" : "Start") + " server");
header.setText(getHeaderText());
boolean activateUI = !(PowerTunnel.getStatus() == ServerStatus.STARTING || PowerTunnel.getStatus() == ServerStatus.STOPPING);
stateButton.setEnabled(activateUI);
for(int i = 0; i < inputs.length; i++) {
inputs[i].setEditable(PowerTunnel.getStatus() == ServerStatus.NOT_RUNNING && !inputsDisabled[i]);
}
});
}
private String getHeaderText() {
return getCenteredLabel("<b>" + PowerTunnel.NAME + " v" + PowerTunnel.VERSION + "</b><br>Server " + PowerTunnel.getStatus() + "</div></html>");
}
private String getCenteredLabel(String text) {
return "<html><div style='text-align: center;'>" + text + "</div></html>";
}
}
| 4,516 |
428 | /**
* Copyright 2008 - 2015 The Loon Game Engine Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* @project loon
* @author cping
* @email:<EMAIL>
* @version 0.5
*/
package loon.action.camera;
import loon.geom.Matrix4;
import loon.geom.Quaternion;
import loon.geom.Transforms;
import loon.geom.Vector3f;
public class PerspectiveCamera extends EmptyCamera {
private Vector3f position;
private Quaternion rotation;
private Vector3f forward;
private Vector3f right;
private Vector3f up;
public PerspectiveCamera() {
this(70, 1f, 0.01f, 100);
}
public PerspectiveCamera(float fovy, float aspect, float zNear, float zFar) {
_viewMatrix4 = Transforms.createPerspective(fovy, aspect, zNear, zFar);
position = new Vector3f(0, 0, 1);
rotation = new Quaternion();
forward = new Vector3f(0, 0, -1);
right = new Vector3f(0, 0, 1);
up = new Vector3f(0, 1, 0);
}
public PerspectiveCamera lookAt(Vector3f point) {
return lookAt(point, getUp().normalizeSelf());
}
public PerspectiveCamera lookAt(Vector3f point, Vector3f up) {
Transforms.createLookAtQuaternion(position, point, up, rotation);
return this;
}
public Vector3f getUp() {
return rotation.multiply(up.set(Vector3f.AXIS_Y()), up).normalizeSelf();
}
public PerspectiveCamera lookAt(Vector3f position, Vector3f point,
Vector3f up) {
return setPosition(position).lookAt(point, up);
}
public PerspectiveCamera moveForward(float amount) {
return move(getForward(), amount);
}
public PerspectiveCamera move(Vector3f dir, float amount) {
position.addSelf(dir.normalizeSelf().scaleSelf(amount));
return this;
}
public Vector3f getForward() {
return rotation.multiply(forward.set(Vector3f.AXIS_Z()).negateSelf(),
forward).normalizeSelf();
}
public PerspectiveCamera moveBackward(float amount) {
return move(getForward().negateSelf(), amount);
}
public PerspectiveCamera moveLeft(float amount) {
return move(getRight().negateSelf(), amount);
}
public Vector3f getRight() {
return rotation.multiply(Vector3f.AXIS_X(), right).normalizeSelf();
}
public PerspectiveCamera moveRight(float amount) {
return move(getRight(), amount);
}
public PerspectiveCamera moveUp(float amount) {
return move(getUp(), amount);
}
public PerspectiveCamera moveDown(float amount) {
return move(getUp().negateSelf(), amount);
}
public PerspectiveCamera rotateX(float angle) {
Quaternion tempQuat = Quaternion.TMP();
Quaternion xRot = tempQuat.set(Vector3f.AXIS_X(), angle);
rotation.multiplySelf(xRot);
return this;
}
public PerspectiveCamera rotateY(float angle) {
Quaternion tempQuat = Quaternion.TMP();
Quaternion yRot = tempQuat.set(Vector3f.AXIS_Y(), angle);
rotation.set(yRot.multiplySelf(rotation));
return this;
}
public PerspectiveCamera lerp(PerspectiveCamera p, float alpha) {
position.lerpSelf(p.position, alpha);
rotation.lerpSelf(p.rotation, alpha);
return this;
}
public PerspectiveCamera slerp(PerspectiveCamera p, float alpha) {
position.lerpSelf(p.position, alpha);
rotation.slerpSelf(p.rotation, alpha);
return this;
}
@Override
public void setup() {
super.setup();
Vector3f tempVec3 = Vector3f.TMP();
Matrix4 tempMat4 = Matrix4.TMP();
Quaternion tempQuat = Quaternion.TMP();
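        // View matrix = inverse(rotation) followed by translation(-position), i.e. the inverse of the camera's world transform.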
_viewMatrix4
.idt()
.mul(Transforms.createRotation(tempQuat.set(rotation)
.invertSelf(), tempMat4))
.mul(Transforms.createTranslation(tempVec3.set(position)
.negateSelf(), tempMat4));
}
public Vector3f getPosition() {
return position;
}
public PerspectiveCamera setPosition(Vector3f position) {
this.position.set(position);
return this;
}
public Quaternion getRotation() {
return rotation;
}
public PerspectiveCamera setRotation(Quaternion rotation) {
this.rotation.set(rotation);
return this;
}
public PerspectiveCamera initProjection(float fovy, float aspect, float zNear, float zFar)
{
Transforms.createPerspective(fovy, aspect, zNear, zFar, _projMatrix4);
return this;
}
public PerspectiveCamera initProjection(float width, float height)
{
return initProjection(0, width, height, 0, 0.01f, 100f);
}
public PerspectiveCamera initProjection(float left, float right, float bottom, float top, float zNear, float zFar)
{
Transforms.createFrustum(left, right, bottom, top, zNear, zFar, _projMatrix4);
return this;
}
}
| 1,713 |
3,100 | <gh_stars>1000+
/**
******************************************************************************
* Xenia : Xbox 360 Emulator Research Project *
******************************************************************************
* Copyright 2020 <NAME>. All rights reserved. *
* Released under the BSD license - see LICENSE in the root for more details. *
******************************************************************************
*/
#include "xenia/gpu/trace_reader.h"
#include <cinttypes>
#include "third_party/snappy/snappy.h"
#include "xenia/base/filesystem.h"
#include "xenia/base/logging.h"
#include "xenia/base/mapped_memory.h"
#include "xenia/base/math.h"
#include "xenia/gpu/packet_disassembler.h"
#include "xenia/gpu/trace_protocol.h"
#include "xenia/memory.h"
namespace xe {
namespace gpu {
bool TraceReader::Open(const std::filesystem::path& path) {
Close();
mmap_ = MappedMemory::Open(path, MappedMemory::Mode::kRead);
if (!mmap_) {
return false;
}
trace_data_ = reinterpret_cast<const uint8_t*>(mmap_->data());
trace_size_ = mmap_->size();
// Verify version.
auto header = reinterpret_cast<const TraceHeader*>(trace_data_);
if (header->version != kTraceFormatVersion) {
XELOGE("Trace format version mismatch, code has {}, file has {}",
kTraceFormatVersion, header->version);
if (header->version < kTraceFormatVersion) {
XELOGE("You need to regenerate your trace for the latest version");
}
return false;
}
XELOGI("Mapped {}b trace from {}", trace_size_, xe::path_to_utf8(path));
XELOGI(" Version: {}", header->version);
auto commit_str = std::string(header->build_commit_sha,
xe::countof(header->build_commit_sha));
XELOGI(" Commit: {}", commit_str);
XELOGI(" Title ID: {}", header->title_id);
ParseTrace();
return true;
}
void TraceReader::Close() {
mmap_.reset();
trace_data_ = nullptr;
trace_size_ = 0;
}
void TraceReader::ParseTrace() {
// Skip file header.
auto trace_ptr = trace_data_;
trace_ptr += sizeof(TraceHeader);
Frame current_frame;
current_frame.start_ptr = trace_ptr;
const PacketStartCommand* packet_start = nullptr;
const uint8_t* packet_start_ptr = nullptr;
const uint8_t* last_ptr = trace_ptr;
bool pending_break = false;
auto current_command_buffer = new CommandBuffer();
current_frame.command_tree =
std::unique_ptr<CommandBuffer>(current_command_buffer);
while (trace_ptr < trace_data_ + trace_size_) {
++current_frame.command_count;
auto type = static_cast<TraceCommandType>(xe::load<uint32_t>(trace_ptr));
switch (type) {
case TraceCommandType::kPrimaryBufferStart: {
auto cmd =
reinterpret_cast<const PrimaryBufferStartCommand*>(trace_ptr);
trace_ptr += sizeof(*cmd) + cmd->count * 4;
break;
}
case TraceCommandType::kPrimaryBufferEnd: {
auto cmd = reinterpret_cast<const PrimaryBufferEndCommand*>(trace_ptr);
trace_ptr += sizeof(*cmd);
break;
}
case TraceCommandType::kIndirectBufferStart: {
auto cmd =
reinterpret_cast<const IndirectBufferStartCommand*>(trace_ptr);
trace_ptr += sizeof(*cmd) + cmd->count * 4;
// Traverse down a level.
auto sub_command_buffer = new CommandBuffer();
sub_command_buffer->parent = current_command_buffer;
current_command_buffer->commands.push_back(
CommandBuffer::Command(sub_command_buffer));
current_command_buffer = sub_command_buffer;
break;
}
case TraceCommandType::kIndirectBufferEnd: {
auto cmd = reinterpret_cast<const IndirectBufferEndCommand*>(trace_ptr);
trace_ptr += sizeof(*cmd);
// IB packet is wrapped in a kPacketStart/kPacketEnd. Skip the end.
auto end_cmd = reinterpret_cast<const PacketEndCommand*>(trace_ptr);
assert_true(end_cmd->type == TraceCommandType::kPacketEnd);
trace_ptr += sizeof(*cmd);
// Go back up a level. If parent is null, this frame started in an
// indirect buffer.
if (current_command_buffer->parent) {
current_command_buffer = current_command_buffer->parent;
}
break;
}
case TraceCommandType::kPacketStart: {
auto cmd = reinterpret_cast<const PacketStartCommand*>(trace_ptr);
packet_start_ptr = trace_ptr;
packet_start = cmd;
trace_ptr += sizeof(*cmd) + cmd->count * 4;
break;
}
case TraceCommandType::kPacketEnd: {
auto cmd = reinterpret_cast<const PacketEndCommand*>(trace_ptr);
trace_ptr += sizeof(*cmd);
if (!packet_start_ptr) {
continue;
}
auto packet_category = PacketDisassembler::GetPacketCategory(
packet_start_ptr + sizeof(*packet_start));
switch (packet_category) {
case PacketCategory::kDraw: {
Frame::Command command;
command.type = Frame::Command::Type::kDraw;
command.head_ptr = packet_start_ptr;
command.start_ptr = last_ptr;
command.end_ptr = trace_ptr;
current_frame.commands.push_back(std::move(command));
last_ptr = trace_ptr;
current_command_buffer->commands.push_back(CommandBuffer::Command(
uint32_t(current_frame.commands.size() - 1)));
break;
}
case PacketCategory::kSwap: {
Frame::Command command;
command.type = Frame::Command::Type::kSwap;
command.head_ptr = packet_start_ptr;
command.start_ptr = last_ptr;
command.end_ptr = trace_ptr;
current_frame.commands.push_back(std::move(command));
last_ptr = trace_ptr;
current_command_buffer->commands.push_back(CommandBuffer::Command(
uint32_t(current_frame.commands.size() - 1)));
} break;
case PacketCategory::kGeneric: {
// Ignored.
break;
}
}
if (pending_break) {
current_frame.end_ptr = trace_ptr;
frames_.push_back(std::move(current_frame));
current_command_buffer = new CommandBuffer();
current_frame.command_tree =
std::unique_ptr<CommandBuffer>(current_command_buffer);
current_frame.start_ptr = trace_ptr;
current_frame.end_ptr = nullptr;
current_frame.command_count = 0;
pending_break = false;
}
break;
}
case TraceCommandType::kMemoryRead: {
auto cmd = reinterpret_cast<const MemoryCommand*>(trace_ptr);
trace_ptr += sizeof(*cmd) + cmd->encoded_length;
break;
}
case TraceCommandType::kMemoryWrite: {
auto cmd = reinterpret_cast<const MemoryCommand*>(trace_ptr);
trace_ptr += sizeof(*cmd) + cmd->encoded_length;
break;
}
case TraceCommandType::kEdramSnapshot: {
auto cmd = reinterpret_cast<const EdramSnapshotCommand*>(trace_ptr);
trace_ptr += sizeof(*cmd) + cmd->encoded_length;
break;
}
case TraceCommandType::kEvent: {
auto cmd = reinterpret_cast<const EventCommand*>(trace_ptr);
trace_ptr += sizeof(*cmd);
switch (cmd->event_type) {
case EventCommand::Type::kSwap: {
pending_break = true;
break;
}
}
break;
}
default:
// Broken trace file?
assert_unhandled_case(type);
break;
}
}
if (pending_break || current_frame.command_count) {
current_frame.end_ptr = trace_ptr;
frames_.push_back(std::move(current_frame));
}
}
bool TraceReader::DecompressMemory(MemoryEncodingFormat encoding_format,
const uint8_t* src, size_t src_size,
uint8_t* dest, size_t dest_size) {
switch (encoding_format) {
case MemoryEncodingFormat::kNone:
assert_true(src_size == dest_size);
std::memcpy(dest, src, src_size);
return true;
case MemoryEncodingFormat::kSnappy:
return snappy::RawUncompress(reinterpret_cast<const char*>(src), src_size,
reinterpret_cast<char*>(dest));
default:
assert_unhandled_case(encoding_format);
return false;
}
}
} // namespace gpu
} // namespace xe
| 3,600 |
462 | <reponame>VisualAwarenessTech/gdal-2.3.1<filename>ogr/ogrsf_frmts/dods/ogrdodsfielddefn.cpp
/******************************************************************************
*
* Project: OGR/DODS Interface
* Purpose: Implements OGRDODSFieldDefn class. This is a small class used
* to encapsulate information about a referenced field.
* Author: <NAME>, <EMAIL>
*
******************************************************************************
* Copyright (c) 2004, <NAME> <<EMAIL>>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
****************************************************************************/
#include "ogr_dods.h"
#include "cpl_conv.h"
CPL_CVSID("$Id: ogrdodsfielddefn.cpp 103562a4b99e01eef1f5cb350b2dccaa6fe01e81 2017-12-14 19:39:15Z Even Rouault $")
/************************************************************************/
/* OGRDODSFieldDefn() */
/************************************************************************/
OGRDODSFieldDefn::OGRDODSFieldDefn() :
bValid(false),
pszFieldName(nullptr),
pszFieldScope(nullptr),
iFieldIndex(-1),
pszFieldValue(nullptr),
pszPathToSequence(nullptr),
bRelativeToSuperSequence(false),
bRelativeToSequence(false)
{}
/************************************************************************/
/* ~OGRDODSFieldDefn() */
/************************************************************************/
OGRDODSFieldDefn::~OGRDODSFieldDefn()
{
CPLFree( pszFieldName );
CPLFree( pszFieldScope );
CPLFree( pszFieldValue );
CPLFree( pszPathToSequence );
}
/************************************************************************/
/* Initialize() */
/* */
/* Build field reference from a DAS entry. The AttrTable */
/* passed should be the container of the field defn. For */
/* instance, the "x_field" node with a name and scope sub */
/* entry. */
/************************************************************************/
bool OGRDODSFieldDefn::Initialize( AttrTable *poEntry,
BaseType *poTarget,
BaseType *poSuperSeq )
{
const char *l_pszFieldScope = poEntry->get_attr("scope").c_str();
if( l_pszFieldScope == nullptr )
l_pszFieldScope = "dds";
return Initialize( poEntry->get_attr("name").c_str(), l_pszFieldScope,
poTarget, poSuperSeq );
}
/************************************************************************/
/* Initialize() */
/************************************************************************/
bool OGRDODSFieldDefn::Initialize( const char *pszFieldNameIn,
const char *pszFieldScopeIn,
BaseType *poTarget,
BaseType *poSuperSeq )
{
pszFieldScope = CPLStrdup( pszFieldScopeIn );
pszFieldName = CPLStrdup( pszFieldNameIn );
if( poTarget != nullptr && EQUAL(pszFieldScope,"dds") )
{
string oTargPath = OGRDODSGetVarPath( poTarget );
int nTargPathLen = static_cast<int>(strlen(oTargPath.c_str()));
if( EQUALN(oTargPath.c_str(),pszFieldNameIn,nTargPathLen)
&& pszFieldNameIn[nTargPathLen] == '.' )
{
CPLFree( pszFieldName );
pszFieldName = CPLStrdup( pszFieldNameIn + nTargPathLen + 1 );
bRelativeToSequence = true;
iFieldIndex = OGRDODSGetVarIndex(
dynamic_cast<Sequence *>( poTarget ), pszFieldName );
}
else if( poSuperSeq != nullptr )
{
oTargPath = OGRDODSGetVarPath( poSuperSeq );
nTargPathLen = static_cast<int>(strlen(oTargPath.c_str()));
if( EQUALN(oTargPath.c_str(),pszFieldNameIn,nTargPathLen)
&& pszFieldNameIn[nTargPathLen] == '.' )
{
CPLFree( pszFieldName );
pszFieldName = CPLStrdup( pszFieldNameIn + nTargPathLen + 1 );
bRelativeToSuperSequence = true;
iFieldIndex = OGRDODSGetVarIndex(
dynamic_cast<Sequence *>( poSuperSeq ), pszFieldName );
}
}
}
bValid = true;
return true;
}
/************************************************************************/
/* OGRDODSGetVarPath() */
/* */
/* Return the full path to a variable. */
/************************************************************************/
string OGRDODSGetVarPath( BaseType *poTarget )
{
string oFullName;
oFullName = poTarget->name();
while( (poTarget = poTarget->get_parent()) != nullptr )
{
oFullName = poTarget->name() + "." + oFullName;
}
return oFullName;
}
/************************************************************************/
/* OGRDODSGetVarIndex() */
/************************************************************************/
int OGRDODSGetVarIndex( Sequence *poParent, string oVarName )
{
Sequence::Vars_iter v_i;
int i;
for( v_i = poParent->var_begin(), i=0;
v_i != poParent->var_end();
v_i++, i++ )
{
if( EQUAL((*v_i)->name().c_str(),oVarName.c_str()) )
return i;
}
return -1;
}
| 2,918 |
463 | import time
from pathlib import Path
import psutil
from labml import logger
from labml.internal.computer.monitor.process import ProcessMonitor
from labml.logger import Text
from labml.utils.notice import labml_notice
class Scanner:
def __init__(self):
self.data = {}
self.cache = {}
self.nvml = None
self.n_gpu = 0
try:
from py3nvml import py3nvml as nvml
self.nvml = nvml
except ImportError:
labml_notice('Install py3nvml to monitor GPUs:\n pip install py3nvml',
is_warn=False)
if self.nvml:
try:
self.nvml.nvmlInit()
self.nvml.nvmlShutdown()
except self.nvml.NVMLError:
logger.log('NVML Library not found', Text.warning)
self.nvml = None
self.process_monitor = ProcessMonitor(self.nvml)
def configs(self):
configs = {
'os': self.get_os(),
'cpu.logical': psutil.cpu_count(),
'cpu.physical': psutil.cpu_count(logical=False),
}
configs.update(self._gpu_header())
configs.update(self._sensor_header())
return configs
def _sensor_header(self):
try:
sensors = psutil.sensors_temperatures()
except AttributeError as e:
return {}
data = {}
for k, temps in sensors.items():
for i, t in enumerate(temps):
assert isinstance(t, psutil._common.shwtemp)
data[f'sensor.temp.name.{k}.{i}'] = t.label
return data
def _gpu_header(self):
if not self.nvml:
return {}
self.nvml.nvmlInit()
self.n_gpu = self.nvml.nvmlDeviceGetCount()
res = {'gpus': self.n_gpu}
for i in range(self.n_gpu):
handle = self.nvml.nvmlDeviceGetHandleByIndex(i)
res.update({
f'gpu.name.{i}': self.nvml.nvmlDeviceGetName(handle),
})
self.nvml.nvmlShutdown()
return res
def track_gpu(self):
if not self.nvml:
return
self.nvml.nvmlInit()
for i in range(self.n_gpu):
handle = self.nvml.nvmlDeviceGetHandleByIndex(i)
self.data.update({
f'gpu.memory.used.{i}': self.nvml.nvmlDeviceGetMemoryInfo(handle).used,
f'gpu.utilization.{i}': self.nvml.nvmlDeviceGetUtilizationRates(handle).gpu,
f'gpu.temperature.{i}': self.nvml.nvmlDeviceGetTemperature(handle, self.nvml.NVML_TEMPERATURE_GPU),
f'gpu.power.usage.{i}': self.nvml.nvmlDeviceGetPowerUsage(handle),
})
self.data.update(self.process_monitor.track_gpus())
self.nvml.nvmlShutdown()
def first_gpu(self):
if not self.nvml:
return
self.nvml.nvmlInit()
for i in range(self.n_gpu):
handle = self.nvml.nvmlDeviceGetHandleByIndex(i)
self.data[f'gpu.memory.total.{i}'] = self.nvml.nvmlDeviceGetMemoryInfo(handle).total
try:
self.data[f'gpu.power.limit.{i}'] = self.nvml.nvmlDeviceGetPowerManagementLimit(handle)
except self.nvml.NVMLError:
pass
self.nvml.nvmlShutdown()
def track_net_io_counters(self):
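        # Convert the cumulative byte counters into bytes/second using the time elapsed since the previous sample.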
res = psutil.net_io_counters()
t = time.time()
if 'net.recv' in self.cache:
td = t - self.cache['net.time']
self.data.update({
'net.recv': (res.bytes_recv - self.cache['net.recv']) / td,
'net.sent': (res.bytes_sent - self.cache['net.sent']) / td,
})
self.cache['net.recv'] = res.bytes_recv
self.cache['net.sent'] = res.bytes_sent
self.cache['net.time'] = t
def track_memory(self):
res = psutil.virtual_memory()
self.data.update({
'memory.total': res.total,
'memory.used': res.used,
'memory.available': res.available,
})
def track_cpu(self):
res = psutil.cpu_times()
self.data.update({
'cpu.idle': res.idle,
'cpu.system': res.system,
'cpu.user': res.user,
})
res = psutil.cpu_freq()
if res is not None:
self.data.update({
'cpu.freq': res.current,
'cpu.freq.min': res.min,
'cpu.freq.max': res.max,
})
res = psutil.cpu_percent(percpu=True)
self.data.update({f'cpu.perc.{i}': p for i, p in enumerate(res)})
def track_disk(self):
res = psutil.disk_usage(Path.home())
self.data.update({
'disk.free': res.free,
'disk.total': res.total,
'disk.used': res.used,
})
def track_sensors(self):
try:
sensors = psutil.sensors_temperatures()
except AttributeError as e:
return
for k, temps in sensors.items():
for i, t in enumerate(temps):
self.data[f'sensor.temp.{k}.{i}'] = t.current
def track_processes(self):
self.data.update(self.process_monitor.track())
def track_battery(self):
try:
battery = psutil.sensors_battery()._asdict()
except AttributeError as e:
return
except FileNotFoundError as e:
return
self.data.update({
'battery.percent': battery['percent'],
'battery.power_plugged': battery['power_plugged'],
'battery.secsleft': battery['secsleft'],
})
def track(self):
self.data = {}
try:
self.track_net_io_counters()
self.track_memory()
self.track_cpu()
self.track_disk()
self.track_sensors()
self.track_battery()
self.track_processes()
self.track_gpu()
except Exception as e:
print(e)
return self.data
def first(self):
self.data = {}
self.first_gpu()
return self.data
@staticmethod
def get_os():
if psutil.MACOS:
return 'macos'
elif psutil.LINUX:
return 'linux'
elif psutil.WINDOWS:
return 'windows'
else:
return 'unknown'
def _test():
scanner = Scanner()
from labml.logger import inspect
inspect(scanner.configs())
inspect(scanner.first())
inspect(scanner.track())
if __name__ == '__main__':
_test()
| 3,397 |
745 | <gh_stars>100-1000
#ifndef CK_CLUSTER_DESCRIPTOR_HPP
#define CK_CLUSTER_DESCRIPTOR_HPP
#include "common_header.hpp"
#include "tensor_adaptor.hpp"
namespace ck {
template <typename Lengths,
typename ArrangeOrder = typename arithmetic_sequence_gen<0, Lengths::Size(), 1>::type>
__host__ __device__ constexpr auto make_cluster_descriptor(
const Lengths& lengths,
ArrangeOrder order = typename arithmetic_sequence_gen<0, Lengths::Size(), 1>::type{})
{
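    // Build a single-stage adaptor whose merge transform ties the re-ordered lengths to one linear dimension,
    // so a flat cluster index can be converted to a multi-dimensional position.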
constexpr index_t ndim_low = Lengths::Size();
const auto reordered_lengths = container_reorder_given_new2old(lengths, order);
const auto low_lengths = generate_tuple(
[&](auto idim_low) { return reordered_lengths[idim_low]; }, Number<ndim_low>{});
const auto transform = make_merge_transform(low_lengths);
constexpr auto low_dim_old_top_ids = ArrangeOrder{};
constexpr auto up_dim_new_top_ids = Sequence<0>{};
return make_single_stage_tensor_adaptor(
make_tuple(transform), make_tuple(low_dim_old_top_ids), make_tuple(up_dim_new_top_ids));
}
} // namespace ck
#endif
| 421 |
640 | <gh_stars>100-1000
from binance_f import RequestClient
from binance_f.constant.test import *
from binance_f.base.printobject import *
from binance_f.model.constant import *
request_client = RequestClient(api_key=g_api_key, secret_key=g_secret_key)
result = request_client.change_position_margin(symbol="BTCUSDT", amount=0.5, type=1)
PrintBasic.print_obj(result)
| 136 |
1,687 | public class Solution2 {
public ListNode mergeKLists(ListNode[] lists) {
int len = lists.length;
if (len == 0) {
return null;
}
ListNode res = lists[0];
for (int i = 1; i < len; i++) {
if (lists[i] != null) {
res = mergeTwoSortLinkedList(res, lists[i]);
}
}
return res;
}
private ListNode mergeTwoSortLinkedList(ListNode list1, ListNode list2) {
ListNode dummyNode = new ListNode(-1);
ListNode p1 = list1;
ListNode p2 = list2;
ListNode curNode = dummyNode;
        // Compare only while both lists are non-null
while (p1 != null && p2 != null) {
if (p1.val < p2.val) {
curNode.next = p1;
p1 = p1.next;
} else {
curNode.next = p2;
p2 = p2.next;
}
curNode = curNode.next;
}
        // We exited the loop because p1 == null or p2 == null
if (p1 == null) {
curNode.next = p2;
} else {
curNode.next = p1;
}
return dummyNode.next;
}
} | 678 |
1,062 | <reponame>larkov/MailTrackerBlocker
//
// Generated by class-dump 3.5b1 (64 bit) (Debug version compiled Dec 3 2019 19:59:57).
//
// Copyright (C) 1997-2019 <NAME>.
//
#import <objc/NSObject.h>
@class NSArray;
@interface MCMessageGenerator : NSObject
{
struct __SecIdentity *_signingIdentity; // 8 = 0x8
BOOL _shouldMarkNonresizableAttachmentData; // 16 = 0x10
BOOL _createsMimeAlternatives; // 17 = 0x11
BOOL _createsPlainTextOnly; // 18 = 0x12
BOOL _alwaysCreatesRichText; // 19 = 0x13
BOOL _allows8BitMimeParts; // 20 = 0x14
BOOL _allowsBinaryMimeParts; // 21 = 0x15
BOOL _allowsAppleDoubleAttachments; // 22 = 0x16
unsigned long long _encodingHint; // 24 = 0x18
NSArray *_encryptionCertificates; // 32 = 0x20
}
+ (id)domainHintForResentIDFromHeaders:(id)arg1 hasResentFromHeaders:(char *)arg2; // IMP=0x000000000002ebdb
@property(copy, nonatomic) NSArray *encryptionCertificates; // @synthesize encryptionCertificates=_encryptionCertificates;
@property(nonatomic) BOOL allowsAppleDoubleAttachments; // @synthesize allowsAppleDoubleAttachments=_allowsAppleDoubleAttachments;
@property(nonatomic) BOOL allowsBinaryMimeParts; // @synthesize allowsBinaryMimeParts=_allowsBinaryMimeParts;
@property(nonatomic) BOOL allows8BitMimeParts; // @synthesize allows8BitMimeParts=_allows8BitMimeParts;
@property(nonatomic) BOOL alwaysCreatesRichText; // @synthesize alwaysCreatesRichText=_alwaysCreatesRichText;
@property(nonatomic) BOOL createsPlainTextOnly; // @synthesize createsPlainTextOnly=_createsPlainTextOnly;
@property(nonatomic) BOOL createsMimeAlternatives; // @synthesize createsMimeAlternatives=_createsMimeAlternatives;
@property(nonatomic) unsigned long long encodingHint; // @synthesize encodingHint=_encodingHint;
// - (void).cxx_destruct; // IMP=0x0000000000064158
- (id)_newRFC2047NameParameterDataForMimePart:(id)arg1; // IMP=0x00000000000273df
- (id)_hfsFilenameDataWithFilename:(id)arg1 partData:(id)arg2; // IMP=0x0000000000063d9b
- (void)_setMimeTypeFromAttachment:(id)arg1 onMimePart:(id)arg2 filename:(id *)arg3; // IMP=0x00000000000330de
- (BOOL)_encodeDataForMimePart:(id)arg1 withPartData:(id)arg2; // IMP=0x000000000002f19b
- (id)_newDataForMimePart:(id)arg1 withPartData:(id)arg2; // IMP=0x0000000000063c4f
- (BOOL)appendDataForMimePart:(id)arg1 toData:(id)arg2 withPartData:(id)arg3; // IMP=0x0000000000026581
- (void)_appendHeadersForMimePart:(id)arg1 toHeaders:(id)arg2; // IMP=0x0000000000026ac6
- (id)_newPartForDirectoryAttachment:(id)arg1 partData:(id)arg2; // IMP=0x0000000000063a12
- (id)_newPartForAttachment:(id)arg1 partData:(id)arg2; // IMP=0x0000000000032707
- (id)_newMimePartWithWebResource:(id)arg1 partData:(id)arg2 seenURLStrings:(id)arg3; // IMP=0x0000000000063681
- (id)_newPartAndDataForString:(id)arg1 charset:(id)arg2 subtype:(id)arg3 partData:(id)arg4; // IMP=0x000000000002da9c
- (id)_newPartAndDataForHTMLStringAndMIMECharset:(id)arg1 partData:(id)arg2; // IMP=0x00000000000634f7
- (id)_newPlainTextPartWithAttributedString:(id)arg1 partData:(id)arg2; // IMP=0x0000000000033487
- (id)_newMimePartWithAttributedString:(id)arg1 partData:(id)arg2 outputRich:(BOOL)arg3; // IMP=0x000000000006333b
- (id)_encryptionCertificatesForRecipients:(id)arg1; // IMP=0x0000000000062cb9
- (id)_newOutgoingMessageFromTopLevelMimePart:(id)arg1 topLevelHeaders:(id)arg2 withPartData:(id)arg3; // IMP=0x000000000002e2ed
- (id)newMessageByRemovingAttachmentsFromMessage:(id)arg1; // IMP=0x0000000000061e5d
- (id)newMessageWithBodyData:(id)arg1 headers:(id)arg2; // IMP=0x0000000000061d63
- (id)newDataForAttributedString:(id)arg1; // IMP=0x0000000000061d16
- (id)newMessageWithAttributedString:(id)arg1 headers:(id)arg2; // IMP=0x000000000006165c
- (void)_recursivelyAddSubresourcesFromArchive:(id)arg1 toArray:(id)arg2; // IMP=0x0000000000061413
- (id)newMessageWithHtmlString:(id)arg1 plainTextAlternative:(id)arg2 otherHtmlStringsAndAttachments:(id)arg3 headers:(id)arg4; // IMP=0x000000000002cd3f
- (void)setSigningIdentity:(struct __SecIdentity *)arg1; // IMP=0x00000000000613de
- (void)setShouldMarkNonresizableAttachmentData:(BOOL)arg1; // IMP=0x000000000002cd36
- (void)dealloc; // IMP=0x000000000006139f
- (id)init; // IMP=0x00000000000264eb
@end
| 1,625 |
2,539 | // LangUtils.h
#ifndef __LANG_UTILS_H
#define __LANG_UTILS_H
#include "../../../Windows/ResourceString.h"
#ifdef LANG
extern UString g_LangID;
struct CIDLangPair
{
UInt32 ControlID;
UInt32 LangID;
};
void ReloadLang();
void LoadLangOneTime();
FString GetLangDirPrefix();
void LangSetDlgItemText(HWND dialog, UInt32 controlID, UInt32 langID);
void LangSetDlgItems(HWND dialog, const UInt32 *ids, unsigned numItems);
void LangSetDlgItems_Colon(HWND dialog, const UInt32 *ids, unsigned numItems);
void LangSetWindowText(HWND window, UInt32 langID);
UString LangString(UInt32 langID);
void AddLangString(UString &s, UInt32 langID);
void LangString(UInt32 langID, UString &dest);
void LangString_OnlyFromLangFile(UInt32 langID, UString &dest);
#else
inline UString LangString(UInt32 langID) { return NWindows::MyLoadString(langID); }
inline void LangString(UInt32 langID, UString &dest) { NWindows::MyLoadString(langID, dest); }
inline void AddLangString(UString &s, UInt32 langID) { s += NWindows::MyLoadString(langID); }
#endif
#endif
| 392 |
318 | <filename>addons/blender.NodeOSC/server/pythonosc/osc_bundle.py
import logging
from pythonosc import osc_message
from pythonosc.parsing import osc_types
from typing import Any, Iterator
_BUNDLE_PREFIX = b"#bundle\x00"
class ParseError(Exception):
"""Base exception raised when a datagram parsing error occurs."""
class OscBundle(object):
"""Bundles elements that should be triggered at the same time.
An element can be another OscBundle or an OscMessage.
"""
def __init__(self, dgram: bytes) -> None:
"""Initializes the OscBundle with the given datagram.
Args:
dgram: a UDP datagram representing an OscBundle.
Raises:
ParseError: if the datagram could not be parsed into an OscBundle.
"""
# Interesting stuff starts after the initial b"#bundle\x00".
self._dgram = dgram
index = len(_BUNDLE_PREFIX)
try:
self._timestamp, index = osc_types.get_date(self._dgram, index)
except osc_types.ParseError as pe:
raise ParseError("Could not get the date from the datagram: %s" % pe)
# Get the contents as a list of OscBundle and OscMessage.
self._contents = self._parse_contents(index)
# Return type is actually List[OscBundle], but that would require import annotations from __future__, which is
# python 3.7+ only.
def _parse_contents(self, index: int) -> Any:
contents = []
try:
# An OSC Bundle Element consists of its size and its contents.
# The size is an int32 representing the number of 8-bit bytes in the
# contents, and will always be a multiple of 4. The contents are either
# an OSC Message or an OSC Bundle.
while self._dgram[index:]:
# Get the sub content size.
content_size, index = osc_types.get_int(self._dgram, index)
# Get the datagram for the sub content.
content_dgram = self._dgram[index:index + content_size]
# Increment our position index up to the next possible content.
index += content_size
# Parse the content into an OSC message or bundle.
if OscBundle.dgram_is_bundle(content_dgram):
contents.append(OscBundle(content_dgram))
elif osc_message.OscMessage.dgram_is_message(content_dgram):
contents.append(osc_message.OscMessage(content_dgram))
else:
logging.warning(
"Could not identify content type of dgram %s" % content_dgram)
except (osc_types.ParseError, osc_message.ParseError, IndexError) as e:
raise ParseError("Could not parse a content datagram: %s" % e)
return contents
@staticmethod
def dgram_is_bundle(dgram: bytes) -> bool:
"""Returns whether this datagram starts like an OSC bundle."""
return dgram.startswith(_BUNDLE_PREFIX)
@property
def timestamp(self) -> int:
"""Returns the timestamp associated with this bundle."""
return self._timestamp
@property
def num_contents(self) -> int:
"""Shortcut for len(*bundle) returning the number of elements."""
return len(self._contents)
@property
def size(self) -> int:
"""Returns the length of the datagram for this bundle."""
return len(self._dgram)
@property
def dgram(self) -> bytes:
"""Returns the datagram from which this bundle was built."""
return self._dgram
    def content(self, index: int) -> Any:
"""Returns the bundle's content 0-indexed."""
return self._contents[index]
def __iter__(self) -> Iterator[Any]:
"""Returns an iterator over the bundle's content."""
return iter(self._contents)
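

# --- Illustrative usage sketch (not part of the upstream module) ---
# Builds the smallest datagram this parser accepts: the "#bundle\x00" prefix
# followed by an 8-byte OSC time tag and no bundle elements, then checks that
# it is recognised and parsed as an empty bundle. The time tag bytes encode
# the conventional "immediately" value; the exact timestamp returned depends
# on the bundled osc_types implementation, so it is not asserted here.
def _empty_bundle_example() -> OscBundle:
    dgram = _BUNDLE_PREFIX + b"\x00\x00\x00\x00\x00\x00\x00\x01"
    assert OscBundle.dgram_is_bundle(dgram)
    bundle = OscBundle(dgram)
    assert bundle.num_contents == 0
    return bundle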
| 1,586 |
22,688 | /******************************************************************************
* Copyright 2019 The Apollo Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#pragma once
#include "modules/canbus/proto/chassis_detail.pb.h"
#include "modules/drivers/canbus/can_comm/protocol_data.h"
namespace apollo {
namespace canbus {
namespace ge3 {
class Scuepb310 : public ::apollo::drivers::canbus::ProtocolData<
::apollo::canbus::ChassisDetail> {
public:
static const int32_t ID;
Scuepb310();
void Parse(const std::uint8_t* bytes, int32_t length,
ChassisDetail* chassis) const override;
private:
// config detail: {'description': 'EPS interrupt index', 'enum': {0:
// 'EPB_INTIDX_NOINT', 1: 'EPB_INTIDX_OVERFLOW', 2: 'EPB_INTIDX_TIMEOUT'},
// 'precision': 1.0, 'len': 3, 'name': 'EPB_IntIdx', 'is_signed_var': False,
// 'offset': 0.0, 'physical_range': '[0|7]', 'bit': 10, 'type': 'enum',
// 'order': 'motorola', 'physical_unit': ''}
Scu_epb_310::Epb_intidxType epb_intidx(const std::uint8_t* bytes,
const int32_t length) const;
// config detail: {'description': 'EPB drive mode', 'enum': {0:
// 'EPB_DRVMODE_INVALID', 1: 'EPB_DRVMODE_MANUAL', 2: 'EPB_DRVMODE_INTERRUPT',
// 3: 'EPB_DRVMODE_AUTO'}, 'precision': 1.0, 'len': 2, 'name': 'EPB_DrvMode',
// 'is_signed_var': False, 'offset': 0.0, 'physical_range': '[0|3]', 'bit': 6,
// 'type': 'enum', 'order': 'motorola', 'physical_unit': ''}
Scu_epb_310::Epb_drvmodeType epb_drvmode(const std::uint8_t* bytes,
const int32_t length) const;
// config detail: {'description': 'EPB system status', 'enum': {0:
// 'EPB_SYSST_RELEASED', 1: 'EPB_SYSST_APPLIED', 2: 'EPB_SYSST_RELEASING', 3:
// 'EPB_SYSST_FAULT', 4: 'EPB_SYSST_APPLYING', 5: 'EPB_SYSST_DISENGAGED'},
// 'precision': 1.0, 'len': 3, 'name': 'EPB_SysSt', 'is_signed_var': False,
// 'offset': 0.0, 'physical_range': '[0|7]', 'bit': 2, 'type': 'enum',
// 'order': 'motorola', 'physical_unit': ''}
Scu_epb_310::Epb_sysstType epb_sysst(const std::uint8_t* bytes,
const int32_t length) const;
// config detail: {'description': 'EPB fault status', 'enum': {0:
// 'EPB_FAULTST_NORMAL', 1: 'EPB_FAULTST_FAULT'}, 'precision': 1.0, 'len': 1,
// 'name': 'EPB_FaultSt', 'is_signed_var': False, 'offset': 0.0,
// 'physical_range': '[0|1]', 'bit': 7, 'type': 'enum', 'order': 'motorola',
// 'physical_unit': ''}
Scu_epb_310::Epb_faultstType epb_faultst(const std::uint8_t* bytes,
const int32_t length) const;
};
} // namespace ge3
} // namespace canbus
} // namespace apollo
| 1,359 |
415 | {
"Comment": "comment -- dsa/1024 by putty 58yj22GKexky3cj3",
"FingerprintSHA256": "wmySj3ZNYvaJ9TykmPjoXwsfW9Ylbkiw+2quczVDmLI",
"FingerprintMD5": "4e:b9:68:0e:09:fb:48:46:47:10:a3:8b:4c:76:1c:f7",
"Type": "dsa",
"Bits": 1024,
"Encrypted": true,
"command": "puttygen --random-device /dev/urandom -t dsa -b 1024 -o specs/RkhmCKiRdhQ21uGf -O private-sshcom --new-passphrase /dev/fd/63 -C comment -- dsa/1024 by putty 58yj22GKexky3cj3",
"Format": "sshcom",
"Source": "putty"
}
| 267 |
482 | /*
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.prana.http.api;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.config.DynamicProperty;
import com.netflix.prana.http.Context;
import rx.Observable;
import javax.inject.Inject;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Created by dchoudhury on 10/20/14.
*/
public class DynamicPropertiesHandler extends AbstractRequestHandler {
private static final String ID_QUERY_PARAMETER = "id";
@Inject
public DynamicPropertiesHandler(ObjectMapper objectMapper) {
super(objectMapper);
}
@Override
Observable<Void> handle(Context context) {
Map<String, String> properties = new HashMap<>();
List<String> ids = context.getQueryParams(ID_QUERY_PARAMETER);
for (String id : ids) {
String property = DynamicProperty.getInstance(id).getString(null);
properties.put(id, property);
}
return context.send(properties);
}
}
| 527 |
313 | <gh_stars>100-1000
//------------------------------------------------------------------------------
// GB_mex_mxm_generic: C<Mask> = accum(C,A*B)
//------------------------------------------------------------------------------
// SuiteSparse:GraphBLAS, <NAME>, (c) 2017-2022, All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
//------------------------------------------------------------------------------
#include "GB_mex.h"
#define USAGE "C = GB_mex_mxm_generic (C, Mask, accum, semiring, A, B, desc)"
#define FREE_ALL \
{ \
GrB_Matrix_free_(&A) ; \
GrB_Matrix_free_(&B) ; \
GrB_Matrix_free_(&C) ; \
GrB_Matrix_free_(&Mask) ; \
GrB_Monoid_free_(&myplus_monoid) ; \
GrB_BinaryOp_free_(&myplus) ; \
if (semiring != Complex_plus_times) \
{ \
GrB_Semiring_free_(&semiring) ; \
} \
GrB_Descriptor_free_(&desc) ; \
GB_mx_put_global (true) ; \
}
void My_Plus_int64 (void *z, const void *x, const void *y) ;
void My_Plus_int32 (void *z, const void *x, const void *y) ;
void My_Plus_fp64 (void *z, const void *x, const void *y) ;
void My_Plus_int64 (void *z, const void *x, const void *y)
{
int64_t a = (*((int64_t *) x)) ;
int64_t b = (*((int64_t *) y)) ;
int64_t c = a + b ;
(*((int64_t *) z)) = c ;
}
void My_Plus_int32 (void *z, const void *x, const void *y)
{
int32_t a = (*((int32_t *) x)) ;
int32_t b = (*((int32_t *) y)) ;
int32_t c = a + b ;
(*((int32_t *) z)) = c ;
}
void My_Plus_fp64 (void *z, const void *x, const void *y)
{
double a = (*((double *) x)) ;
double b = (*((double *) y)) ;
double c = a + b ;
(*((double *) z)) = c ;
}
void mexFunction
(
int nargout,
mxArray *pargout [ ],
int nargin,
const mxArray *pargin [ ]
)
{
bool malloc_debug = GB_mx_get_global (true) ;
GrB_Matrix A = NULL ;
GrB_Matrix B = NULL ;
GrB_Matrix C = NULL ;
GrB_Matrix Mask = NULL ;
GrB_Semiring semiring = NULL ;
GrB_Descriptor desc = NULL ;
GrB_BinaryOp myplus = NULL ;
GrB_Monoid myplus_monoid = NULL ;
// check inputs
if (nargout > 1 || nargin < 6 || nargin > 7)
{
mexErrMsgTxt ("Usage: " USAGE) ;
}
// get C (make a deep copy)
#define GET_DEEP_COPY \
C = GB_mx_mxArray_to_Matrix (pargin [0], "C input", true, true) ;
#define FREE_DEEP_COPY GrB_Matrix_free_(&C) ;
GET_DEEP_COPY ;
if (C == NULL)
{
FREE_ALL ;
mexErrMsgTxt ("C failed") ;
}
// get Mask (shallow copy)
Mask = GB_mx_mxArray_to_Matrix (pargin [1], "Mask", false, false) ;
if (Mask == NULL && !mxIsEmpty (pargin [1]))
{
FREE_ALL ;
mexErrMsgTxt ("Mask failed") ;
}
// get A (shallow copy)
A = GB_mx_mxArray_to_Matrix (pargin [4], "A input", false, true) ;
if (A == NULL)
{
FREE_ALL ;
mexErrMsgTxt ("A failed") ;
}
// get B (shallow copy)
B = GB_mx_mxArray_to_Matrix (pargin [5], "B input", false, true) ;
if (B == NULL)
{
FREE_ALL ;
mexErrMsgTxt ("B failed") ;
}
bool user_complex = (Complex != GxB_FC64) && (C->type == Complex) ;
// get semiring
if (!GB_mx_mxArray_to_Semiring (&semiring, pargin [3], "semiring",
C->type, user_complex))
{
FREE_ALL ;
mexErrMsgTxt ("semiring failed") ;
}
if (semiring != NULL && semiring->add == GrB_PLUS_MONOID_INT64)
{
// replace the semiring with a user-defined monoid
GrB_BinaryOp mult = semiring->multiply ;
GrB_Monoid_free_(&(semiring->add)) ;
GrB_Semiring_free_(&semiring) ;
GrB_BinaryOp_new (&myplus, My_Plus_int64,
GrB_INT64, GrB_INT64, GrB_INT64) ;
// add a spurious terminal value
GxB_Monoid_terminal_new_INT64 (&myplus_monoid, myplus,
(int64_t) 0, (int64_t) -111) ;
GrB_Semiring_new (&semiring, myplus_monoid, mult) ;
}
else if (semiring != NULL && semiring->add == GrB_PLUS_MONOID_INT32)
{
// replace the semiring with a user-defined monoid
GrB_BinaryOp mult = semiring->multiply ;
GrB_Monoid_free_(&(semiring->add)) ;
GrB_Semiring_free_(&semiring) ;
GrB_BinaryOp_new (&myplus, My_Plus_int32,
GrB_INT32, GrB_INT32, GrB_INT32) ;
// add a spurious terminal value
GxB_Monoid_terminal_new_INT32 (&myplus_monoid, myplus,
(int32_t) 0, (int32_t) -111) ;
GrB_Semiring_new (&semiring, myplus_monoid, mult) ;
}
else if (semiring != NULL && semiring->add == GrB_PLUS_MONOID_FP64)
{
// replace the semiring with a user-defined monoid
GrB_BinaryOp mult = semiring->multiply ;
GrB_Monoid_free_(&(semiring->add)) ;
GrB_Semiring_free_(&semiring) ;
GrB_BinaryOp_new (&myplus, My_Plus_fp64,
GrB_FP64, GrB_FP64, GrB_FP64) ;
GrB_Monoid_new_FP64 (&myplus_monoid, myplus, (double) 0) ;
GrB_Semiring_new (&semiring, myplus_monoid, mult) ;
}
// get accum, if present
GrB_BinaryOp accum ;
if (!GB_mx_mxArray_to_BinaryOp (&accum, pargin [2], "accum",
C->type, user_complex))
{
FREE_ALL ;
mexErrMsgTxt ("accum failed") ;
}
// get desc
if (!GB_mx_mxArray_to_Descriptor (&desc, PARGIN (6), "desc"))
{
FREE_ALL ;
mexErrMsgTxt ("desc failed") ;
}
// C<Mask> = accum(C,A*B)
METHOD (GrB_mxm (C, Mask, accum, semiring, A, B, desc)) ;
// return C as a struct and free the GraphBLAS C
pargout [0] = GB_mx_Matrix_to_mxArray (&C, "C output from GrB_mxm", true) ;
FREE_ALL ;
}
| 3,018 |
463 | <reponame>elgalu/labml<gh_stars>100-1000
import numpy as np
import torch
from labml.configs import BaseConfigs, option
class SetSeed:
def __init__(self, seed: int):
self.seed = seed
def __call__(self):
torch.manual_seed(self.seed)
np.random.seed(self.seed)
class SeedConfigs(BaseConfigs):
r"""
This is a configurable module for setting the seeds.
It will set seeds with ``torch.manual_seed`` and ``np.random.seed``.
You need to call ``set`` method to set seeds
(`example <https://github.com/labmlai/labml/blob/master/samples/pytorch/mnist/e_labml_helpers.py>`_).
Arguments:
seed (int): Seed integer. Defaults to ``5``.
"""
seed: int = 5
set = '_set_seed'
@option(SeedConfigs.set)
def _set_seed(c: SeedConfigs):
return SetSeed(c.seed)
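

# --- Illustrative usage sketch (not part of the upstream package) ---
# SetSeed is a plain callable, so it can also be used directly, outside the
# BaseConfigs/option machinery configured above. The helper name below is an
# assumption made only for this example.
def _demo_direct_seeding(seed: int = 5):
    SetSeed(seed)()  # seeds both torch and numpy, as SeedConfigs.set would
    # After seeding, these draws are reproducible across runs.
    return torch.rand(1).item(), np.random.rand()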
| 342 |
1,056 | <gh_stars>1000+
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.mercurial.ui.properties;
import org.netbeans.modules.versioning.spi.VCSContext;
import org.netbeans.modules.mercurial.util.HgUtils;
import org.netbeans.modules.mercurial.ui.actions.ContextAction;
import javax.swing.*;
import java.io.File;
import java.awt.BorderLayout;
import java.awt.Dialog;
import org.netbeans.modules.mercurial.Mercurial;
import org.openide.DialogDescriptor;
import org.openide.DialogDisplayer;
import org.openide.nodes.Node;
import org.openide.util.HelpCtx;
import org.openide.util.NbBundle;
/**
* Properties for mercurial:
* Set hg repository properties
*
* @author <NAME>
*/
@NbBundle.Messages({
"CTL_MenuItem_Properties=Pr&operties..."
})
public class PropertiesAction extends ContextAction {
private static final String ICON_RESOURCE = "org/netbeans/modules/mercurial/resources/icons/properties.png"; //NOI18N
public PropertiesAction () {
super(ICON_RESOURCE);
}
@Override
protected String iconResource () {
return ICON_RESOURCE;
}
@Override
protected boolean enable(Node[] nodes) {
return HgUtils.isFromHgRepository(HgUtils.getCurrentContext(nodes));
}
@Override
protected String getBaseName(Node[] nodes) {
return "CTL_MenuItem_Properties"; //NOI18N
}
@Override
protected void performContextAction(Node[] nodes) {
VCSContext context = HgUtils.getCurrentContext(nodes);
final File roots[] = HgUtils.getActionRoots(context);
if (roots == null || roots.length == 0) return;
final File root = Mercurial.getInstance().getRepositoryRoot(roots[0]);
final PropertiesPanel panel = new PropertiesPanel();
final PropertiesTable propTable;
propTable = new PropertiesTable(panel.labelForTable, PropertiesTable.PROPERTIES_COLUMNS);
panel.setPropertiesTable(propTable);
JComponent component = propTable.getComponent();
panel.propsPanel.setLayout(new BorderLayout());
panel.propsPanel.add(component, BorderLayout.CENTER);
HgProperties hgProperties = new HgProperties(panel, propTable, root);
DialogDescriptor dd = new DialogDescriptor(panel, org.openide.util.NbBundle.getMessage(PropertiesAction.class, "CTL_PropertiesDialog_Title", null), true, null); // NOI18N
JButton okButton = new JButton();
org.openide.awt.Mnemonics.setLocalizedText(okButton, org.openide.util.NbBundle.getMessage(PropertiesAction.class, "CTL_Properties_Action_OK"));
okButton.getAccessibleContext().setAccessibleName(org.openide.util.NbBundle.getMessage(PropertiesAction.class, "ACSN_Properties_Action_OK")); // NOI18N
okButton.getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getMessage(PropertiesAction.class, "ACSD_Properties_Action_OK"));
JButton cancelButton = new JButton();
org.openide.awt.Mnemonics.setLocalizedText(cancelButton, org.openide.util.NbBundle.getMessage(PropertiesAction.class, "CTL_Properties_Action_Cancel"));
cancelButton.getAccessibleContext().setAccessibleName(org.openide.util.NbBundle.getMessage(PropertiesAction.class, "ACSN_Properties_Action_Cancel")); // NOI18N
cancelButton.getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getMessage(PropertiesAction.class, "ACSD_Properties_Action_Cancel"));
dd.setOptions(new Object[] {okButton, cancelButton});
dd.setHelpCtx(new HelpCtx(PropertiesAction.class));
panel.putClientProperty("contentTitle", null); // NOI18N
panel.putClientProperty("DialogDescriptor", dd); // NOI18N
Dialog dialog = DialogDisplayer.getDefault().createDialog(dd);
dialog.getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getMessage(PropertiesAction.class, "ACSD_Properties_Dialog")); // NOI18N
dialog.pack();
dialog.setVisible(true);
if (dd.getValue() == okButton) {
hgProperties.updateLastSelection();
hgProperties.setProperties();
}
}
}
| 1,771 |
1,473 | <reponame>JiangJibo/pinpoint<gh_stars>1000+
/*
* Copyright 2018 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.profiler.context;
import com.navercorp.pinpoint.profiler.context.compress.SpanProcessor;
import com.navercorp.pinpoint.profiler.context.compress.SpanProcessorV1;
import com.navercorp.pinpoint.profiler.context.id.DefaultTraceRoot;
import com.navercorp.pinpoint.profiler.context.id.TraceRoot;
import com.navercorp.pinpoint.thrift.dto.TSpan;
import com.navercorp.pinpoint.thrift.dto.TSpanChunk;
import com.navercorp.pinpoint.thrift.dto.TSpanEvent;
import org.junit.Assert;
import org.junit.Test;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;
import com.navercorp.pinpoint.profiler.context.id.DefaultTraceId;
import java.util.Arrays;
/**
* @author emeroad
*/
public class SpanEventTest {
private final Logger logger = LogManager.getLogger(this.getClass());
private final SpanProcessor<TSpan, TSpanChunk> compressorV1 = new SpanProcessorV1();
@Test
public void testMarkStartTime() {
final DefaultTraceId traceId = new DefaultTraceId("agentId", 0, 0);
TraceRoot traceRoot = new DefaultTraceRoot(traceId, "agentId", System.currentTimeMillis(),0);
Span span = new Span(traceRoot);
span.markBeforeTime();
span.setElapsedTime((int) (span.getStartTime() + 10));
logger.debug("span:{}", span);
final SpanEvent spanEvent = new SpanEvent();
long currentTime = System.currentTimeMillis();
spanEvent.setStartTime(currentTime);
spanEvent.setElapsedTime(10);
logger.debug("spanEvent:{}", spanEvent);
span.setSpanEventList(Arrays.asList(spanEvent));
TSpan tSpan = new TSpan();
TSpanEvent tSpanEvent = new TSpanEvent();
tSpan.addToSpanEventList(tSpanEvent);
compressorV1.preProcess(span, tSpan);
compressorV1.postProcess(span, tSpan);
Assert.assertEquals("startTime", span.getStartTime() + tSpanEvent.getStartElapsed(), spanEvent.getStartTime());
Assert.assertEquals("endTime", span.getStartTime() + tSpanEvent.getStartElapsed() + spanEvent.getElapsedTime(), spanEvent.getAfterTime());
}
@Test
public void testGetStartTime() throws Exception {
}
@Test
public void testMarkEndTime() throws Exception {
}
@Test
public void testGetEndTime() throws Exception {
}
}
| 1,075 |
649 | package net.thucydides.core.webdriver.jquery;
public class ByJQuery {
public static ByJQuerySelector selector(final String selectorExpression) {
return new ByJQuerySelector(selectorExpression);
}
}
| 74 |
860 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.coordinator.scheduler;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import org.apache.samza.util.TableUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Scheduler invoked by each processor for heartbeating to a row of the table.
* Heartbeats every 5 seconds.
* The row is determined by the job model version and processor id passed to the scheduler.
* All time units are in SECONDS.
*/
public class HeartbeatScheduler implements TaskScheduler {
private static final Logger LOG = LoggerFactory.getLogger(HeartbeatScheduler.class);
private static final long HEARTBEAT_DELAY_SEC = 5;
private static final ThreadFactory PROCESSOR_THREAD_FACTORY =
new ThreadFactoryBuilder().setNameFormat("HeartbeatScheduler-%d").build();
private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor(PROCESSOR_THREAD_FACTORY);
private final String processorId;
private final TableUtils table;
private final AtomicReference<String> currentJMVersion;
private final Consumer<String> errorHandler;
public HeartbeatScheduler(Consumer<String> errorHandler, TableUtils table, AtomicReference<String> currentJMVersion, final String pid) {
this.table = table;
this.currentJMVersion = currentJMVersion;
processorId = pid;
this.errorHandler = errorHandler;
}
@Override
public ScheduledFuture scheduleTask() {
return scheduler.scheduleWithFixedDelay(() -> {
try {
String currJVM = currentJMVersion.get();
LOG.info("Updating heartbeat for processor ID: " + processorId + " and job model version: " + currJVM);
table.updateHeartbeat(currJVM, processorId);
} catch (Exception e) {
errorHandler.accept("Exception in Heartbeat Scheduler. Stopping the processor...");
}
}, HEARTBEAT_DELAY_SEC, HEARTBEAT_DELAY_SEC, TimeUnit.SECONDS);
}
@Override
public void setStateChangeListener(SchedulerStateChangeListener listener) {}
@Override
public void shutdown() {
LOG.info("Shutting down HeartbeatScheduler");
scheduler.shutdownNow();
}
} | 957 |
2,151 | <reponame>zealoussnow/chromium
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Base and helper classes for Google RESTful APIs."""
__all__ = ['add_sync_methods']
import httplib
import random
import time
from . import api_utils
try:
from google.appengine.api import app_identity
from google.appengine.ext import ndb
except ImportError:
from google.appengine.api import app_identity
from google.appengine.ext import ndb
def _make_sync_method(name):
"""Helper to synthesize a synchronous method from an async method name.
Used by the @add_sync_methods class decorator below.
Args:
name: The name of the synchronous method.
Returns:
A method (with first argument 'self') that retrieves and calls
self.<name>, passing its own arguments, expects it to return a
Future, and then waits for and returns that Future's result.
"""
def sync_wrapper(self, *args, **kwds):
method = getattr(self, name)
future = method(*args, **kwds)
return future.get_result()
return sync_wrapper
def add_sync_methods(cls):
"""Class decorator to add synchronous methods corresponding to async methods.
This modifies the class in place, adding additional methods to it.
If a synchronous method of a given name already exists it is not
replaced.
Args:
cls: A class.
Returns:
The same class, modified in place.
"""
for name in cls.__dict__.keys():
if name.endswith('_async'):
sync_name = name[:-6]
if not hasattr(cls, sync_name):
setattr(cls, sync_name, _make_sync_method(name))
return cls
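

# --- Illustrative sketch (not part of the original module) ---
# Demonstrates what add_sync_methods() synthesizes: a class that defines only
# `echo_async`, returning an object with get_result(), also gains a blocking
# `echo` wrapper. Both demo classes below are assumptions for illustration.
class _FakeFuture(object):

  def __init__(self, value):
    self._value = value

  def get_result(self):
    return self._value


class _EchoDemo(object):

  def echo_async(self, value):
    return _FakeFuture(value)


_EchoDemo = add_sync_methods(_EchoDemo)
# _EchoDemo().echo('ping') returns 'ping' via the synthesized sync wrapper.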
class _AE_TokenStorage_(ndb.Model):
"""Entity to store app_identity tokens in memcache."""
token = ndb.StringProperty()
expires = ndb.FloatProperty()
@ndb.tasklet
def _make_token_async(scopes, service_account_id):
"""Get a fresh authentication token.
Args:
scopes: A list of scopes.
service_account_id: Internal-use only.
Returns:
An tuple (token, expiration_time) where expiration_time is
seconds since the epoch.
"""
rpc = app_identity.create_rpc()
app_identity.make_get_access_token_call(rpc, scopes, service_account_id)
token, expires_at = yield rpc
raise ndb.Return((token, expires_at))
class _RestApi(object):
"""Base class for REST-based API wrapper classes.
This class manages authentication tokens and request retries. All
APIs are available as synchronous and async methods; synchronous
methods are synthesized from async ones by the add_sync_methods()
function in this module.
WARNING: Do NOT directly use this api. It's an implementation detail
and is subject to change at any release.
"""
_TOKEN_EXPIRATION_HEADROOM = random.randint(60, 600)
def __init__(self, scopes, service_account_id=None, token_maker=None,
retry_params=None):
"""Constructor.
Args:
scopes: A scope or a list of scopes.
token_maker: An asynchronous function of the form
(scopes, service_account_id) -> (token, expires).
retry_params: An instance of api_utils.RetryParams. If None, the
default for current thread will be used.
service_account_id: Internal use only.
"""
if isinstance(scopes, basestring):
scopes = [scopes]
self.scopes = scopes
self.service_account_id = service_account_id
self.make_token_async = token_maker or _make_token_async
self.token = None
if not retry_params:
retry_params = api_utils._get_default_retry_params()
self.retry_params = retry_params
def __getstate__(self):
"""Store state as part of serialization/pickling."""
return {'token': self.token,
'scopes': self.scopes,
'id': self.service_account_id,
'a_maker': None if self.make_token_async == _make_token_async
else self.make_token_async,
'retry_params': self.retry_params}
def __setstate__(self, state):
"""Restore state as part of deserialization/unpickling."""
self.__init__(state['scopes'],
service_account_id=state['id'],
token_maker=state['a_maker'],
retry_params=state['retry_params'])
self.token = state['token']
@ndb.tasklet
def do_request_async(self, url, method='GET', headers=None, payload=None,
deadline=None, callback=None):
"""Issue one HTTP request.
This is an async wrapper around urlfetch(). It adds an authentication
header and retries on a 401 status code. Upon other retriable errors,
it performs blocking retries.
"""
headers = {} if headers is None else dict(headers)
if self.token is None:
self.token = yield self.get_token_async()
headers['authorization'] = 'OAuth ' + self.token
deadline = deadline or self.retry_params.urlfetch_timeout
retry = False
resp = None
try:
resp = yield self.urlfetch_async(url, payload=payload, method=method,
headers=headers, follow_redirects=False,
deadline=deadline, callback=callback)
if resp.status_code == httplib.UNAUTHORIZED:
self.token = yield self.get_token_async(refresh=True)
headers['authorization'] = 'OAuth ' + self.token
resp = yield self.urlfetch_async(
url, payload=payload, method=method, headers=headers,
follow_redirects=False, deadline=deadline, callback=callback)
except api_utils._RETRIABLE_EXCEPTIONS:
retry = True
else:
retry = api_utils._should_retry(resp)
if retry:
retry_resp = api_utils._retry_fetch(
url, retry_params=self.retry_params, payload=payload, method=method,
headers=headers, follow_redirects=False, deadline=deadline)
if retry_resp:
resp = retry_resp
elif not resp:
raise
raise ndb.Return((resp.status_code, resp.headers, resp.content))
@ndb.tasklet
def get_token_async(self, refresh=False):
"""Get an authentication token.
The token is cached in memcache, keyed by the scopes argument.
Args:
refresh: If True, ignore a cached token; default False.
Returns:
An authentication token.
"""
if self.token is not None and not refresh:
raise ndb.Return(self.token)
key = '%s,%s' % (self.service_account_id, ','.join(self.scopes))
ts = yield _AE_TokenStorage_.get_by_id_async(
key, use_cache=True, use_memcache=True,
use_datastore=self.retry_params.save_access_token)
if ts is None or ts.expires < (time.time() +
self._TOKEN_EXPIRATION_HEADROOM):
token, expires_at = yield self.make_token_async(
self.scopes, self.service_account_id)
timeout = int(expires_at - time.time())
ts = _AE_TokenStorage_(id=key, token=token, expires=expires_at)
if timeout > 0:
yield ts.put_async(memcache_timeout=timeout,
use_datastore=self.retry_params.save_access_token,
use_cache=True, use_memcache=True)
self.token = ts.token
raise ndb.Return(self.token)
def urlfetch_async(self, url, **kwds):
"""Make an async urlfetch() call.
This just passes the url and keyword arguments to NDB's async
urlfetch() wrapper in the current context.
This returns a Future despite not being decorated with @ndb.tasklet!
"""
ctx = ndb.get_context()
return ctx.urlfetch(url, **kwds)
_RestApi = add_sync_methods(_RestApi)
| 3,050 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.jumpto.symbol;
import org.netbeans.spi.jumpto.symbol.SymbolDescriptor;
import java.awt.Dialog;
import java.awt.event.ActionEvent;
import java.io.IOException;
import java.util.logging.Logger;
import javax.swing.AbstractAction;
import javax.swing.JButton;
import javax.swing.JEditorPane;
import org.netbeans.api.project.ui.OpenProjects;
import org.netbeans.editor.JumpList;
import org.openide.ErrorManager;
import org.openide.cookies.EditorCookie;
import org.openide.nodes.Node;
import org.openide.text.NbDocument;
import org.openide.util.NbBundle;
import org.openide.windows.TopComponent;
/**
* @author <NAME>
* @author <NAME>
*/
public class GoToSymbolAction extends AbstractAction {
static final Logger LOGGER = Logger.getLogger(GoToSymbolAction.class.getName()); // Used from the panel as well
private String title;
/** Creates a new instance of OpenTypeAction */
public GoToSymbolAction() {
this(NbBundle.getMessage( GoToSymbolAction.class, "DLG_GoToSymbol"));
}
public GoToSymbolAction(String title) {
super( NbBundle.getMessage( GoToSymbolAction.class,"TXT_GoToSymbol") );
this.title = title;
}
@Override
public void actionPerformed( ActionEvent e ) {
SymbolDescriptor typeDescriptor = getSelectedSymbol();
if (typeDescriptor != null) {
JumpList.checkAddEntry();
typeDescriptor.open();
}
}
public SymbolDescriptor getSelectedSymbol() {
SymbolDescriptor result = null;
try {
final JButton okButton = new JButton (NbBundle.getMessage(GoToSymbolAction.class, "CTL_OK"));
final ContentProviderImpl cp = new ContentProviderImpl(okButton);
final GoToPanelImpl panel = new GoToPanelImpl(cp);
final Dialog dialog = DialogFactory.createDialog(title, panel, cp, okButton);
cp.setDialog(dialog);
Node[] arr = TopComponent.getRegistry ().getActivatedNodes();
String initSearchText;
if (arr.length > 0) {
EditorCookie ec = arr[0].getCookie (EditorCookie.class);
if (ec != null) {
JEditorPane recentPane = NbDocument.findRecentEditorPane(ec);
if (recentPane != null) {
initSearchText = org.netbeans.editor.Utilities.getSelectionOrIdentifier(recentPane);
if (initSearchText != null && org.openide.util.Utilities.isJavaIdentifier(initSearchText)) {
panel.setInitialText(initSearchText);
}
}
}
}
dialog.setVisible(true);
result = panel.getSelectedSymbol();
} catch (IOException ex) {
ErrorManager.getDefault().notify(ex);
}
return result;
}
@Override
public boolean isEnabled () {
return OpenProjects.getDefault().getOpenProjects().length>0;
}
}
| 1,588 |
754 | <reponame>wangpengcheng/AutoKernel
#ifndef DATA_TRANSFORM_H
#define DATA_TRANSFORM_H
/** \file
*
 * Defines analyses to extract the functions called by a function.
*/
#include <map>
#include <string>
#include <set>
#include "iostream"
#include <unordered_set>
#include "Halide.h"
#include "BoundEstimate.h"
#include "DataOP.h"
namespace Halide {
namespace Internal {
using std::map;
using std::pair;
using std::string;
static bool data_transform_debug = false;
namespace {
/* Find all the internal halide calls in an expr */
template <typename T>
class DataTransformer : public IRVisitor {
private:
std::string target_;
Func replace_func_;
T op_;
public:
//map<string, Function> calls;
using IRVisitor::visit;
DataTransformer() = default;
DataTransformer(Func target_func,Func func,T op){
replace_func_ = func;
target_ = target_func.name();
op_ = op;
}
void visit(const Add *add) override {
IRVisitor::visit(add);
Add *node = const_cast<Add *>(reinterpret_cast<const Add *>(add)) ;
if (const Halide::Internal::Call *v= node->a.as<Halide::Internal::Call>()) {
if (v->name==target_)
{
auto expr = v->args;
op_(expr);
node->a = replace_func_(expr);
}
}
if (const Halide::Internal::Call *v= node->b.as<Halide::Internal::Call>()) {
if (v->name==target_)
{
auto expr = v->args;
op_(expr);
node->b = replace_func_(expr);
}
}
}
void visit(const Mul *mul) override {
IRVisitor::visit(mul);
Mul *node = const_cast<Mul *>(reinterpret_cast<const Mul *>(mul)) ;
if (const Halide::Internal::Call *v= node->a.as<Halide::Internal::Call>()) {
if (v->name==target_)
{
auto expr = v->args;
op_(expr);
node->a = replace_func_(expr);
}
}
if (const Halide::Internal::Call *v= node->b.as<Halide::Internal::Call>()) {
if (v->name==target_)
{
auto expr = v->args;
op_(expr);
node->b = replace_func_(expr);
}
}
}
void visit(const Sub *sub) override {
IRVisitor::visit(sub);
Sub *node = const_cast<Sub *>(reinterpret_cast<const Sub *>(sub)) ;
if (const Halide::Internal::Call *v= node->a.as<Halide::Internal::Call>()) {
if (v->name==target_)
{
auto expr = v->args;
op_(expr);
node->a = replace_func_(expr);
}
}
if (const Halide::Internal::Call *v= node->b.as<Halide::Internal::Call>()) {
if (v->name==target_)
{
auto expr = v->args;
op_(expr);
node->b = replace_func_(expr);
}
}
}
void visit(const Div *div) override {
IRVisitor::visit(div);
Div *node = const_cast<Div *>(reinterpret_cast<const Div *>(div)) ;
if (const Halide::Internal::Call *v= node->a.as<Halide::Internal::Call>()) {
if (v->name==target_)
{
auto expr = v->args;
op_(expr);
node->a = replace_func_(expr);
}
}
if (const Halide::Internal::Call *v= node->b.as<Halide::Internal::Call>()) {
if (v->name==target_)
{
auto expr = v->args;
op_(expr);
node->b = replace_func_(expr);
}
}
}
};
} // namespace
template<typename T>
void data_transform_impl(Function f,Func target)
{
std::map<std::string, Function> env;
populate_environment(f, env);
Function target_function;
bool flag = false;
for (auto iter = env.begin();iter!=env.end();iter++){
if (iter->first==target.name()){
flag = true;
break;
}
}
if (!flag)
{
std::cout<<"erro file name:"<<target.name()<<std::endl;
return;
}
T op;
std::string new_name = target.name()+op.name();
Func newfunc(new_name);
op(newfunc,target);
for (auto iter = env.begin();iter!=env.end();iter++){
if (iter->first!=target.name()){
DataTransformer<T> tf(target,newfunc,op);
iter->second.accept(&tf);
}
}
}
void data_transform(std::vector<Function> &outputs,Func target,DataTransformMethod method){
for (Function &o : outputs) {
switch( method)
{
case DataTransformMethod::REORDER:
data_transform_impl<ReorderOP>(o, target);
break;
case DataTransformMethod::INTERLEAVE:
data_transform_impl<InterleaveOP>(o,target);
break;
case DataTransformMethod::SPLITY:
data_transform_impl<SplitYOP>(o,target);
break;
default:
//TODO error?
break;
}
}
}
double compute_layout_cost_impl(std::vector<Function> &outputs);
std::vector<Function> deep_copy(std::vector<Function> &func)
{
std::map<std::string, Function> env;
for (Function f : func) {
populate_environment(f, env);
}
auto copy_pair = deep_copy(func,env);
return copy_pair.first;
}
std::vector<Function> auto_data_transform(const Pipeline &p)
{
std::vector<Function> outputs;
for (Func f : p.outputs()) {
outputs.push_back(f.function());
}
string use_data_transform = get_env_variable("HL_USE_DATA_TRANSFORM");
if (use_data_transform!="True")
return outputs;
if (data_transform_debug)
std::cout<<"\ndata transform info:\n"<<std::endl;
string data_debug = get_env_variable("HL_DEBUG_DATA_TRANSFORM");
if (data_debug=="True")
data_transform_debug =true;
else
data_transform_debug =false;
double min_cost=compute_layout_cost_impl(outputs);;
bool flag=true;
auto best_outputs = outputs;
std::vector<string> schedule;
while (flag)
{
flag=false;
//std::vector<Function> temp_outputs = deep_copy(best_outputs);
std::vector<Function> copy_outputs = deep_copy(best_outputs);
std::vector<Function> inputs = find_input_function(copy_outputs);
int best_input_idx=-1;
Halide::Internal::DataTransformMethod best_op;
for (unsigned int i=0;i<inputs.size();i++)
{
Func inp = Func(inputs[i]);
data_transform(copy_outputs,inp,Halide::Internal::DataTransformMethod::INTERLEAVE);
double interleave_cost = compute_layout_cost_impl(copy_outputs);
if (interleave_cost<min_cost)
{
//std::cout<<"smaller"<<std::endl;
//data_transform(temp_outputs,inp,Halide::Internal::DataTransformMethod::INTERLEAVE);
best_op = Halide::Internal::DataTransformMethod::INTERLEAVE;
best_input_idx = i;
min_cost=interleave_cost;
flag=true;
}
copy_outputs = deep_copy(best_outputs);
}
//inputs = find_input_function(copy_outputs);
for (unsigned int i=0;i<inputs.size();i++)
{
Func inp = Func(inputs[i]);
data_transform(copy_outputs,inp,Halide::Internal::DataTransformMethod::REORDER);
double reorder_cost = compute_layout_cost_impl(copy_outputs);
if (reorder_cost<min_cost)
{
best_op = Halide::Internal::DataTransformMethod::REORDER;
best_input_idx = i;
min_cost=reorder_cost;
flag=true;
}
copy_outputs = deep_copy(best_outputs);
}
//inputs = find_input_function(copy_outputs);
for (unsigned int i=0;i<inputs.size();i++)
{
Func inp = Func(inputs[i]);
data_transform(copy_outputs,inp,Halide::Internal::DataTransformMethod::SPLITY);
double split_cost = compute_layout_cost_impl(copy_outputs);
            if (split_cost<min_cost)
            {
                best_op = Halide::Internal::DataTransformMethod::SPLITY;
                best_input_idx = i;
                min_cost=split_cost;
                flag=true;
            }
copy_outputs = deep_copy(best_outputs);
}
std::vector<Function> best_inputs = find_input_function(best_outputs);
if (flag)
{
Func inp = Func(best_inputs[best_input_idx]);
data_transform(best_outputs,inp,best_op);
schedule.push_back(scheduler_name(best_op)+"->"+inp.name());
}
}
if (data_transform_debug)
{
if (schedule.size()==0)
{
std::cout<<"the best schedule is Original one,we do not change the data layout"<<std::endl;
}else{
std::cout<<"the best schedule is:"<<std::endl;
for(auto str:schedule)
{
std::cout<<" "<<str;
}
std::cout<<std::endl;
std::cout<<"the min cost is :"<<min_cost<<std::endl;
}
}
return best_outputs;
}
const int ORDER_COST_LIST[3] = {512*32,512*32*2,512*32*32};
double sigmoid(double x)
{
return 1.0/(1+std::exp(-x));
}
double compute_order_cost(const Definition &def,std::string function_name,const std::string &target, std::map<std::string,std::pair<int,int> >& bounds)
{
//func(x,y) = input_a(k,y)*input_b(x,k)
const StageSchedule &s = def.schedule();
std::vector<std::string> loop_order;
for (auto d:s.dims())
{
if (d.var.find("outermost")==string::npos)
{
loop_order.emplace_back(d.var);
}
}
std::vector<Expr> rvalue = def.values();
std::unordered_map<std::string,int> args_to_idx;
for (unsigned int i=0;i<loop_order.size();i++)
{
args_to_idx[loop_order[i]] = i;
}
FindOrder findorder(target,args_to_idx);
// def.accept(&findlooporder);
for (Expr expr:rvalue)
{
expr.accept(&findorder);
}
double res=0.0;
if (findorder.IsMisorder())
{
std::string first_var = target+".0";
std::string second_var = target+".1";
int range_first = bounds[first_var].second-bounds[first_var].first+1;
int range_second = bounds[second_var].second-bounds[second_var].first+1;
int range_fastest = range_first*range_second;
if (range_fastest>ORDER_COST_LIST[0]&&range_fastest<=ORDER_COST_LIST[1])
{
res=0.0000075;
int offset=0;
for (unsigned int i=0;i<loop_order.size();i++)
{
double range = 1.0;
if (bounds.find(loop_order[i])!=bounds.end())
{
//RDom
offset++;
range = 1.0*(bounds[loop_order[i]].second-bounds[loop_order[i]].first+1);
}else
{
std::string var = function_name +"."+std::to_string(i-offset);
range = 1.0*(bounds[var].second-bounds[var].first+1);
}
res*= range;
}
}else if (range_fastest>ORDER_COST_LIST[1]&&range_fastest<=ORDER_COST_LIST[2])
{
res=0.000095;
int offset=0;
for (unsigned int i=0;i<loop_order.size();i++)
{
double range = 1.0;
if (bounds.find(loop_order[i])!=bounds.end())
{
//RDom
offset++;
range = 1.0*(bounds[loop_order[i]].second-bounds[loop_order[i]].first+1);
}else
{
std::string var = function_name +"."+std::to_string(i-offset);
range = 1.0*(bounds[var].second-bounds[var].first+1);
}
res*= range;
}
}else if (range_fastest>ORDER_COST_LIST[2])
{
res=0.000395;
int offset=0;
for (unsigned int i=0;i<loop_order.size();i++)
{
double range = 1.0;
if (bounds.find(loop_order[i])!=bounds.end())
{
//RDom
offset++;
range = 1.0*(bounds[loop_order[i]].second-bounds[loop_order[i]].first+1);
}else
{
std::string var = function_name +"."+std::to_string(i-offset);
range = 1.0*(bounds[var].second-bounds[var].first+1);
//std::cout<<"shape var:"<<var<<" range:"<<range<<std::endl;
}
res*= range;
}
}
}
return res;
}
double compute_use_distance(const Definition &def,const std::string &target,std::map<std::string,std::pair<int,int> >& bounds)
{
const StageSchedule &s = def.schedule();
std::vector<std::string> loop_order;
for (auto d:s.dims())
{
if (d.var.find("outermost")==string::npos)
{
loop_order.emplace_back(d.var);
}
}
std::vector<Expr> rvalue = def.values();
std::unordered_map<std::string,int> args_to_idx;
for (unsigned int i=0;i<loop_order.size();i++)
{
args_to_idx[loop_order[i]] = i;
}
FindOrder findorder(target,args_to_idx);
// def.accept(&findlooporder);
for (Expr expr:rvalue)
{
expr.accept(&findorder);
}
auto orders = findorder.get_orders();
std::string fastest_var = loop_order[0];
int idx=-1;
for (unsigned int i=0;i<orders.size();i++)
{
if (orders[i].find(fastest_var)!=orders[i].end())
{
idx=i;
break;
}
}
double res=0.1;
if(idx==-1){
//TODO
std::cout<<"warning:var "<<fastest_var<<" not in the function"<<std::endl;
res =0.0;
}
if (idx==0){
// the fastest var are also in the first order of target function
res= 0.0;
}
std::string func_name = findorder.get_target();
for (int i=0;i<idx;i++)
{
std::string bound_name = func_name+"."+std::to_string(i);
double range=1.0*(bounds[bound_name].second-bounds[bound_name].first+1);
res *= range;
}
return res;
}
double compute_layout_cost_impl(std::vector<Function> &outputs){
std::vector<Function> inputs = find_input_function(outputs);
double cost=0;
for (Function &func:inputs){
std::string target_name = func.name();
std::map<std::string,std::pair<int,int> > bounds;
estimate_bound(outputs,bounds);
std::vector<std::pair<Definition,std::string>> consumers = find_definition(outputs,target_name);
for (std::pair<Definition,std::string> &consumer:consumers)
{
double cost_order = compute_order_cost(consumer.first,consumer.second,target_name,bounds);
double cost_distance = compute_use_distance(consumer.first,target_name,bounds);
cost += cost_order+cost_distance;
}
}
return cost;
}
} // namespace Internal
} // namespace Halide
#endif
| 7,965 |
369 | // Copyright (c) 2017-2021, Mudit<NAME>. All rights reserved.
// For licensing, see https://github.com/mudita/MuditaOS/LICENSE.md
#include <catch2/catch.hpp>
#include <module-bsp/board/rt1051/puretx/bsp/battery_charger/battery_charger_utils.hpp>
TEST_CASE("BatteryChargerUtilsTest")
{
SECTION("2s compliment conversion test")
{
constexpr std::array uint8Fixtures = {std::make_pair<std::uint8_t, int>(0x00, 0),
std::make_pair<std::uint8_t, int>(0x80, -128),
std::make_pair<std::uint8_t, int>(0xFF, -1),
std::make_pair<std::uint8_t, int>(0x7F, 127)};
constexpr std::array uint16Fixtures = {std::make_pair<std::uint16_t, int>(0x0000, 0),
std::make_pair<std::uint16_t, int>(0x8000, -32768),
std::make_pair<std::uint16_t, int>(0xFFFF, -1),
std::make_pair<std::uint16_t, int>(0x7FFF, 32767)};
constexpr std::array uint32Fixtures = {std::make_pair<std::uint32_t, int>(0x00000000, 0),
std::make_pair<std::uint32_t, int>(0x80000000, -2147483648),
std::make_pair<std::uint32_t, int>(0xFFFFFFFF, -1),
std::make_pair<std::uint32_t, int>(0x7FFFFFFF, 2147483647)};
for (const auto &fixture : uint8Fixtures) {
CHECK(bsp::battery_charger::utils::twosComplimentToInt(fixture.first) == fixture.second);
}
for (const auto &fixture : uint16Fixtures) {
CHECK(bsp::battery_charger::utils::twosComplimentToInt(fixture.first) == fixture.second);
}
for (const auto &fixture : uint32Fixtures) {
CHECK(bsp::battery_charger::utils::twosComplimentToInt(fixture.first) == fixture.second);
}
}
}
| 1,110 |
4,798 | // Copyright 2018-present the Material Components for iOS authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#import <Foundation/Foundation.h>
/**
The MDCBottomDrawerState enum provides the different possible states the bottom drawer can be in.
 There are 3 different states for the bottom drawer:
- MDCBottomDrawerStateCollapsed: This state is reached when the bottom drawer is collapsed
(can be expanded to present more content), but is not taking up the entire screen.
- MDCBottomDrawerStateExpanded: This state is reached when the bottom drawer is fully expanded
(displaying the entire content), but is not taking up the entire screen.
- MDCBottomDrawerStateFullScreen: This state is reached when the bottom drawer
is in full screen.
*/
typedef NS_ENUM(NSUInteger, MDCBottomDrawerState) {
MDCBottomDrawerStateCollapsed = 0,
MDCBottomDrawerStateExpanded = 1,
MDCBottomDrawerStateFullScreen = 2,
};
| 371 |
1,565 | <gh_stars>1000+
#pragma once
#include "RunNode.h"
#include <vector>
#include <string>
class Talk : public RunNode
{
public:
Talk() {}
Talk(std::string c, int h = -1) : Talk() { setContent(c); setHeadID(h); }
virtual ~Talk() {}
virtual void draw() override;
//virtual void dealEvent(BP_Event& e) override;
virtual void onPressedOK() override;
private:
std::string content_;
int head_id_ = -1;
int head_style_ = 0;
int current_line_ = 0;
const int width_ = 40;
const int height_ = 5;
std::vector<std::string> content_lines_;
public:
void setContent(std::string c) { content_ = c; }
void setHeadID(int h) { head_id_ = h; }
void setHeadStyle(int s) { head_style_ = s; }
virtual void onEntrance() override;
DEFAULT_CANCEL_EXIT;
};
| 320 |
4,795 | #!/usr/bin/env python
'''CREATED:2013-12-08 14:28:34 by <NAME> <<EMAIL>>
Demonstration of phase vocoder time stretching.
'''
from __future__ import print_function
import argparse
import sys
import librosa
import soundfile as sf
def stretch_demo(input_file, output_file, speed):
'''Phase-vocoder time stretch demo function.
:parameters:
- input_file : str
path to input audio
- output_file : str
path to save output (wav)
- speed : float > 0
speed up by this factor
'''
# 1. Load the wav file, resample
print('Loading ', input_file)
y, sr = librosa.load(input_file)
# 2. Time-stretch through effects module
print('Playing back at {:3.0f}% speed'.format(speed * 100))
y_stretch = librosa.effects.time_stretch(y, speed)
print('Saving stretched audio to: ', output_file)
sf.write(output_file, y_stretch, sr)
def process_arguments(args):
'''Argparse function to get the program parameters'''
parser = argparse.ArgumentParser(description='Time stretching example')
parser.add_argument('input_file',
action='store',
help='path to the input file (wav, mp3, etc)')
parser.add_argument('output_file',
action='store',
help='path to save the stretched output')
parser.add_argument('-s', '--speed',
action='store',
type=float,
default=2.0,
required=False,
help='speed')
return vars(parser.parse_args(args))
if __name__ == '__main__':
# get the parameters
parameters = process_arguments(sys.argv[1:])
# Run the HPSS code
stretch_demo(parameters['input_file'],
parameters['output_file'],
parameters['speed'])
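    # Example invocation (assuming this script is saved as stretch_demo.py and
    # that librosa and soundfile are installed):
    #
    #     python stretch_demo.py input.wav stretched.wav --speed 1.5
    #
    # writes a copy sped up to 150%; a --speed below 1.0 slows the audio down.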
| 839 |
648 | {"resourceType":"DataElement","id":"Measure.supplementalData.usage","meta":{"lastUpdated":"2017-04-19T07:44:43.294+10:00"},"url":"http://hl7.org/fhir/DataElement/Measure.supplementalData.usage","status":"draft","experimental":true,"stringency":"fully-specified","element":[{"id":"Measure.supplementalData.usage","path":"Measure.supplementalData.usage","short":"supplemental-data | risk-adjustment-factor","definition":"An indicator of the intended usage for the supplemental data element. Supplemental data indicates the data is additional information requested to augment the measure information. Risk adjustment factor indicates the data is additional information used to calculate risk adjustment factors when applying a risk model to the measure calculation.","min":0,"max":"*","type":[{"code":"CodeableConcept"}],"binding":{"extension":[{"url":"http://hl7.org/fhir/StructureDefinition/elementdefinition-bindingName","valueString":"MeasureDataUsage"}],"strength":"extensible","description":"The intended usage for supplemental data elements in the measure","valueSetReference":{"reference":"http://hl7.org/fhir/ValueSet/measure-data-usage"}}}]} | 274 |
945 | <filename>arch/arm/arm.h
/* arm.h -- check for ARM features.
* For conditions of distribution and use, see copyright notice in zlib.h
*/
#ifndef ARM_H_
#define ARM_H_
extern int arm_cpu_has_neon;
extern int arm_cpu_has_crc32;
void Z_INTERNAL arm_check_features(void);
#endif /* ARM_H_ */
| 112 |
837 | <gh_stars>100-1000
import unittest
import base
class Suite(base.Base):
def test_1(self):
"""Test case 1"""
self.start('main')
bd = self.driver.find_element_by_xpath('/html/body')
bd.click()
alert = self.driver.switch_to.alert
self.assertEqual("You clicked the body.", alert.text)
alert.accept()
bd.send_keys('h')
alert = self.driver.switch_to.alert
self.assertEqual("Key", alert.text)
alert.accept()
| 228 |
524 | <gh_stars>100-1000
#include <iostream>
#include <random>
#include <unordered_map>
#include <algorithm>
#include <iomanip>
using namespace std;
int main() {
unordered_map<uint32_t, uint32_t> mem;
random_device rd;
mt19937 gen(rd());
uniform_int_distribution<uint32_t> dist(0, 0xfffffffful);
cout<<hex;
// Generate writes
for(uint32_t i = 0; i < 256; ++i)
mem[i] = dist(gen);
for(uint32_t i = 0; i < 256; ++i)
cout<<"w "<<i*4<<" "<<mem[i]<<endl;
for(uint32_t i = 0; i < 256; ++i)
cout<<"r "<<i*4<<" "<<mem[i]<<endl;
// Rewrite
for(uint32_t i = 0; i < 256; ++i)
mem[i] = dist(gen);
for(uint32_t i = 0; i < 256; ++i)
cout<<"w "<<i*4<<" "<<mem[i]<<endl;
for(uint32_t i = 0; i < 256; ++i)
cout<<"r "<<i*4<<" "<<mem[i]<<endl;
// Random read
vector<uint32_t> addrs;
addrs.reserve(256);
for(uint32_t i = 0; i<256; ++i)
addrs.push_back(i);
shuffle(addrs.begin(), addrs.end(), gen);
for(const auto &addr : addrs)
cout<<"r "<<addr*4<<" "<<mem[addr]<<endl;
}
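// Sample of the emitted trace (data words vary per run since they are random);
// each line is "<op> <byte address, hex> <data word, hex>":
//
//     w 0 5bd4c9e1
//     w 4 13a07f26
//     ...
//     r 3fc 9e0a12cd
//
// The three phases exercise write-then-read-back, overwrite, and randomly
// ordered reads of the same 256-word region.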
| 490 |
475 | //
// BmobEvent.h
// BmobSDK
//
// Created by Bmob on 14-7-4.
//  Copyright (c) 2014 Bmob. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "BmobConfig.h"
@protocol BmobEventDelegate ;
@interface BmobEvent : NSObject{
}
@property(weak,nonatomic)id<BmobEventDelegate>delegate;
-(instancetype)init;
/**
 *  Creates the shared BmobEvent instance (singleton)
 *
 *  @return the shared BmobEvent object
*/
+(instancetype)defaultBmobEvent;
/**
 *  Start
*/
-(void)start;
/**
 *  Stop
*/
-(void)stop;
/**
 *  Subscribe to change events on a table
 *
 *  @param actionType the action to listen for (table update or table delete)
 *  @param tableName  the table name
*/
-(void)listenTableChange:(BmobActionType)actionType tableName:(NSString *)tableName;
/**
 *  Subscribe to change events on a row
 *
 *  @param actionType the action to listen for (row update or row delete)
 *  @param tableName  the table name
 *  @param objectId   the objectId of the row
*/
-(void)listenRowChange:(BmobActionType)actionType tableName:(NSString *)tableName objectId:(NSString *)objectId;
/**
 *  Cancel the subscription to table change events
 *
 *  @param actionType the action being listened for (table update or table delete)
 *  @param tableName  the table name
*/
-(void)cancelListenTableChange:(BmobActionType)actionType tableName:(NSString *)tableName;
/**
 *  Cancel the subscription to row change events
 *
 *  @param actionType the action being listened for (row update or row delete)
 *  @param tableName  the table name
 *  @param objectId   the objectId of the row
*/
-(void)cancelListenRowChange:(BmobActionType)actionType tableName:(NSString *)tableName objectId:(NSString *)objectId;
@end
@protocol BmobEventDelegate <NSObject>
@optional
/**
 *  Called when the connection to the server has been established
 *
 *  @param event the BmobEvent object
*/
-(void)bmobEventDidConnect:(BmobEvent *)event;
/**
 *  Called when the server cannot be reached
 *
 *  @param event the BmobEvent object
 *  @param error the error information
*/
-(void)bmobEventDidDisConnect:(BmobEvent *)event error:(NSError *)error;
/**
 *  Called when subscriptions can be started or cancelled
 *
 *  @param event the BmobEvent object
*/
-(void)bmobEventCanStartListen:(BmobEvent*)event;
/**
 *  Called when the BmobEvent encounters an error
 *
 *  @param event the BmobEvent object
 *  @param error the error information
*/
-(void)bmobEvent:(BmobEvent*)event error:(NSError *)error;
/**
 *  Called when a message is received for a subscribed event
 *
 *  @param event   the BmobEvent object
 *  @param message the message content
*/
-(void)bmobEvent:(BmobEvent *)event didReceiveMessage:(NSString *)message;
@end | 1,058 |
3,861 | <reponame>crazyinsanejames/CloverBootloader
/*
* resolution.h
*
*
* Created by <NAME> on 3/4/10.
* Copyright 2009. All rights reserved.
*
*/
#ifndef __RESOLUTION_H
#define __RESOLUTION_H
#include "shortatombios.h"
#include "edid.h"
//Slice - moved to edid.h
/*
typedef struct _edid_mode {
UINT16 pixel_clock;
UINT16 h_active;
UINT16 h_blanking;
UINT16 v_active;
UINT16 v_blanking;
UINT16 h_sync_offset;
UINT16 h_sync_width;
UINT16 v_sync_offset;
UINT16 v_sync_width;
} edid_mode;
*/
VOID patchVideoBios();
/* Copied from 915 resolution created by <NAME>
*
* This code is based on the techniques used in :
*
* - 855patch. Many thanks to <NAME> (czietz gmx net)
* for demonstrating how to shadow the VBIOS into system RAM
* and then modify it.
*
* - 1280patch by <NAME> (andrewtipton null li).
*
* - 855resolution by <NAME>
*
* This source code is into the public domain.
*/
#define VBIOS_START 0xc0000
#define VBIOS_SIZE 0x10000
#define MODE_TABLE_OFFSET_845G 617
#define ATI_SIGNATURE1 "ATI MOBILITY RADEON"
#define ATI_SIGNATURE2 "ATI Technologies Inc"
#define NVIDIA_SIGNATURE "NVIDIA Corp"
#define INTEL_SIGNATURE "Intel Corp"
/*
* NVidia Defines and structures
*/
#define OFFSET_TO_VESA_TABLE_INDEX 2
typedef struct {
CHAR8 ucTable_Major;
CHAR8 ucTable_Minor;
CHAR8 ucTable_Rev;
UINT16 usTable_Size;
} __attribute__((packed)) NV_COMMON_TABLE_HEADER;
typedef struct {
INT16 reserved1;
INT16 reserved2;
INT16 reserved3;
} __attribute__((packed)) NV_RESERVED;
typedef struct {
UINT16 usPixel_Clock;
UINT16 usH_Active;
NV_RESERVED reserved1;
UINT16 usH_SyncStart;
UINT16 usH_SyncEnd;
UINT16 usH_Total;
UINT16 usV_Active;
NV_RESERVED reserved2;
UINT16 usV_SyncStart;
UINT16 usV_SyncEnd;
UINT16 usV_Total;
UINT16 reserved3;
} __attribute__((packed)) NV_MODELINE;
typedef struct {
NV_COMMON_TABLE_HEADER sHeader;
NV_MODELINE * sModelines;
} __attribute__((packed)) NV_VESA_TABLE;
/*---*/
typedef enum {
CT_UNKNOWN, CT_UNKNOWN_INTEL, CT_830, CT_845G, CT_855GM, CT_865G,
CT_915G, CT_915GM, CT_945G, CT_945GM, CT_945GME, CT_946GZ,
CT_955X, CT_G965, CT_Q965, CT_965GM, CT_975X,
CT_P35, CT_X48, CT_B43, CT_Q45, CT_P45,
CT_GM45, CT_G41, CT_G31, CT_G45, CT_500, CT_3150
} chipset_type;
typedef enum {
BT_UNKNOWN, BT_1, BT_2, BT_3, BT_ATI_1, BT_ATI_2, BT_NVDA, BT_INTEL
} bios_type;
typedef struct {
CHAR8 *base;
ATOM_ROM_HEADER *AtomRomHeader;
UINT16 *MasterCommandTables;
UINT16 *MasterDataTables;
} bios_tables_t;
typedef struct {
UINT8 mode;
UINT8 bits_per_pixel;
UINT16 resolution;
UINT8 unknown;
} __attribute__((packed)) vbios_mode;
typedef struct {
UINT8 unknow1[2];
UINT8 x1;
UINT8 x_total;
UINT8 x2;
UINT8 y1;
UINT8 y_total;
UINT8 y2;
} __attribute__((packed)) vbios_resolution_type1;
typedef struct {
UINT32 clock;
UINT16 x1;
UINT16 htotal;
UINT16 x2;
UINT16 hblank;
UINT16 hsyncstart;
UINT16 hsyncend;
UINT16 y1;
UINT16 vtotal;
UINT16 y2;
UINT16 vblank;
UINT16 vsyncstart;
UINT16 vsyncend;
} __attribute__((packed)) vbios_modeline_type2;
typedef struct {
UINT8 xCHAR8s;
UINT8 yCHAR8s;
UINT8 unknown[4];
vbios_modeline_type2 modelines[];
} __attribute__((packed)) vbios_resolution_type2;
typedef struct {
UINT32 clock;
UINT16 x1;
UINT16 htotal;
UINT16 x2;
UINT16 hblank;
UINT16 hsyncstart;
UINT16 hsyncend;
UINT16 y1;
UINT16 vtotal;
UINT16 y2;
UINT16 vblank;
UINT16 vsyncstart;
UINT16 vsyncend;
UINT16 timing_h;
UINT16 timing_v;
UINT8 unknown[6];
} __attribute__((packed)) vbios_modeline_type3;
typedef struct {
CHAR8 unknown[6];
vbios_modeline_type3 modelines[];
} __attribute__((packed)) vbios_resolution_type3;
typedef struct {
UINT32 chipset_id;
chipset_type chipset;
bios_type bios;
bios_tables_t ati_tables;
UINT32 bios_fd;
CHAR8* bios_ptr;
vbios_mode * mode_table;
CHAR8 * ati_mode_table;
CHAR8 * nv_mode_table;
UINT32 mode_table_size;
UINT8 b1, b2;
UINT8 unlocked;
} vbios_map;
vbios_map * open_vbios(chipset_type);
VOID close_vbios (vbios_map*);
VOID unlock_vbios(vbios_map*);
VOID relock_vbios(vbios_map*);
VOID set_mode(vbios_map*, UINT32, UINT32, UINT32, UINT32, UINT32);
#endif //__RESOLUTION_H
| 1,989 |
4,772 | package example.repo;
import example.model.Customer365;
import java.util.List;
import org.springframework.data.repository.CrudRepository;
public interface Customer365Repository extends CrudRepository<Customer365, Long> {
List<Customer365> findByLastName(String lastName);
}
| 83 |
485 | package io.indexr.query;
import java.util.Iterator;
import io.indexr.query.plan.logical.LogicalPlan;
import io.indexr.query.plan.physical.PhysicalPlan;
import io.indexr.query.row.InternalRow;
public class QueryExecution {
private QueryContext queryContext;
private LogicalPlan logicalPlan;
public QueryExecution(QueryContext queryContext, LogicalPlan logicalPlan) {
this.queryContext = queryContext;
this.logicalPlan = logicalPlan;
}
private LogicalPlan analyzedPlan;
private LogicalPlan optimizedPlan;
private PhysicalPlan physicalPlan;
private Iterator<InternalRow> result;
public LogicalPlan logicalPlan() {
return logicalPlan;
}
public LogicalPlan analyzedPlan() {
if (analyzedPlan == null) {
analyzedPlan = queryContext.analyzer().execute(logicalPlan);
new CheckAnalysis().checkAnalysis(analyzedPlan);
}
return analyzedPlan;
}
public LogicalPlan optimizedPlan() {
if (optimizedPlan == null) {
optimizedPlan = queryContext.optimizer().execute(analyzedPlan());
}
return optimizedPlan;
}
public PhysicalPlan physicalPlan() {
if (physicalPlan == null) {
physicalPlan = queryContext.planner().plan(optimizedPlan()).next();
}
return physicalPlan;
}
public Iterator<InternalRow> result() {
if (result == null) {
result = physicalPlan().execute();
}
return result;
}
}
| 574 |
311 | package org.joyqueue.broker.kafka.network.codec;
import io.netty.buffer.ByteBuf;
import org.joyqueue.broker.kafka.KafkaCommandType;
import org.joyqueue.broker.kafka.command.SaslAuthenticateRequest;
import org.joyqueue.broker.kafka.command.SaslAuthenticateResponse;
import org.joyqueue.broker.kafka.network.KafkaHeader;
import org.joyqueue.broker.kafka.network.KafkaPayloadCodec;
import org.joyqueue.network.serializer.Serializer;
import org.joyqueue.network.transport.command.Type;
/**
* SaslAuthenticateCodec
* author: gaohaoxiang
* date: 2020/4/9
*/
public class SaslAuthenticateCodec implements KafkaPayloadCodec<SaslAuthenticateResponse>, Type {
private static final byte AUTH_SEP = 0;
@Override
public SaslAuthenticateRequest decode(KafkaHeader header, ByteBuf buffer) throws Exception {
int length = buffer.readInt();
byte[] authBytes = new byte[length];
buffer.readBytes(authBytes);
SaslAuthenticateRequest request = new SaslAuthenticateRequest();
request.setAuthBytes(authBytes);
request.setData(parseData(authBytes));
return request;
}
protected SaslAuthenticateRequest.SaslAuthenticateData parseData(byte[] authBytes) {
StringBuilder buffer = new StringBuilder();
String app = null;
String token = null;
for (int i = 0; i < authBytes.length; i++) {
byte current = authBytes[i];
if (current == AUTH_SEP) {
if (i != 0) {
app = buffer.toString();
buffer.delete(0, buffer.length());
}
} else {
buffer.append((char) current);
if (i == authBytes.length - 1) {
token = buffer.toString();
}
}
}
return new SaslAuthenticateRequest.SaslAuthenticateData(app, token);
}
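    // Illustrative example (not part of the original source): the auth bytes use
    // a SASL/PLAIN-like layout in which fields are separated by a NUL (0x00)
    // byte, so a payload laid out as
    //
    //     'm' 'y' 'A' 'p' 'p' 0x00 'm' 'y' 'T' 'o' 'k' 'e' 'n'
    //
    // is parsed by parseData into app = "myApp" and token = "myToken".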
@Override
public void encode(SaslAuthenticateResponse payload, ByteBuf buffer) throws Exception {
buffer.writeShort(payload.getErrorCode());
Serializer.write(payload.getErrorMessage(), buffer, Serializer.SHORT_SIZE);
buffer.writeInt(payload.getAuthBytes().length);
buffer.writeBytes(payload.getAuthBytes());
if (payload.getVersion() >= 1) {
buffer.writeLong(payload.getSessionLifeTimeMs());
}
}
@Override
public int type() {
return KafkaCommandType.SASL_AUTHENTICATE.getCode();
}
} | 1,027 |
2,058 | <filename>gitlab/git_mixins/remotes.py<gh_stars>1000+
class GitLabRemotesMixin():
def get_integrated_branch_name(self):
configured_branch_name = self.git(
"config",
"--local",
"--get",
"GitSavvy.glBranch",
throw_on_error=False
).strip()
if configured_branch_name:
return configured_branch_name
else:
return "master"
def get_integrated_remote_name(self):
configured_remote_name = self.git(
"config",
"--local",
"--get",
"GitSavvy.glRemote",
throw_on_error=False
).strip()
remotes = self.get_remotes()
if len(remotes) == 0:
            raise ValueError("GitLab integration will not function when no remotes are defined.")
if configured_remote_name and configured_remote_name in remotes:
return configured_remote_name
elif len(remotes) == 1:
return list(remotes.keys())[0]
elif "origin" in remotes:
return "origin"
elif self.get_upstream_for_active_branch():
# fall back to the current active remote
return self.get_upstream_for_active_branch().split("/")[0]
else:
raise ValueError("Cannot determine GitLab integrated remote.")
def get_integrated_remote_url(self):
configured_remote_name = self.get_integrated_remote_name()
remotes = self.get_remotes()
return remotes[configured_remote_name]
def guess_gitlab_remote(self):
upstream = self.get_upstream_for_active_branch()
integrated_remote = self.get_integrated_remote_name()
remotes = self.get_remotes()
        if len(remotes) == 1:
return list(remotes.keys())[0]
elif upstream:
tracked_remote = upstream.split("/")[0] if upstream else None
if tracked_remote and tracked_remote == integrated_remote:
return tracked_remote
else:
return None
else:
return integrated_remote
| 995 |
348 | {"nom":"Saulxures-sur-Moselotte","circ":"3ème circonscription","dpt":"Vosges","inscrits":2079,"abs":1235,"votants":844,"blancs":56,"nuls":38,"exp":750,"res":[{"nuance":"DVD","nom":"M. <NAME>","voix":481},{"nuance":"REM","nom":"M. <NAME>","voix":269}]} | 104 |
3,301 | package com.alibaba.alink.params.shared.tree;
import com.alibaba.alink.params.shared.colname.HasCategoricalCols;
import com.alibaba.alink.params.shared.colname.HasWeightColDefaultAsNull;
public interface TreeTrainParams<T> extends
HasCategoricalCols <T>,
HasWeightColDefaultAsNull <T>,
HasMaxLeaves <T>,
HasMinSampleRatioPerChild <T>,
HasMinInfoGain <T> {
}
| 133 |
3,680 | <filename>pxr/base/trace/eventTree.h
//
// Copyright 2018 Pixar
//
// Licensed under the Apache License, Version 2.0 (the "Apache License")
// with the following modification; you may not use this file except in
// compliance with the Apache License and the following modification to it:
// Section 6. Trademarks. is deleted and replaced with:
//
// 6. Trademarks. This License does not grant permission to use the trade
// names, trademarks, service marks, or product names of the Licensor
// and its affiliates, except as required to comply with Section 4(c) of
// the License and to reproduce the content of the NOTICE file.
//
// You may obtain a copy of the Apache License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the Apache License with the above modification is
// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the Apache License for the specific
// language governing permissions and limitations under the Apache License.
//
#ifndef PXR_BASE_TRACE_EVENT_TREE_H
#define PXR_BASE_TRACE_EVENT_TREE_H
#include "pxr/pxr.h"
#include "pxr/base/trace/api.h"
#include "pxr/base/trace/event.h"
#include "pxr/base/trace/eventNode.h"
#include "pxr/base/trace/threads.h"
#include "pxr/base/tf/refBase.h"
#include "pxr/base/tf/refPtr.h"
#include "pxr/base/tf/token.h"
#include "pxr/base/tf/weakBase.h"
#include "pxr/base/tf/weakPtr.h"
#include "pxr/base/tf/declarePtrs.h"
#include <functional>
#include <vector>
#include <unordered_map>
PXR_NAMESPACE_OPEN_SCOPE
class TraceCollection;
class JsWriter;
TF_DECLARE_WEAK_AND_REF_PTRS(TraceEventTree);
////////////////////////////////////////////////////////////////////////////////
/// \class TraceEventTree
///
/// This class contains a timeline call tree and a map of counters to their
/// values over time.
///
///
class TraceEventTree : public TfRefBase, public TfWeakBase {
public:
using CounterValues = std::vector<std::pair<TraceEvent::TimeStamp, double>>;
using CounterValuesMap =
std::unordered_map<TfToken, CounterValues, TfToken::HashFunctor>;
using CounterMap =
std::unordered_map<TfToken, double, TfToken::HashFunctor>;
using MarkerValues = std::vector<std::pair<TraceEvent::TimeStamp, TraceThreadId>>;
using MarkerValuesMap =
std::unordered_map<TfToken, MarkerValues, TfToken::HashFunctor>;
/// Creates a new TraceEventTree instance from the data in \p collection
/// and \p initialCounterValues.
TRACE_API static TraceEventTreeRefPtr New(
const TraceCollection& collection,
const CounterMap* initialCounterValues = nullptr);
static TraceEventTreeRefPtr New() {
return TfCreateRefPtr(
new TraceEventTree(TraceEventNode::New()));
}
static TraceEventTreeRefPtr New(
TraceEventNodeRefPtr root,
CounterValuesMap counters,
MarkerValuesMap markers) {
return TfCreateRefPtr(
new TraceEventTree(root, std::move(counters), std::move(markers)));
}
/// Returns the root node of the tree.
const TraceEventNodeRefPtr& GetRoot() const { return _root; }
/// Returns the map of counter values.
const CounterValuesMap& GetCounters() const { return _counters; }
/// Returns the map of markers values.
const MarkerValuesMap& GetMarkers() const { return _markers; }
/// Return the final value of the counters in the report.
CounterMap GetFinalCounterValues() const;
/// Writes a JSON object representing the data in the call tree that
/// conforms to the Chrome Trace format.
using ExtraFieldFn = std::function<void(JsWriter&)>;
TRACE_API void WriteChromeTraceObject(
JsWriter& writer, ExtraFieldFn extraFields = ExtraFieldFn()) const;
/// Adds the contexts of \p tree to this tree.
TRACE_API void Merge(const TraceEventTreeRefPtr& tree);
/// Adds the data from \p collection to this tree.
TRACE_API TraceEventTreeRefPtr Add(const TraceCollection& collection);
private:
TraceEventTree(TraceEventNodeRefPtr root)
: _root(root) {}
TraceEventTree( TraceEventNodeRefPtr root,
CounterValuesMap counters,
MarkerValuesMap markers)
: _root(root)
, _counters(std::move(counters))
, _markers(std::move(markers)) {}
// Root of the call tree.
TraceEventNodeRefPtr _root;
// Counter data of the trace.
CounterValuesMap _counters;
// Marker data of the trace.
MarkerValuesMap _markers;
};
PXR_NAMESPACE_CLOSE_SCOPE
#endif // PXR_BASE_TRACE_EVENT_TREE_H
| 1,660 |
3,007 | <gh_stars>1000+
#import <Cocoa/Cocoa.h>
//! Project version number for NEKit.
FOUNDATION_EXPORT double NEKitVersionNumber;
//! Project version string for NEKit.
FOUNDATION_EXPORT const unsigned char NEKitVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <NEKit/PublicHeader.h> | 103 |
412 | typedef enum : unsigned
{
X
} my_enum1;
enum my_enum2 : unsigned
{
Y
};
struct S
{
enum my_enum2 : unsigned a;
enum my_enum2 : unsigned b : 2;
};
int main()
{
enum my_enum2 : unsigned enum_var1;
}
| 90 |
582 | package com.almasb.fxglgames.bomberman.components;
import com.almasb.fxgl.entity.Entity;
import com.almasb.fxgl.entity.SpawnData;
import com.almasb.fxgl.entity.component.Component;
import com.almasb.fxgl.pathfinding.CellMoveComponent;
import com.almasb.fxgl.pathfinding.astar.AStarMoveComponent;
import com.almasb.fxglgames.bomberman.BombermanApp;
import javafx.util.Duration;
import static com.almasb.fxgl.dsl.FXGL.getGameTimer;
import static com.almasb.fxgl.dsl.FXGL.spawn;
/**
* @author <NAME> (<EMAIL>)
*/
public class PlayerComponent extends Component {
private CellMoveComponent cell;
private AStarMoveComponent astar;
private int maxBombs = 1;
private int bombsPlaced = 0;
public void increaseMaxBombs() {
maxBombs++;
}
public void placeBomb() {
if (bombsPlaced == maxBombs) {
return;
}
bombsPlaced++;
Entity bomb = spawn("Bomb", new SpawnData(cell.getCellX() * 40, cell.getCellY() * 40).put("radius", BombermanApp.TILE_SIZE / 2));
getGameTimer().runOnceAfter(() -> {
bomb.getComponent(BombComponent.class).explode();
bombsPlaced--;
}, Duration.seconds(2));
}
public void moveRight() {
astar.moveToRightCell();
}
public void moveLeft() {
astar.moveToLeftCell();
}
public void moveUp() {
astar.moveToUpCell();
}
public void moveDown() {
astar.moveToDownCell();
}
}
| 606 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.web.webkit.debugging.spi;
import java.net.URL;
import org.netbeans.modules.web.webkit.debugging.api.TransportStateException;
/**
* Transport of {@link Command}s or {@link Response}s between IDE and WebKit browser.
*/
public interface TransportImplementation {
// requestChildNodes was introduced in
// http://trac.webkit.org/changeset/93396/trunk/Source/WebCore/inspector/Inspector.json
public static final String VERSION_UNKNOWN_BEFORE_requestChildNodes = "version without requestChildNodes";
public static final String VERSION_1 = "version 1.0";
/**
* Activate transport.
*/
boolean attach();
/**
* Deactivate transport.
*/
boolean detach();
/**
* Send command to WebKit.
*
* @throws TransportStateException when the transport is not in a state
* that allows execution of the given command.
*/
void sendCommand(Command command) throws TransportStateException;
/**
* Register callback for receiving responses from WebKit.
*/
void registerResponseCallback(ResponseCallback callback);
/**
* Descriptive name of the established transport. For example URL being debugged.
*/
String getConnectionName();
/**
* URL being debugged.
*/
URL getConnectionURL();
/**
* Returns version of the protocol supported on browser side. See constants
* above.
*/
String getVersion();
}
| 707 |
746 | <reponame>desto12/MixedReality-WebRTC
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
#pragma once
#include "interop_api.h"
extern "C" {
/// Register a custom callback to be called when the audio track source produced
/// a frame.
/// WARNING: The default platform source internal implementation currently does
/// not hook those callbacks, and therefore the callback will never be called.
/// This is a limitation of the underlying implementation.
/// See https://bugs.chromium.org/p/webrtc/issues/detail?id=11602
//MRS_API void MRS_CALL mrsAudioTrackSourceRegisterFrameCallback(
// mrsAudioTrackSourceHandle source_handle,
// mrsAudioFrameCallback callback,
// void* user_data) noexcept;
} // extern "C"
| 221 |
1,855 | <gh_stars>1000+
/* Ppmd7.h -- Ppmd7 (PPMdH) compression codec
2021-04-13 : <NAME> : Public domain
This code is based on:
PPMd var.H (2001): <NAME> : Public domain */
#ifndef __PPMD7_H
#define __PPMD7_H
#include "Ppmd.h"
EXTERN_C_BEGIN
#define PPMD7_MIN_ORDER 2
#define PPMD7_MAX_ORDER 64
#define PPMD7_MIN_MEM_SIZE (1 << 11)
#define PPMD7_MAX_MEM_SIZE (0xFFFFFFFF - 12 * 3)
struct CPpmd7_Context_;
typedef Ppmd_Ref_Type(struct CPpmd7_Context_) CPpmd7_Context_Ref;
// MY_CPU_pragma_pack_push_1
typedef struct CPpmd7_Context_
{
UInt16 NumStats;
union
{
UInt16 SummFreq;
CPpmd_State2 State2;
} Union2;
union
{
CPpmd_State_Ref Stats;
CPpmd_State4 State4;
} Union4;
CPpmd7_Context_Ref Suffix;
} CPpmd7_Context;
// MY_CPU_pragma_pop
#define Ppmd7Context_OneState(p) ((CPpmd_State *)&(p)->Union2)
typedef struct
{
UInt32 Range;
UInt32 Code;
UInt32 Low;
IByteIn *Stream;
} CPpmd7_RangeDec;
typedef struct
{
UInt32 Range;
Byte Cache;
// Byte _dummy_[3];
UInt64 Low;
UInt64 CacheSize;
IByteOut *Stream;
} CPpmd7z_RangeEnc;
typedef struct
{
CPpmd7_Context *MinContext, *MaxContext;
CPpmd_State *FoundState;
unsigned OrderFall, InitEsc, PrevSuccess, MaxOrder, HiBitsFlag;
Int32 RunLength, InitRL; /* must be 32-bit at least */
UInt32 Size;
UInt32 GlueCount;
UInt32 AlignOffset;
Byte *Base, *LoUnit, *HiUnit, *Text, *UnitsStart;
union
{
CPpmd7_RangeDec dec;
CPpmd7z_RangeEnc enc;
} rc;
Byte Indx2Units[PPMD_NUM_INDEXES + 2]; // +2 for alignment
Byte Units2Indx[128];
CPpmd_Void_Ref FreeList[PPMD_NUM_INDEXES];
Byte NS2BSIndx[256], NS2Indx[256];
Byte ExpEscape[16];
CPpmd_See DummySee, See[25][16];
UInt16 BinSumm[128][64];
// int LastSymbol;
} CPpmd7;
void Ppmd7_Construct(CPpmd7 *p);
BoolInt Ppmd7_Alloc(CPpmd7 *p, UInt32 size, ISzAllocPtr alloc);
void Ppmd7_Free(CPpmd7 *p, ISzAllocPtr alloc);
void Ppmd7_Init(CPpmd7 *p, unsigned maxOrder);
#define Ppmd7_WasAllocated(p) ((p)->Base != NULL)
/* ---------- Internal Functions ---------- */
#define Ppmd7_GetPtr(p, ptr) Ppmd_GetPtr(p, ptr)
#define Ppmd7_GetContext(p, ptr) Ppmd_GetPtr_Type(p, ptr, CPpmd7_Context)
#define Ppmd7_GetStats(p, ctx) Ppmd_GetPtr_Type(p, (ctx)->Union4.Stats, CPpmd_State)
void Ppmd7_Update1(CPpmd7 *p);
void Ppmd7_Update1_0(CPpmd7 *p);
void Ppmd7_Update2(CPpmd7 *p);
#define PPMD7_HiBitsFlag_3(sym) ((((unsigned)sym + 0xC0) >> (8 - 3)) & (1 << 3))
#define PPMD7_HiBitsFlag_4(sym) ((((unsigned)sym + 0xC0) >> (8 - 4)) & (1 << 4))
// #define PPMD7_HiBitsFlag_3(sym) ((sym) < 0x40 ? 0 : (1 << 3))
// #define PPMD7_HiBitsFlag_4(sym) ((sym) < 0x40 ? 0 : (1 << 4))
#define Ppmd7_GetBinSumm(p) \
&p->BinSumm[(size_t)(unsigned)Ppmd7Context_OneState(p->MinContext)->Freq - 1] \
[ p->PrevSuccess + ((p->RunLength >> 26) & 0x20) \
+ p->NS2BSIndx[(size_t)Ppmd7_GetContext(p, p->MinContext->Suffix)->NumStats - 1] \
+ PPMD7_HiBitsFlag_4(Ppmd7Context_OneState(p->MinContext)->Symbol) \
+ (p->HiBitsFlag = PPMD7_HiBitsFlag_3(p->FoundState->Symbol)) ]
CPpmd_See *Ppmd7_MakeEscFreq(CPpmd7 *p, unsigned numMasked, UInt32 *scale);
/*
We support two versions of Ppmd7 (PPMdH) methods that use the same CPpmd7 structure:
1) Ppmd7a_*: original PPMdH
2) Ppmd7z_*: modified PPMdH with 7z Range Coder
Ppmd7_*: the structures and functions that are common for both versions of PPMd7 (PPMdH)
*/
/* ---------- Decode ---------- */
#define PPMD7_SYM_END (-1)
#define PPMD7_SYM_ERROR (-2)
/*
You must set (CPpmd7::rc.dec.Stream) before Ppmd7*_RangeDec_Init()
Ppmd7*_DecodeSymbol()
out:
>= 0 : decoded byte
-1 : PPMD7_SYM_END : End of payload marker
-2 : PPMD7_SYM_ERROR : Data error
*/
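/* A rough decode loop implied by the notes above (illustrative sketch only:
   the allocator, the IByteIn provider and the order / memory-size values are
   assumptions, not part of this header):

       CPpmd7 p;
       Ppmd7_Construct(&p);
       Ppmd7_Alloc(&p, 16 << 20, &g_Alloc);   // 16 MiB of model memory
       Ppmd7_Init(&p, 6);                     // model order 6
       p.rc.dec.Stream = &byteInStream;       // some IByteIn implementation
       Ppmd7z_RangeDec_Init(&p.rc.dec);
       for (;;) {
           int sym = Ppmd7z_DecodeSymbol(&p);
           if (sym < 0)                       // PPMD7_SYM_END or PPMD7_SYM_ERROR
               break;
           output_byte((Byte)sym);            // hypothetical sink
       }
       Ppmd7_Free(&p, &g_Alloc);
*/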
/* Ppmd7a_* : original PPMdH */
BoolInt Ppmd7a_RangeDec_Init(CPpmd7_RangeDec *p);
#define Ppmd7a_RangeDec_IsFinishedOK(p) ((p)->Code == 0)
int Ppmd7a_DecodeSymbol(CPpmd7 *p);
/* Ppmd7z_* : modified PPMdH with 7z Range Coder */
BoolInt Ppmd7z_RangeDec_Init(CPpmd7_RangeDec *p);
#define Ppmd7z_RangeDec_IsFinishedOK(p) ((p)->Code == 0)
int Ppmd7z_DecodeSymbol(CPpmd7 *p);
// Byte *Ppmd7z_DecodeSymbols(CPpmd7 *p, Byte *buf, const Byte *lim);
/* ---------- Encode ---------- */
void Ppmd7z_Init_RangeEnc(CPpmd7 *p);
void Ppmd7z_Flush_RangeEnc(CPpmd7 *p);
// void Ppmd7z_EncodeSymbol(CPpmd7 *p, int symbol);
void Ppmd7z_EncodeSymbols(CPpmd7 *p, const Byte *buf, const Byte *lim);
EXTERN_C_END
#endif
| 2,064 |
324 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.oauth.v2.config;
import static com.google.common.base.Suppliers.ofInstance;
import static org.testng.Assert.assertNotNull;
import java.io.File;
import java.security.PrivateKey;
import java.util.Properties;
import org.jclouds.domain.Credentials;
import org.jclouds.oauth.v2.OAuthTestUtils;
import org.jclouds.oauth.v2.config.PrivateKeySupplier.PrivateKeyForCredentials;
import org.jclouds.rest.AuthorizationException;
import org.testng.annotations.Test;
import com.google.common.base.Charsets;
import com.google.common.base.Suppliers;
import com.google.common.io.Files;
@Test(groups = "unit")
public class PrivateKeySupplierTest {
/** Test loading the credentials by extracting a pk from a PKCS12 keystore. */
public void testLoadPKString() throws Exception {
assertNotNull(loadPrivateKey());
}
@Test(expectedExceptions = AuthorizationException.class)
public void testAuthorizationExceptionIsThrownOnBadKeys() {
PrivateKeySupplier supplier = new PrivateKeySupplier(
Suppliers.ofInstance(new Credentials("MOMMA", "FileNotFoundCredential")),
new PrivateKeyForCredentials());
supplier.get();
}
public void testCredentialsAreLoadedOnRightAlgoAndCredentials() {
Properties propertied = OAuthTestUtils.defaultProperties(new Properties());
Credentials validCredentials = new Credentials(propertied.getProperty("oauth.identity"),
propertied.getProperty("oauth.credential"));
PrivateKeySupplier supplier = new PrivateKeySupplier(Suppliers.ofInstance(validCredentials),
new PrivateKeyForCredentials());
assertNotNull(supplier.get());
}
public static PrivateKey loadPrivateKey() throws Exception {
PrivateKeySupplier supplier = new PrivateKeySupplier(ofInstance(new Credentials("foo",
Files.asCharSource(new File("src/test/resources/testpk.pem"), Charsets.UTF_8).read())),
new PrivateKeyForCredentials());
return supplier.get();
}
}
| 892 |
737 | /* fbx_parsing.cc
<NAME>, 19 June 2013
Structure descriptions for FBX.
*/
#include "fbx_parsing.h"
#include "soa/types/json_parsing.h"
using namespace FBX;
//using namespace RTBKIT;
using namespace std;
namespace Datacratic {
DefaultDescription<BidRequest>::
DefaultDescription()
{
onUnknownField = [=] (BidRequest * br, JsonParsingContext & context)
{
//cerr << "got unknown field " << context.printPath() << endl;
std::function<Json::Value & (int, Json::Value &)> getEntry
= [&] (int n, Json::Value & curr) -> Json::Value &
{
if (n == context.path.size())
return curr;
else if (context.path[n].index != -1)
return getEntry(n + 1, curr[context.path[n].index]);
else return getEntry(n + 1, curr[context.path[n].fieldName()]);
};
getEntry(0, br->unparseable)
= context.expectJson();
};
addField("requestId", &BidRequest::requestId, "Bid request ID");
addField("partnerMatchId", &BidRequest::partnerMatchId, "Partner’s user ID");
addField("userContext", &BidRequest::userContext, "An object of type UserContext");
addField("pageContext", &BidRequest::pageContext, "An object of type PageContext");
addField("istest", &BidRequest::istest, "Indicates an auction being held purely for debugging purposes");
addField("allowViewTag", &BidRequest::allowViewTag, "Indicates if view tags are accepted.");
addField("unparseable", &BidRequest::unparseable, "Unparseable fields are collected here");
}
DefaultDescription<RtbPageContext>::
DefaultDescription()
{
addField("pageTypeId", &RtbPageContext::pageTypeId, "Page type");
addField("numSlots", &RtbPageContext::numSlots, "Estimated number of ad slots in the placement");
}
DefaultDescription<RtbUserContext>::
DefaultDescription()
{
addField("ipAddressMasked", &RtbUserContext::ipAddressMasked, "User IP address");
addField("userAgent", &RtbUserContext::userAgent, "User agent from the user browser");
addField("country", &RtbUserContext::country, "Country");
}
DefaultDescription<BidResponse>::
DefaultDescription()
{
addField("requestId", &BidResponse::requestId, "Same requestId as in the bid request");
addField("bids", &BidResponse::bids, "Array of type RtbBid");
addField("processingTimeMs", &BidResponse::processingTimeMs, "Time it takes for your servers to process the bid request");
}
DefaultDescription<RtbBidDynamicCreativeSpec>::
DefaultDescription()
{
addField("title", &RtbBidDynamicCreativeSpec::title, "Title");
addField("body", &RtbBidDynamicCreativeSpec::body, "Body");
addField("link", &RtbBidDynamicCreativeSpec::link, "Link");
addField("creativeHash", &RtbBidDynamicCreativeSpec::creativeHash, "CreativeHash");
addField("imageUrl", &RtbBidDynamicCreativeSpec::imageUrl, "Image Url");
}
DefaultDescription<RtbBid>::
DefaultDescription()
{
addField("adId", &RtbBid::adId, "FB ad id for ad which partner wishes to show");
addField("bidNative", &RtbBid::bidNative, "The CPM bid in cents");
addField("impressionPayload", &RtbBid::impressionPayload, "Opaque blob which FB will return to the partner in the win notification");
addField("clickPayload", &RtbBid::clickPayload, "Opaque blob which FB will return to the partner upon user click");
addField("dynamicCreativeSpec", &RtbBid::dynamicCreativeSpec, "Dynamic creative");
addField("viewTagUrls", &RtbBid::viewTagUrls, "A list of view tag URL's to be fired when the impression is served.");
}
} // namespace Datacratic
| 1,458 |
377 | <filename>inception/inception-api-annotation/src/main/java/de/tudarmstadt/ukp/clarin/webanno/api/annotation/preferences/UserPreferencesService.java
/*
* Licensed to the Technische Universität Darmstadt under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The Technische Universität Darmstadt
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tudarmstadt.ukp.clarin.webanno.api.annotation.preferences;
import java.io.IOException;
import org.springframework.beans.BeansException;
import de.tudarmstadt.ukp.clarin.webanno.api.annotation.model.AnnotationPreference;
import de.tudarmstadt.ukp.clarin.webanno.api.annotation.model.AnnotatorState;
import de.tudarmstadt.ukp.clarin.webanno.model.Mode;
import de.tudarmstadt.ukp.clarin.webanno.model.Project;
public interface UserPreferencesService
{
/**
* Set annotation preferences of users for a given project such as window size, annotation
* layers,... reading from the file system.
*
* @param aState
* The {@link AnnotatorState} that will be populated with preferences from the file
* @param aUsername
* The user for whom we need to read the preference (preferences are stored per user)
*
* @throws BeansException
* hum?
* @throws IOException
* hum?
*/
void loadPreferences(AnnotatorState aState, String aUsername)
throws BeansException, IOException;
AnnotationPreference loadPreferences(Project aProject, String aUsername, Mode aMode)
throws IOException;
void savePreference(AnnotatorState aState, String aUsername) throws IOException;
void savePreferences(Project aProject, String aUsername, Mode aMode, AnnotationPreference aPref)
throws IOException;
}
| 772 |
1,473 | /*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.common.util;
import java.util.Objects;
/**
* @author emeroad
*/
public final class ClassLoaderUtils {
public static final ClassLoaderCallable DEFAULT_CLASS_LOADER_CALLABLE = new ClassLoaderCallable() {
@Override
public ClassLoader getClassLoader() {
return ClassLoaderUtils.class.getClassLoader();
}
};
private static final ClassLoader SYSTEM_CLASS_LOADER;
private static final ClassLoader EXT_CLASS_LOADER;
private static final ClassLoader BOOT_CLASS_LOADER;
static {
BOOT_CLASS_LOADER = Object.class.getClassLoader();
// SystemClassLoader can be changed by "java.system.class.loader"
// https://docs.oracle.com/javase/8/docs/api/java/lang/ClassLoader.html
// If the system property "java.system.class.loader" is defined when this method is first invoked
// then the value of that property is taken to be the name of a class that will be returned as the system class loader.
final ClassLoader systemClassLoader = ClassLoader.getSystemClassLoader();
EXT_CLASS_LOADER = findChildClassLoader(BOOT_CLASS_LOADER, systemClassLoader);
SYSTEM_CLASS_LOADER = findChildClassLoader(EXT_CLASS_LOADER, systemClassLoader);
}
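    // On a typical HotSpot JVM the walk above resolves to: BOOT_CLASS_LOADER ==
    // null (the bootstrap loader), EXT_CLASS_LOADER == the extension/platform
    // loader (the direct child of bootstrap on the current chain), and
    // SYSTEM_CLASS_LOADER == the loader directly below it -- normally the
    // application loader, even when "java.system.class.loader" has installed a
    // custom loader further down the chain.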
private static ClassLoader findChildClassLoader(ClassLoader parent, ClassLoader searchTarget) {
ClassLoader prev = searchTarget;
while (parent != prev.getParent()) {
prev = prev.getParent();
}
return prev;
}
private ClassLoaderUtils() {
}
// TODO check @CallerSensitive, Reflection.getCallerClass()
// private static ClassLoader getClassLoader(ClassLoader classLoader) {
// if (classLoader == null) {
// return ClassLoader.getSystemClassLoader();
// }
// return classLoader;
// }
public static ClassLoader getDefaultClassLoader() {
return getDefaultClassLoader(DEFAULT_CLASS_LOADER_CALLABLE);
}
public static ClassLoader getDefaultClassLoader(ClassLoaderCallable defaultClassLoaderCallable) {
Objects.requireNonNull(defaultClassLoaderCallable, "defaultClassLoaderCallable");
try {
final Thread th = Thread.currentThread();
final ClassLoader contextClassLoader = th.getContextClassLoader();
if (contextClassLoader != null) {
return contextClassLoader;
}
} catch (Throwable ignore) {
// skip
}
// Timing for security exceptions is different when the ClassLoader is received as an argument
return defaultClassLoaderCallable.getClassLoader();
}
public interface ClassLoaderCallable {
ClassLoader getClassLoader();
}
public static boolean isJvmClassLoader(ClassLoader classLoader) {
return BOOT_CLASS_LOADER == classLoader || SYSTEM_CLASS_LOADER == classLoader || EXT_CLASS_LOADER == classLoader;
}
public static String dumpStandardClassLoader() {
final StringBuilder buffer = new StringBuilder();
appendClassLoaderLog(buffer, "SYSTEM_CLASS_LOADER", SYSTEM_CLASS_LOADER);
appendClassLoaderLog(buffer, "EXT_CLASS_LOADER", EXT_CLASS_LOADER);
appendClassLoaderLog(buffer, "BOOT_CLASS_LOADER", BOOT_CLASS_LOADER);
return buffer.toString();
}
private static void appendClassLoaderLog(StringBuilder buffer, String classLoaderName, ClassLoader classLoader) {
buffer.append(classLoaderName);
buffer.append(':');
if (classLoader == null) {
buffer.append("null");
} else {
buffer.append(classLoader);
}
buffer.append(", ");
}
}
| 1,502 |
404 | <filename>Sources/WireGuardKitC/x25519.h<gh_stars>100-1000
#ifndef X25519_H
#define X25519_H
void curve25519_derive_public_key(unsigned char public_key[32], const unsigned char private_key[32]);
void curve25519_generate_private_key(unsigned char private_key[32]);
#endif
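/* Typical call sequence (illustrative sketch, not part of the original header):
 * generate a clamped random private key, then derive the matching public key.
 *
 *     unsigned char private_key[32];
 *     unsigned char public_key[32];
 *     curve25519_generate_private_key(private_key);
 *     curve25519_derive_public_key(public_key, private_key);
 *     // private_key / public_key now form a Curve25519 key pair as used by WireGuard.
 */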
| 98 |
450 | <filename>src/pl/pljava/src/C/include/pljava/type/Type.h<gh_stars>100-1000
/*
* Copyright (c) 2004, 2005, 2006 TADA AB - Taby Sweden
* Distributed under the terms shown in the file COPYRIGHT
* found in the root folder of this project or at
* http://eng.tada.se/osprojects/COPYRIGHT.html
*
* @author <NAME>
*/
#ifndef __pljava_type_Type_h
#define __pljava_type_Type_h
#include "pljava/PgObject.h"
#ifdef __cplusplus
extern "C" {
#endif
#include <catalog/pg_type.h>
/*********************************************************************
* The Type class is responsible for data type conversions between the
* Postgres Datum and the Java jvalue. A Type can also perform optimized
* JNI calls that are type dependent (returning primitives) such as
* CallIntMethod(...) or CallBooleanMethod(...). Consequently, the Type
* of the return value of a function is responsible for its invocation.
*
* Types that are not mapped will default to a java.lang.String mapping
* and use the Form_pg_type text conversion routines.
*
* @author <NAME>
*
*********************************************************************/
struct Type_;
typedef struct Type_* Type;
struct TypeClass_;
typedef struct TypeClass_* TypeClass;
/*
* Returns the TypeClass
*/
extern TypeClass Type_getClass(Type self);
/*
* Returns true if the Type is primitive (i.e. not a real object in
* the Java domain).
*/
extern bool Type_isPrimitive(Type self);
/*
 * Returns true if this type uses the same postgres type as the other type.
 * This is used when explicit java signatures are declared for functions, to
* verify that the declared Java type is compatible with the SQL type.
*
* At present, the type argument must be either equal to self, or if
* self is a Boolean, Character, or any Number, the primitive that
* corresponds to that number (i.e. java.lang.Short == short).
*/
extern bool Type_canReplaceType(Type self, Type type);
/*
 * Translate a given Datum into a jvalue according to the type represented
* by this instance.
*/
extern jvalue Type_coerceDatum(Type self, Datum datum);
/*
 * Translate a given Object into a Datum according to the type represented
* by this instance.
*/
extern Datum Type_coerceObject(Type self, jobject object);
/*
* Return a Type based on a Postgres Oid. Creates a new type if
* necessary.
*/
extern Type Type_fromOid(Oid typeId, jobject typeMap);
/*
* Return a Type from the Oid cache based on a Postgres Oid. This method
* returns NULL if no such Type is cached.
*/
extern Type Type_fromOidCache(Oid typeId);
/*
* Return a coerce type that can front this type when doing parameter coercion
*/
extern Type Type_getCoerceIn(Type self, Type other);
/*
* Return a coerce type that this type can hand over to when doing result value
* coercion
*/
extern Type Type_getCoerceOut(Type self, Type other);
/*
* Returns true if the type represents the dynamic (any) type.
*/
extern bool Type_isDynamic(Type self);
/*
* Returns the type alignment (i.e. pg_type->typalign).
*/
extern char Type_getAlign(Type self);
/*
* Returns the type length (i.e. pg_type->typlen).
*/
extern int16 Type_getLength(Type self);
/*
* Returns the type length (i.e. pg_type->typlen).
*/
extern jclass Type_getJavaClass(Type self);
/*
* Returns true if the type is passed by value (i.e. pg_type->typbyval).
*/
extern bool Type_isByValue(Type self);
/*
* Returns true if the invocation will create an out parameter (ResultSet typically)
* to collect the return value. If so, the real return value will be a bool.
*/
extern bool Type_isOutParameter(Type self);
/*
* Returns the real type for a dynamic type. A non dynamic type will
* return itself.
*/
extern Type Type_getRealType(Type self, Oid realTypeID, jobject typeMap);
/*
* Return a Type based on a PostgreSQL Oid. If the found
 * type is a primitive, return its object counterpart
*/
extern Type Type_objectTypeFromOid(Oid typeId, jobject typeMap);
/*
* Return a Type based on a default SQL type and a java type name.
*/
extern Type Type_fromJavaType(Oid dfltType, const char* javaTypeName);
/*
* Returns the Java type name for the Type.
*/
extern const char* Type_getJavaTypeName(Type self);
/*
* Returns the JNI signature for the Type.
*/
extern const char* Type_getJNISignature(Type self);
/*
* Returns the JNI signature used when returning instances
* of this type.
*/
extern const char* Type_getJNIReturnSignature(Type self, bool forMultiCall, bool useAltRepr);
/*
* Returns the array Type. The type is created if it doesn't exist
*/
extern Type Type_getArrayType(Type self, Oid arrayTypeId);
/*
* Returns the element Type if this type is an array.
*/
extern Type Type_getElementType(Type self);
/*
* Returns the object Type if the type is primitive and NULL if not.
*/
extern Type Type_getObjectType(Type self);
/*
* Returns the Oid associated with this type.
*/
extern Oid Type_getOid(Type self);
/*
* Returns the TupleDesc associated with this type.
*/
extern TupleDesc Type_getTupleDesc(Type self, PG_FUNCTION_ARGS);
/*
* Calls a java method using one of the Call<type>MethodA routines where
* <type> corresponds to the type represented by this instance and
* coerces the returned value into a Datum.
*
* The method will set the value pointed to by the wasNull parameter
* to true if the Java method returned null. The method expects that
* the wasNull parameter is set to false by the caller prior to the
* call.
*/
extern Datum Type_invoke(Type self, jclass clazz, jmethodID method, jvalue* args, PG_FUNCTION_ARGS);
/*
* Calls a Set Returning Function (SRF).
*/
extern Datum Type_invokeSRF(Type self, jclass clazz, jmethodID method, jvalue* args, PG_FUNCTION_ARGS);
/*
* Obtains the Java object that acts as the SRF producer. This instance will be
* called once for each row that should be produced.
*/
extern jobject Type_getSRFProducer(Type self, jclass clazz, jmethodID method, jvalue* args);
/*
* Obtains the optional Java object that will act as the value collector for
 * the SRF producer. The collector typically manifests itself as an OUT
* parameter of type java.sql.ResultSet in calls to the SRF producer.
*/
extern jobject Type_getSRFCollector(Type self, PG_FUNCTION_ARGS);
/*
* Called to determine if the producer will produce another row.
*/
extern bool Type_hasNextSRF(Type self, jobject producer, jobject collector, jint counter);
/*
* Converts the next row into a Datum of the expected type.
*/
extern Datum Type_nextSRF(Type self, jobject producer, jobject collector);
/*
* Called at the end of an SRF iteration.
*/
extern void Type_closeSRF(Type self, jobject producer);
/*
* Function used when obtaining a type based on an Oid
* structure. In most cases, this function should return a
 * singleton. The only current exception to this is the
* String since it makes use of functions stored in the
* Form_pg_type structure.
*/
typedef Type (*TypeObtainer)(Oid typeId);
/*
* Function that can coerce a Datum into a jvalue
*/
typedef jvalue (*DatumCoercer)(Type, Datum);
/*
* Function that can coerce a jobject into a Datum
*/
typedef Datum (*ObjectCoercer)(Type, jobject);
/*
* Register this type as the default mapping for a postgres type.
*/
extern void Type_registerType(const char* javaTypeName, Type type);
extern void Type_registerType2(Oid typeId, const char* javaTypeName, TypeObtainer obtainer);
#ifdef __cplusplus
}
#endif
#endif
| 2,213 |
848 |
#
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import math
class Evaluator(object):
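  # Best-effort constant folder: each static method below evaluates one graph op
  # (shape lookup, tensor-to-scalar, cast, floor, int, +, -, *, / and //) using
  # scalar data already present on the node's input tensors / attributes and
  # writes the result onto the node's output tensor.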
@staticmethod
def shape(node):
shape = node.in_tensors[0].shape
dim = node.node_attr(node.op.AttrName.AXIS)
node.out_tensors[0].data = shape[dim]
@staticmethod
def tensor(node):
assert node.in_tensors[0].ndim == 0
node.out_tensors[0].data = float(node.in_tensors[0].data)
@staticmethod
def mul(node):
node.out_tensors[0].data = node.in_tensors[0].data * node.in_tensors[1].data
@staticmethod
def cast(node):
node.out_tensors[0].data = node.in_tensors[0].data
@staticmethod
def floor(node):
node.out_tensors[0].data = math.floor(node.in_tensors[0].data)
@staticmethod
def int(node):
node.out_tensors[0].data = int(node.in_tensors[0].data)
@staticmethod
def sub(node):
node.out_tensors[0].data = node.node_attr(node.op.AttrName.INPUT) - node.node_attr(node.op.AttrName.OTHER)
@staticmethod
def elemwise_div(node):
node.out_tensors[0].data = float(node.node_attr(node.op.AttrName.INPUT) / node.node_attr(node.op.AttrName.OTHER))
@staticmethod
def floor_div(node):
node.out_tensors[0].data = int(node.node_attr(node.op.AttrName.INPUT) // node.node_attr(node.op.AttrName.OTHER))
@staticmethod
def add(node):
node.out_tensors[0].data = node.node_attr(node.op.AttrName.INPUT) + node.node_attr(node.op.AttrName.OTHER)
| 771 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-2xhg-w2g5-w95x",
"modified": "2021-12-06T21:36:47Z",
"published": "2021-11-24T21:01:23Z",
"aliases": [
"CVE-2021-41270"
],
"summary": "CSV Injection in symfony/serializer",
"details": "Description\n-----------\n\nCSV Injection, also known as Formula Injection, occurs when websites embed untrusted input inside CSV files. When a spreadsheet program opens a CSV, any cell starting with `=` is interpreted by the software as a formula and could be abused by an attacker.\n\nIn Symfony 4.1, we've added the opt-in `csv_escape_formulas` option in `CsvEncoder`, to prefix all cells starting by `=`, `+`, `-` or `@` by a tab `\\t`. \n\nSince then, OWASP added 2 chars in that list: \n- Tab (0x09)\n- Carriage return (0x0D)\n\nThis makes our previous prefix char (Tab `\\t`) part of the vulnerable characters, and [OWASP suggests](https://owasp.org/www-community/attacks/CSV_Injection) using the single quote `'` for prefixing the value.\n\nResolution\n----------\n\nSymfony now follows the OWASP recommendations and use the single quote `'` to prefix formulas and adds the prefix to cells starting by `\\t`, `\\r` as well as `=`, `+`, `-` and `@`.\n\nThe patch for this issue is available [here](https://github.com/symfony/symfony/commit/3da6f2d45e7536ccb2a26f52fbaf340917e208a8) for branch 4.4.\n\nCredits\n-------\n\nWe would like to thank <NAME> for reporting the issue and <NAME> for fixing the issue.\n",
"severity": [
{
"type": "CVSS_V3",
"score": "CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:N/A:N"
}
],
"affected": [
{
"package": {
"ecosystem": "Packagist",
"name": "symfony/serializer"
},
"ranges": [
{
"type": "ECOSYSTEM",
"events": [
{
"introduced": "5.0.0"
},
{
"fixed": "5.3.12"
}
]
}
]
},
{
"package": {
"ecosystem": "Packagist",
"name": "symfony/serializer"
},
"ranges": [
{
"type": "ECOSYSTEM",
"events": [
{
"introduced": "4.1.0"
},
{
"fixed": "4.4.35"
}
]
}
]
}
],
"references": [
{
"type": "WEB",
"url": "https://github.com/symfony/symfony/security/advisories/GHSA-2xhg-w2g5-w95x"
},
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2021-41270"
},
{
"type": "WEB",
"url": "https://github.com/symfony/symfony/pull/44243"
},
{
"type": "WEB",
"url": "https://github.com/symfony/symfony/commit/3da6f2d45e7536ccb2a26f52fbaf340917e208a8"
},
{
"type": "WEB",
"url": "https://github.com/symfony/symfony/releases/tag/v5.3.12"
},
{
"type": "WEB",
"url": "https://lists.fedoraproject.org/archives/list/[email protected]/message/3BPT4SF6SIXFMZARDWED5T32J7JEH3EP/"
},
{
"type": "WEB",
"url": "https://lists.fedoraproject.org/archives/list/[email protected]/message/QSREFD2TJT5LWKM6S4MD3W26NQQ5WJUP/"
},
{
"type": "PACKAGE",
"url": "https://github.com/symfony/symfony"
}
],
"database_specific": {
"cwe_ids": [
"CWE-1236"
],
"severity": "MODERATE",
"github_reviewed": true
}
} | 1,719 |
778 | // | / |
// ' / __| _` | __| _ \ __|
// . \ | ( | | ( |\__ `
// _|\_\_| \__,_|\__|\___/ ____/
// Multi-Physics
//
// License: BSD License
// Kratos default license: kratos/license.txt
//
// Main authors: <NAME> (https://github.com/philbucher)
//
#if !defined(KRATOS_CO_SIM_IO_CONVERSION_UTILITIES_H_INCLUDED )
#define KRATOS_CO_SIM_IO_CONVERSION_UTILITIES_H_INCLUDED
// System includes
// External includes
#include "custom_external_libraries/co_sim_io/impl/model_part.hpp"
#include "custom_external_libraries/co_sim_io/impl/info.hpp"
// Project includes
#include "includes/define.h"
#include "includes/model_part.h"
#include "includes/kratos_parameters.h"
namespace Kratos
{
///@addtogroup CoSimulationApplication
///@{
///@name Kratos Classes
///@{
/// Short class definition.
/** Detail class definition.
*/
class KRATOS_API(CO_SIMULATION_APPLICATION) CoSimIOConversionUtilities
{
public:
///@name Type Definitions
///@{
/// Pointer definition of CoSimIOConversionUtilities
KRATOS_CLASS_POINTER_DEFINITION(CoSimIOConversionUtilities);
///@}
///@name Life Cycle
///@{
/// Default constructor.
CoSimIOConversionUtilities() = delete;
/// Assignment operator.
CoSimIOConversionUtilities& operator=(CoSimIOConversionUtilities const& rOther) = delete;
/// Copy constructor.
CoSimIOConversionUtilities(CoSimIOConversionUtilities const& rOther) = delete;
///@}
///@name Operations
///@{
static void CoSimIOModelPartToKratosModelPart(
const CoSimIO::ModelPart& rCoSimIOModelPart,
Kratos::ModelPart& rKratosModelPart);
static void KratosModelPartToCoSimIOModelPart(
const Kratos::ModelPart& rKratosModelPart,
CoSimIO::ModelPart& rCoSimIOModelPart);
static CoSimIO::Info InfoFromParameters(const Parameters rSettings);
///@}
}; // Class CoSimIOConversionUtilities
///@}
///@} addtogroup block
} // namespace Kratos.
#endif // KRATOS_CO_SIM_IO_CONVERSION_UTILITIES_H_INCLUDED defined
| 839 |
575 | <reponame>lmxing/abseil-cpp<filename>absl/random/discrete_distribution_test.cc
// Copyright 2017 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "absl/random/discrete_distribution.h"
#include <cmath>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <numeric>
#include <random>
#include <sstream>
#include <string>
#include <vector>
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "absl/base/internal/raw_logging.h"
#include "absl/random/internal/chi_square.h"
#include "absl/random/internal/distribution_test_util.h"
#include "absl/random/internal/pcg_engine.h"
#include "absl/random/internal/sequence_urbg.h"
#include "absl/random/random.h"
#include "absl/strings/str_cat.h"
#include "absl/strings/strip.h"
namespace {
template <typename IntType>
class DiscreteDistributionTypeTest : public ::testing::Test {};
using IntTypes = ::testing::Types<int8_t, uint8_t, int16_t, uint16_t, int32_t,
uint32_t, int64_t, uint64_t>;
TYPED_TEST_SUITE(DiscreteDistributionTypeTest, IntTypes);
TYPED_TEST(DiscreteDistributionTypeTest, ParamSerializeTest) {
using param_type =
typename absl::discrete_distribution<TypeParam>::param_type;
absl::discrete_distribution<TypeParam> empty;
EXPECT_THAT(empty.probabilities(), testing::ElementsAre(1.0));
absl::discrete_distribution<TypeParam> before({1.0, 2.0, 1.0});
// Validate that the probabilities sum to 1.0. We picked values which
// can be represented exactly to avoid floating-point roundoff error.
double s = 0;
for (const auto& x : before.probabilities()) {
s += x;
}
EXPECT_EQ(s, 1.0);
EXPECT_THAT(before.probabilities(), testing::ElementsAre(0.25, 0.5, 0.25));
// Validate the same data via an initializer list.
{
std::vector<double> data({1.0, 2.0, 1.0});
absl::discrete_distribution<TypeParam> via_param{
param_type(std::begin(data), std::end(data))};
EXPECT_EQ(via_param, before);
}
std::stringstream ss;
ss << before;
absl::discrete_distribution<TypeParam> after;
EXPECT_NE(before, after);
ss >> after;
EXPECT_EQ(before, after);
}
TYPED_TEST(DiscreteDistributionTypeTest, Constructor) {
auto fn = [](double x) { return x; };
{
absl::discrete_distribution<int> unary(0, 1.0, 9.0, fn);
EXPECT_THAT(unary.probabilities(), testing::ElementsAre(1.0));
}
{
absl::discrete_distribution<int> unary(2, 1.0, 9.0, fn);
// => fn(1.0 + 0 * 4 + 2) => 3
// => fn(1.0 + 1 * 4 + 2) => 7
EXPECT_THAT(unary.probabilities(), testing::ElementsAre(0.3, 0.7));
}
}
TEST(DiscreteDistributionTest, InitDiscreteDistribution) {
using testing::Pair;
{
std::vector<double> p({1.0, 2.0, 3.0});
std::vector<std::pair<double, size_t>> q =
absl::random_internal::InitDiscreteDistribution(&p);
EXPECT_THAT(p, testing::ElementsAre(1 / 6.0, 2 / 6.0, 3 / 6.0));
    // Each bucket is p=1/3, so bucket 0 will send half its traffic
// to bucket 2, while the rest will retain all of their traffic.
EXPECT_THAT(q, testing::ElementsAre(Pair(0.5, 2), //
Pair(1.0, 1), //
Pair(1.0, 2)));
}
{
std::vector<double> p({1.0, 2.0, 3.0, 5.0, 2.0});
std::vector<std::pair<double, size_t>> q =
absl::random_internal::InitDiscreteDistribution(&p);
EXPECT_THAT(p, testing::ElementsAre(1 / 13.0, 2 / 13.0, 3 / 13.0, 5 / 13.0,
2 / 13.0));
// A more complex bucketing solution: Each bucket has p=0.2
// So buckets 0, 1, 4 will send their alternate traffic elsewhere, which
// happens to be bucket 3.
// However, summing up that alternate traffic gives bucket 3 too much
// traffic, so it will send some traffic to bucket 2.
constexpr double b0 = 1.0 / 13.0 / 0.2;
constexpr double b1 = 2.0 / 13.0 / 0.2;
constexpr double b3 = (5.0 / 13.0 / 0.2) - ((1 - b0) + (1 - b1) + (1 - b1));
EXPECT_THAT(q, testing::ElementsAre(Pair(b0, 3), //
Pair(b1, 3), //
Pair(1.0, 2), //
Pair(b3, 2), //
Pair(b1, 3)));
}
}
TEST(DiscreteDistributionTest, ChiSquaredTest50) {
using absl::random_internal::kChiSquared;
constexpr size_t kTrials = 10000;
  constexpr int kBuckets = 50;  // inclusive, so actually +1
// 1-in-100000 threshold, but remember, there are about 8 tests
// in this file. And the test could fail for other reasons.
// Empirically validated with --runs_per_test=10000.
const int kThreshold =
absl::random_internal::ChiSquareValue(kBuckets, 0.99999);
std::vector<double> weights(kBuckets, 0);
std::iota(std::begin(weights), std::end(weights), 1);
absl::discrete_distribution<int> dist(std::begin(weights), std::end(weights));
// We use a fixed bit generator for distribution accuracy tests. This allows
  // these tests to be deterministic, while still testing the quality of the
// implementation.
absl::random_internal::pcg64_2018_engine rng(0x2B7E151628AED2A6);
std::vector<int32_t> counts(kBuckets, 0);
for (size_t i = 0; i < kTrials; i++) {
auto x = dist(rng);
counts[x]++;
}
// Scale weights.
double sum = 0;
for (double x : weights) {
sum += x;
}
for (double& x : weights) {
x = kTrials * (x / sum);
}
double chi_square =
absl::random_internal::ChiSquare(std::begin(counts), std::end(counts),
std::begin(weights), std::end(weights));
if (chi_square > kThreshold) {
double p_value =
absl::random_internal::ChiSquarePValue(chi_square, kBuckets);
// Chi-squared test failed. Output does not appear to be uniform.
std::string msg;
for (size_t i = 0; i < counts.size(); i++) {
absl::StrAppend(&msg, i, ": ", counts[i], " vs ", weights[i], "\n");
}
absl::StrAppend(&msg, kChiSquared, " p-value ", p_value, "\n");
absl::StrAppend(&msg, "High ", kChiSquared, " value: ", chi_square, " > ",
kThreshold);
ABSL_RAW_LOG(INFO, "%s", msg.c_str());
FAIL() << msg;
}
}
TEST(DiscreteDistributionTest, StabilityTest) {
  // absl::discrete_distribution stability relies on
// absl::uniform_int_distribution and absl::bernoulli_distribution.
absl::random_internal::sequence_urbg urbg(
{0x0003eb76f6f7f755ull, 0xFFCEA50FDB2F953Bull, 0xC332DDEFBE6C5AA5ull,
0x6558218568AB9702ull, 0x2AEF7DAD5B6E2F84ull, 0x1521B62829076170ull,
0xECDD4775619F1510ull, 0x13CCA830EB61BD96ull, 0x0334FE1EAA0363CFull,
0xB5735C904C70A239ull, 0xD59E9E0BCBAADE14ull, 0xEECC86BC60622CA7ull});
std::vector<int> output(6);
{
absl::discrete_distribution<int32_t> dist({1.0, 2.0, 3.0, 5.0, 2.0});
EXPECT_EQ(0, dist.min());
EXPECT_EQ(4, dist.max());
for (auto& v : output) {
v = dist(urbg);
}
EXPECT_EQ(12, urbg.invocations());
}
// With 12 calls to urbg, each call into discrete_distribution consumes
// precisely 2 values: one for the uniform call, and a second for the
// bernoulli.
//
  // Given the alt mapping: 0=>3, 1=>3, 2=>2, 3=>2, 4=>3, we can reconstruct
  // the expected outputs from the recorded uniform and bernoulli draws:
// uniform: 443210143131
// bernoulli: b0 000011100101
// bernoulli: b1 001111101101
// bernoulli: b2 111111111111
// bernoulli: b3 001111101111
// bernoulli: b4 001111101101
// ...
EXPECT_THAT(output, testing::ElementsAre(3, 3, 1, 3, 3, 3));
{
urbg.reset();
absl::discrete_distribution<int64_t> dist({1.0, 2.0, 3.0, 5.0, 2.0});
EXPECT_EQ(0, dist.min());
EXPECT_EQ(4, dist.max());
for (auto& v : output) {
v = dist(urbg);
}
EXPECT_EQ(12, urbg.invocations());
}
EXPECT_THAT(output, testing::ElementsAre(3, 3, 0, 3, 0, 4));
}
} // namespace
| 3,587 |
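The InitDiscreteDistribution and StabilityTest comments above describe alias-table sampling: one uniform draw picks a bucket, and a second bernoulli-style draw against that bucket's threshold decides between the bucket and its precomputed alternate. The sketch below illustrates that lookup under stated assumptions — the (threshold, alternate-index) table layout matches the pairs checked in the tests, while the function name and the std::mt19937 generator are stand-ins for illustration only.

#include <cstddef>
#include <random>
#include <utility>
#include <vector>

// Sample an index from an alias table of (keep-probability, alternate-index) pairs.
std::size_t SampleFromAliasTable(const std::vector<std::pair<double, std::size_t>>& table,
                                 std::mt19937& gen) {
  std::uniform_int_distribution<std::size_t> pick_bucket(0, table.size() - 1);
  std::uniform_real_distribution<double> unit(0.0, 1.0);
  const std::size_t bucket = pick_bucket(gen);   // first random value: choose a bucket
  const auto& entry = table[bucket];
  // second random value: keep the bucket with probability entry.first,
  // otherwise fall through to its precomputed alternate index
  return (unit(gen) < entry.first) ? bucket : entry.second;
}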
14,668 | // Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CONTENT_PUBLIC_TEST_SCOPED_PAGE_FOCUS_OVERRIDE_H_
#define CONTENT_PUBLIC_TEST_SCOPED_PAGE_FOCUS_OVERRIDE_H_
#include "base/callback_forward.h"
#include "base/containers/span.h"
#include "base/memory/ref_counted.h"
#include "content/public/browser/devtools_agent_host.h"
#include "content/public/browser/devtools_agent_host_client.h"
namespace content {
class WebContents;
// This calls into devtools to enable focus emulation for the given WebContents.
// As long as this class is alive any calls to Document::hasFocus() will return
// true which emulates focus on that document.
class ScopedPageFocusOverride : public DevToolsAgentHostClient {
public:
explicit ScopedPageFocusOverride(WebContents* web_contents);
ScopedPageFocusOverride(const ScopedPageFocusOverride&) = delete;
ScopedPageFocusOverride& operator=(const ScopedPageFocusOverride&) = delete;
~ScopedPageFocusOverride() override;
protected:
// DevToolsAgentHostClient:
void DispatchProtocolMessage(DevToolsAgentHost* agent_host,
base::span<const uint8_t> message) override;
void AgentHostClosed(DevToolsAgentHost* agent_host) override;
private:
void SetFocusEmulationEnabled(bool enabled);
int last_sent_id_ = 0;
base::OnceClosure run_loop_quit_closure_;
scoped_refptr<DevToolsAgentHost> agent_host_;
};
} // namespace content
#endif // CONTENT_PUBLIC_TEST_SCOPED_PAGE_FOCUS_OVERRIDE_H_
| 523 |
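Because the override above is a scoped RAII object, focus emulation stays active only while the instance is alive. A hypothetical usage sketch, assuming the header path implied by the include guard and a WebContents* supplied by the test; the function name is illustrative only.

#include "content/public/test/scoped_page_focus_override.h"  // assumed path

void RunChecksWithEmulatedFocus(content::WebContents* web_contents) {
  content::ScopedPageFocusOverride focus_override(web_contents);
  // While `focus_override` is alive, Document::hasFocus() in this WebContents
  // reports true, so assertions that require a focused page can run here.
  // ...
}  // destructor disables focus emulation again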
335 | <reponame>Safal08/Hacktoberfest-1<filename>A/Accommodating_adjective.json
{
"word": "Accommodating",
"definitions": [
"Fitting in with someone's wishes or demands in a helpful way."
],
"parts-of-speech": "Adjective"
} | 99 |
331 | <filename>src/main/java/org/yx/common/json/ParamPojoTypeAdapter.java
/**
* Copyright (C) 2016 - 2030 youtongluan.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.yx.common.json;
import java.io.IOException;
import org.yx.annotation.doc.NotNull;
import org.yx.asm.ParamPojo;
import org.yx.asm.Parameters;
import org.yx.exception.SumkException;
import com.google.gson.Gson;
import com.google.gson.TypeAdapter;
import com.google.gson.reflect.TypeToken;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonToken;
import com.google.gson.stream.JsonWriter;
public class ParamPojoTypeAdapter<T extends ParamPojo> extends TypeAdapter<T> {
@NotNull
private final Gson gson;
@NotNull
private final Parameters info;
public ParamPojoTypeAdapter(Gson gson, Parameters info) {
this.info = info;
this.gson = gson;
}
@Override
public T read(JsonReader in) throws IOException {
if (in.peek() == JsonToken.NULL) {
in.nextNull();
return null;
}
String name = null;
try {
T pojo = info.createEmptyParamObj();
Object[] objs = new Object[info.paramLength()];
in.beginObject();
while (in.hasNext()) {
name = in.nextName();
int index = info.getIndex(name);
if (index < 0) {
in.skipValue();
continue;
}
objs[index] = gson.getAdapter(TypeToken.get(info.getParamType(index))).read(in);
}
in.endObject();
pojo.setParams(objs);
return pojo;
} catch (Exception e) {
			throw new SumkException(-34534234, info.paramClz().getSimpleName() + " failed to parse field '" + name + "': " + e.getMessage(),
					e);
}
}
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public void write(JsonWriter out, ParamPojo pojo) throws IOException {
if (pojo == null) {
out.nullValue();
return;
}
out.beginObject();
Object[] objs = pojo.params();
int len = info.paramLength();
for (int i = 0; i < len; i++) {
out.name(info.getParamName(i));
TypeAdapter adapter = gson.getAdapter(TypeToken.get(info.getParamType(i)));
adapter.write(out, objs[i]);
}
out.endObject();
}
} | 968 |
705 | package week01_part01.practice;
public class Problem04
{
public static void main(String[] args)
{
//
// Reviewing 2D Arrays
String [] names = {"Alice", "Bob", "Cindy"};
int [] gradesStudent01 = {100, 90, 95, 90};
int [] gradesStudent02 = {100, 95, 80, 95};
int [] gradesStudent03 = {85, 90, 80, 100};
int numStudents = names.length;
int numGrades = gradesStudent01.length;
int [][] allGrades; // reference
// option 1
allGrades = new int [ numStudents ][ numGrades ]; // ALLOCATE SPACE
// TO COMPLETE: SET allGrades[0] TO GRADES OF student 1, etc.
for (int gradePos = 0; gradePos < gradesStudent01.length; gradePos++)
{
allGrades[0][gradePos] = gradesStudent01[gradePos];
}
        // Copy the remaining students' grades into rows 1 and 2 the same way.
        for (int gradePos = 0; gradePos < gradesStudent02.length; gradePos++)
        {
            allGrades[1][gradePos] = gradesStudent02[gradePos];
        }
        for (int gradePos = 0; gradePos < gradesStudent03.length; gradePos++)
        {
            allGrades[2][gradePos] = gradesStudent03[gradePos];
        }
}
}
| 304 |
2,542 | // ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
#include "stdafx.h"
using namespace std;
using namespace Common;
using namespace Hosting2;
OperationStatusMap::OperationStatusMap()
: lock_()
{
}
void OperationStatusMap::Initialize(wstring const & id)
{
{
AcquireWriteLock lock(lock_);
auto status = OperationStatus();
status.State = OperationState::NotStarted;
map_[id] = status;
}
}
bool OperationStatusMap::TryInitialize(wstring const & id, __out OperationStatus & initializedStatus)
{
{
AcquireWriteLock lock(lock_);
auto iter = map_.find(id);
if (iter == map_.end())
{
initializedStatus = OperationStatus();
initializedStatus.State = OperationState::NotStarted;
map_[id] = initializedStatus;
return true;
}
else
{
initializedStatus = iter->second;
return false;
}
}
}
OperationStatus OperationStatusMap::Get(wstring const & id) const
{
{
AcquireReadLock lock(lock_);
auto iter = map_.find(id);
if (iter != map_.end())
{
return iter->second;
}
else
{
return OperationStatus();
}
}
}
bool OperationStatusMap::TryGet(wstring const & id, __out OperationStatus & status) const
{
{
AcquireReadLock lock(lock_);
auto iter = map_.find(id);
if (iter != map_.end())
{
status = iter->second;
return true;
}
else
{
return false;
}
}
}
void OperationStatusMap::Set(wstring const & id, OperationStatus const status)
{
{
AcquireWriteLock lock(lock_);
auto iter = map_.find(id);
if (iter != map_.end())
{
iter->second = status;
}
}
}
void OperationStatusMap::Set(wstring const & id, ErrorCode const & error, OperationState::Enum const & state, ULONG const failureCount, ULONG const internalFailureCount)
{
{
AcquireWriteLock lock(lock_);
auto iter = map_.find(id);
if (iter != map_.end())
{
iter->second.State = state;
iter->second.LastError = error;
iter->second.FailureCount = failureCount;
iter->second.InternalFailureCount = internalFailureCount;
}
}
}
void OperationStatusMap::SetState(wstring const & id, OperationState::Enum const & state)
{
{
AcquireWriteLock lock(lock_);
auto iter = map_.find(id);
if (iter != map_.end())
{
iter->second.State = state;
}
}
}
void OperationStatusMap::SetFailureCount(wstring const & id, ULONG const failureCount)
{
{
AcquireWriteLock lock(lock_);
auto iter = map_.find(id);
if (iter != map_.end())
{
iter->second.FailureCount = failureCount;
}
}
}
void OperationStatusMap::SetError(wstring const & id, ErrorCode const & error)
{
{
AcquireWriteLock lock(lock_);
auto iter = map_.find(id);
if (iter != map_.end())
{
iter->second.LastError = error;
}
}
}
bool OperationStatusMap::TryRemove(std::wstring const & id, __out OperationStatus & status)
{
{
AcquireWriteLock lock(lock_);
auto iter = map_.find(id);
if (iter != map_.end())
{
status = iter->second;
map_.erase(iter);
return true;
}
else
{
status = OperationStatus();
return false;
}
}
}
void OperationStatusMap::WriteTo(__in TextWriter & w,FormatOptions const &) const
{
{
AcquireReadLock lock(lock_);
for(auto iter = map_.begin(); iter != map_.end(); ++iter)
{
w.Write("Id={0}, Status={1}", iter->first, iter->second);
}
}
}
| 1,839 |
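The map above serializes access through reader/writer lock guards (AcquireReadLock for lookups, AcquireWriteLock for mutations). A minimal portable sketch of the same pattern using standard C++17 primitives; the class and member names are illustrative only, not from the file above.

#include <map>
#include <mutex>
#include <shared_mutex>
#include <string>

class StatusMapSketch {
public:
    void Set(const std::wstring& id, int status) {
        std::unique_lock<std::shared_mutex> lock(mutex_);  // exclusive writer lock
        map_[id] = status;
    }
    bool TryGet(const std::wstring& id, int& status) const {
        std::shared_lock<std::shared_mutex> lock(mutex_);  // shared reader lock
        auto it = map_.find(id);
        if (it == map_.end()) return false;
        status = it->second;
        return true;
    }
private:
    mutable std::shared_mutex mutex_;
    std::map<std::wstring, int> map_;
};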
1,086 | /* in_flac - Winamp2 FLAC input plugin
* Copyright (C) 2000,2001,2002,2003,2004,2005,2006,2007 <NAME>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#if HAVE_CONFIG_H
# include <config.h>
#endif
#include <windows.h>
#include <limits.h> /* for INT_MAX */
#include <stdio.h>
#include "share/alloc.h"
#include "winamp2/in2.h"
#include "configure.h"
#include "infobox.h"
#include "tagz.h"
#define PLUGIN_VERSION "1.2.1"
static In_Module mod_; /* the input module (declared near the bottom of this file) */
static char lastfn_[MAX_PATH]; /* currently playing file (used for getting info on the current file) */
flac_config_t flac_cfg;
static stream_data_struct stream_data_;
static int paused;
static FLAC__StreamDecoder *decoder_;
static char sample_buffer_[SAMPLES_PER_WRITE * FLAC_PLUGIN__MAX_SUPPORTED_CHANNELS * (24/8) * 2];
/* (24/8) for max bytes per sample, and 2 for DSPs */
static HANDLE thread_handle = NULL; /* the handle to the decode thread */
static DWORD WINAPI DecodeThread(void *b); /* the decode thread procedure */
/*
* init/quit
*/
static void init()
{
decoder_ = FLAC__stream_decoder_new();
strcpy(lastfn_, "");
InitConfig();
ReadConfig();
InitInfobox();
}
static void quit()
{
WriteConfig();
DeinitInfobox();
FLAC_plugin__decoder_delete(decoder_);
decoder_ = 0;
}
/*
* open/close
*/
static int isourfile(char *fn) { return 0; }
static int play(char *fn)
{
LONGLONG filesize;
DWORD thread_id;
int maxlatency;
/* checks */
if (decoder_ == 0) return 1;
if (!(filesize = FileSize(fn))) return -1;
/* init decoder */
if (!FLAC_plugin__decoder_init(decoder_, fn, filesize, &stream_data_, &flac_cfg.output))
return 1;
strcpy(lastfn_, fn);
/* open output */
maxlatency = mod_.outMod->Open(stream_data_.sample_rate, stream_data_.channels, stream_data_.output_bits_per_sample, -1, -1);
if (maxlatency < 0)
{
FLAC_plugin__decoder_finish(decoder_);
return 1;
}
/* set defaults */
mod_.outMod->SetVolume(-666);
mod_.outMod->SetPan(0);
/* initialize vis stuff */
mod_.SAVSAInit(maxlatency, stream_data_.sample_rate);
mod_.VSASetInfo(stream_data_.sample_rate, stream_data_.channels);
/* set info */
mod_.SetInfo(stream_data_.average_bps, stream_data_.sample_rate/1000, stream_data_.channels, 1);
/* start playing thread */
paused = 0;
thread_handle = CreateThread(NULL, 0, DecodeThread, NULL, 0, &thread_id);
if (!thread_handle) return 1;
return 0;
}
static void stop()
{
if (thread_handle)
{
stream_data_.is_playing = false;
if (WaitForSingleObject(thread_handle, 2000) == WAIT_TIMEOUT)
{
FLAC_plugin__show_error("Error while stopping decoding thread.");
TerminateThread(thread_handle, 0);
}
CloseHandle(thread_handle);
thread_handle = NULL;
}
FLAC_plugin__decoder_finish(decoder_);
mod_.outMod->Close();
mod_.SAVSADeInit();
}
/*
* play control
*/
static void pause()
{
paused = 1;
mod_.outMod->Pause(1);
}
static void unpause()
{
paused = 0;
mod_.outMod->Pause(0);
}
static int ispaused()
{
return paused;
}
static int getlength()
{
return stream_data_.length_in_msec;
}
static int getoutputtime()
{
return mod_.outMod->GetOutputTime();
}
static void setoutputtime(int time_in_ms)
{
stream_data_.seek_to = time_in_ms;
}
static void setvolume(int volume)
{
mod_.outMod->SetVolume(volume);
}
static void setpan(int pan)
{
mod_.outMod->SetPan(pan);
}
static void eq_set(int on, char data[10], int preamp) {}
/*
* playing loop
*/
static void do_vis(char *data, int nch, int resolution, int position, unsigned samples)
{
static char vis_buffer[SAMPLES_PER_WRITE * FLAC_PLUGIN__MAX_SUPPORTED_CHANNELS];
char *ptr;
int size, count;
/*
* Winamp visuals may have problems accepting sample sizes larger than
* 16 bits, so we reduce the sample size here if necessary.
*/
switch(resolution) {
case 32:
case 24:
size = resolution / 8;
count = samples * nch;
data += size - 1;
ptr = vis_buffer;
while(count--) {
*ptr++ = data[0] ^ 0x80;
data += size;
}
data = vis_buffer;
resolution = 8;
/* fall through */
case 16:
case 8:
mod_.SAAddPCMData(data, nch, resolution, position);
mod_.VSAAddPCMData(data, nch, resolution, position);
}
}
static DWORD WINAPI DecodeThread(void *unused)
{
const unsigned channels = stream_data_.channels;
const unsigned bits_per_sample = stream_data_.bits_per_sample;
const unsigned target_bps = stream_data_.output_bits_per_sample;
const unsigned sample_rate = stream_data_.sample_rate;
const unsigned fact = channels * (target_bps/8);
while (stream_data_.is_playing)
{
/* seek needed */
if (stream_data_.seek_to != -1)
{
const int pos = FLAC_plugin__seek(decoder_, &stream_data_);
if (pos != -1) mod_.outMod->Flush(pos);
}
/* stream ended */
else if (stream_data_.eof)
{
if (!mod_.outMod->IsPlaying())
{
PostMessage(mod_.hMainWindow, WM_WA_MPEG_EOF, 0, 0);
return 0;
}
Sleep(10);
}
/* decode */
else
{
/* decode samples */
int bytes = FLAC_plugin__decode(decoder_, &stream_data_, sample_buffer_);
const int n = bytes / fact;
/* visualization */
do_vis(sample_buffer_, channels, target_bps, mod_.outMod->GetWrittenTime(), n);
/* dsp */
if (mod_.dsp_isactive())
bytes = mod_.dsp_dosamples((short*)sample_buffer_, n, target_bps, channels, sample_rate) * fact;
/* output */
while (mod_.outMod->CanWrite()<bytes && stream_data_.is_playing && stream_data_.seek_to==-1)
Sleep(20);
if (stream_data_.is_playing && stream_data_.seek_to==-1)
mod_.outMod->Write(sample_buffer_, bytes);
/* show bitrate */
if (flac_cfg.display.show_bps)
{
const int rate = FLAC_plugin__get_rate(mod_.outMod->GetWrittenTime(), mod_.outMod->GetOutputTime(), &stream_data_);
if (rate) mod_.SetInfo(rate/1000, stream_data_.sample_rate/1000, stream_data_.channels, 1);
}
}
}
return 0;
}
/*
* title formatting
*/
static T_CHAR *get_tag(const T_CHAR *tag, void *param)
{
FLAC__StreamMetadata *tags = (FLAC__StreamMetadata*)param;
char *tagname, *p;
T_CHAR *val;
if (!tag)
return 0;
/* Vorbis comment names must be ASCII, so convert 'tag' first */
tagname = safe_malloc_add_2op_(wcslen(tag), /*+*/1);
for(p=tagname;*tag;) {
if(*tag > 0x7d) {
free(tagname);
return 0;
}
else
*p++ = (char)(*tag++);
}
*p++ = '\0';
/* now get it */
val = FLAC_plugin__tags_get_tag_ucs2(tags, tagname);
free(tagname);
	/* some user-friendly cheats */
if (!val)
{
if (!wcsicmp(tag, L"ARTIST"))
{
val = FLAC_plugin__tags_get_tag_ucs2(tags, "PERFORMER");
if (!val) val = FLAC_plugin__tags_get_tag_ucs2(tags, "COMPOSER");
}
else if (!wcsicmp(tag, L"YEAR") || !wcsicmp(tag, L"DATE"))
{
val = FLAC_plugin__tags_get_tag_ucs2(tags, "YEAR_RECORDED");
if (!val) val = FLAC_plugin__tags_get_tag_ucs2(tags, "YEAR_PERFORMED");
}
}
return val;
}
static void free_tag(T_CHAR *tag, void *param)
{
(void)param;
free(tag);
}
static void format_title(const char *filename, WCHAR *title, unsigned max_size)
{
FLAC__StreamMetadata *tags;
ReadTags(filename, &tags, /*forDisplay=*/true);
tagz_format(flac_cfg.title.tag_format_w, get_tag, free_tag, tags, title, max_size);
FLAC_plugin__tags_destroy(&tags);
}
static void getfileinfo(char *filename, char *title, int *length_in_msec)
{
FLAC__StreamMetadata streaminfo;
if (!filename || !*filename) {
filename = lastfn_;
if (length_in_msec) {
*length_in_msec = stream_data_.length_in_msec;
length_in_msec = 0; /* force skip in following code */
}
}
if (!FLAC__metadata_get_streaminfo(filename, &streaminfo)) {
if (length_in_msec)
*length_in_msec = -1;
return;
}
if (title) {
static WCHAR buffer[400];
format_title(filename, buffer, 400);
WideCharToMultiByte(CP_ACP, WC_COMPOSITECHECK, buffer, -1, title, 400, NULL, NULL);
}
if (length_in_msec) {
/* with VC++ you have to spoon feed it the casting from uint64->int64->double */
FLAC__uint64 l = (FLAC__uint64)((double)(FLAC__int64)streaminfo.data.stream_info.total_samples / (double)streaminfo.data.stream_info.sample_rate * 1000.0 + 0.5);
if (l > INT_MAX)
l = INT_MAX;
*length_in_msec = (int)l;
}
}
/*
* interface
*/
void FLAC_plugin__show_error(const char *message,...)
{
char foo[512];
va_list args;
va_start(args, message);
vsprintf(foo, message, args);
va_end(args);
MessageBox(mod_.hMainWindow, foo, "FLAC Plug-in Error", MB_ICONSTOP);
}
static void about(HWND hwndParent)
{
MessageBox(hwndParent, "Winamp2 FLAC Plugin v"PLUGIN_VERSION"\nby <NAME> and X-Fixer\n\nuses libFLAC "VERSION"\nSee http://flac.sourceforge.net/\n", "About FLAC Plugin", MB_ICONINFORMATION);
}
static void config(HWND hwndParent)
{
if (DoConfig(mod_.hDllInstance, hwndParent))
WriteConfig();
}
static int infobox(char *fn, HWND hwnd)
{
DoInfoBox(mod_.hDllInstance, hwnd, fn);
return 0;
}
/*
* exported stuff
*/
static In_Module mod_ =
{
IN_VER,
"FLAC Decoder v" PLUGIN_VERSION,
0, /* hMainWindow */
0, /* hDllInstance */
"FLAC\0FLAC Audio File (*.FLAC)\0",
1, /* is_seekable */
1, /* uses output */
config,
about,
init,
quit,
getfileinfo,
infobox,
isourfile,
play,
pause,
unpause,
ispaused,
stop,
getlength,
getoutputtime,
setoutputtime,
setvolume,
setpan,
0,0,0,0,0,0,0,0,0, /* vis stuff */
0,0, /* dsp */
eq_set,
NULL, /* setinfo */
0 /* out_mod */
};
__declspec(dllexport) In_Module *winampGetInModule2()
{
return &mod_;
}
BOOL WINAPI _DllMainCRTStartup(HANDLE hInst, ULONG ul_reason_for_call, LPVOID lpReserved)
{
return TRUE;
}
| 4,383 |
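The do_vis() comment above explains why samples wider than 16 bits are narrowed before being fed to the Winamp visualizers: the most significant byte of each little-endian signed sample is kept and XOR'ed with 0x80 to convert two's-complement into the offset-binary form the visualizer expects. A minimal sketch of that narrowing step, with an assumed buffer layout and illustrative names:

#include <cstddef>

// Reduce little-endian signed PCM of `bytes_per_sample` bytes to 8-bit offset binary.
void NarrowTo8Bit(const unsigned char* pcm, std::size_t sample_count,
                  std::size_t bytes_per_sample, char* out) {
    pcm += bytes_per_sample - 1;                   // point at the MSB of the first sample
    for (std::size_t i = 0; i < sample_count; ++i) {
        out[i] = (char)(*pcm ^ 0x80);              // signed -> offset binary
        pcm += bytes_per_sample;                   // advance to the next sample's MSB
    }
}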
713 | package org.infinispan.persistence.remote.upgrade;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.infinispan.commons.dataconversion.MediaType.APPLICATION_JSON;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import org.infinispan.commons.configuration.io.ConfigurationWriter;
import org.infinispan.commons.dataconversion.internal.Json;
import org.infinispan.commons.io.StringBuilderWriter;
import org.infinispan.configuration.cache.Configuration;
import org.infinispan.configuration.cache.ConfigurationBuilder;
import org.infinispan.configuration.parsing.ConfigurationBuilderHolder;
import org.infinispan.configuration.parsing.ParserRegistry;
import org.infinispan.persistence.remote.configuration.RemoteStoreConfiguration;
import org.infinispan.persistence.remote.configuration.RemoteStoreConfigurationBuilder;
/**
* Utilities to parse and serialize {@link RemoteStoreConfiguration} to and from JSON.
*
* @since 13.0
*/
public final class SerializationUtils {
private static final ParserRegistry parserRegistry = new ParserRegistry();
private static final String PLACEHOLDER = "cache-holder";
private SerializationUtils() {
}
public static String toJson(RemoteStoreConfiguration configuration) {
ConfigurationBuilder builder = new ConfigurationBuilder();
RemoteStoreConfigurationBuilder storeBuilder = builder.persistence().addStore(RemoteStoreConfigurationBuilder.class);
storeBuilder.read(configuration);
StringBuilderWriter sw = new StringBuilderWriter();
try (ConfigurationWriter w = ConfigurationWriter.to(sw).withType(APPLICATION_JSON).build()) {
parserRegistry.serialize(w, null, builder.build());
}
return Json.read(sw.toString()).at("local-cache").at("persistence").toString();
}
public static RemoteStoreConfiguration fromJson(String json) throws IOException {
ConfigurationBuilderHolder holder = new ConfigurationBuilderHolder();
holder.newConfigurationBuilder(PLACEHOLDER);
try (ByteArrayInputStream bais = new ByteArrayInputStream(json.getBytes(UTF_8))) {
ConfigurationBuilderHolder parsedHolder = parserRegistry.parse(bais, holder, null, APPLICATION_JSON);
Configuration parsedConfig = parsedHolder.getNamedConfigurationBuilders().get(PLACEHOLDER).build();
return (RemoteStoreConfiguration) parsedConfig.persistence().stores().iterator().next();
}
}
}
| 728 |
324 | /*
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.css.compiler.ast;
import java.util.List;
/**
* Encapsulates a custom function callable from GSS stylesheet files.
*
*/
public interface GssFunction {
/**
* Returns the number of parsed arguments that this function takes,
* or {@code null} if the number of arguments may vary.
*/
Integer getNumExpectedArguments();
/**
* Processes a list of function call arguments and returns a list of
* CssNodes representing this call's output, which replace the input
* nodes in the AST. Errors will be reported to the ErrorManager.
*/
List<CssValueNode> getCallResultNodes(
List<CssValueNode> args, ErrorManager errorManager)
throws GssFunctionException;
/**
* Processes a list of strings as function arguments and returns a string
* result. Errors are reported by throwing {@link GssFunctionException}.
*/
String getCallResultString(List<String> args) throws GssFunctionException;
}
| 432 |