Dataset columns: max_stars_count (int64, 301 to 224k), text (string, lengths 6 to 1.05M), token_count (int64, 3 to 727k). Each row below is one source file (text) together with its repository star count and token count.

max_stars_count: 1,258
<reponame>CaesarHo/CustomShapeImageView package com.svgandroid; import android.graphics.Path; /** * Created by <NAME> on 19.01.2016. * <EMAIL> */ public class PathParser { /** * Parses a single SVG path and returns it as a <code>android.graphics.Path</code> object. * An example path is <code>M250,150L150,350L350,350Z</code>, which draws a triangle. * * @param pathString the SVG path, see the specification <a href="http://www.w3.org/TR/SVG/paths.html">here</a>. * @param p the Path object contains the result of path parsing</a>. */ public static void parse(String pathString, Path p) { int n = pathString.length(); ParserHelper ph = new ParserHelper(pathString, 0); ph.skipWhitespace(); float lastX = 0; float lastY = 0; float lastX1 = 0; float lastY1 = 0; float subPathStartX = 0; float subPathStartY = 0; char prevCmd = 0; while (ph.pos < n) { char cmd = pathString.charAt(ph.pos); switch (cmd) { case '-': case '+': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': if (prevCmd == 'm' || prevCmd == 'M') { cmd = (char) (((int) prevCmd) - 1); break; } else if (prevCmd == 'c' || prevCmd == 'C') { cmd = prevCmd; break; } else if (prevCmd == 'l' || prevCmd == 'L') { cmd = prevCmd; break; } default: { ph.advance(); prevCmd = cmd; } } boolean wasCurve = false; switch (cmd) { case 'M': case 'm': { float x = ph.nextFloat(); float y = ph.nextFloat(); if (cmd == 'm') { subPathStartX += x; subPathStartY += y; p.rMoveTo(x, y); lastX += x; lastY += y; } else { subPathStartX = x; subPathStartY = y; p.moveTo(x, y); lastX = x; lastY = y; } break; } case 'Z': case 'z': { p.close(); p.moveTo(subPathStartX, subPathStartY); lastX = subPathStartX; lastY = subPathStartY; lastX1 = subPathStartX; lastY1 = subPathStartY; wasCurve = true; break; } case 'L': case 'l': { float x = ph.nextFloat(); float y = ph.nextFloat(); if (cmd == 'l') { p.rLineTo(x, y); lastX += x; lastY += y; } else { p.lineTo(x, y); lastX = x; lastY = y; } break; } case 'H': case 'h': { float x = ph.nextFloat(); if (cmd == 'h') { p.rLineTo(x, 0); lastX += x; } else { p.lineTo(x, lastY); lastX = x; } break; } case 'V': case 'v': { float y = ph.nextFloat(); if (cmd == 'v') { p.rLineTo(0, y); lastY += y; } else { p.lineTo(lastX, y); lastY = y; } break; } case 'C': case 'c': { wasCurve = true; float x1 = ph.nextFloat(); float y1 = ph.nextFloat(); float x2 = ph.nextFloat(); float y2 = ph.nextFloat(); float x = ph.nextFloat(); float y = ph.nextFloat(); if (cmd == 'c') { x1 += lastX; x2 += lastX; x += lastX; y1 += lastY; y2 += lastY; y += lastY; } p.cubicTo(x1, y1, x2, y2, x, y); lastX1 = x2; lastY1 = y2; lastX = x; lastY = y; break; } case 'S': case 's': { wasCurve = true; float x2 = ph.nextFloat(); float y2 = ph.nextFloat(); float x = ph.nextFloat(); float y = ph.nextFloat(); if (cmd == 's') { x2 += lastX; x += lastX; y2 += lastY; y += lastY; } float x1 = 2 * lastX - lastX1; float y1 = 2 * lastY - lastY1; p.cubicTo(x1, y1, x2, y2, x, y); lastX1 = x2; lastY1 = y2; lastX = x; lastY = y; break; } case 'A': case 'a': { float rx = ph.nextFloat(); float ry = ph.nextFloat(); float theta = ph.nextFloat(); int largeArc = (int) ph.nextFloat(); int sweepArc = (int) ph.nextFloat(); float x = ph.nextFloat(); float y = ph.nextFloat(); drawArc(p, lastX, lastY, x, y, rx, ry, theta, largeArc, sweepArc); lastX = x; lastY = y; break; } } if (!wasCurve) { lastX1 = lastX; lastY1 = lastY; } ph.skipWhitespace(); } } private static void drawArc(Path p, float lastX, float lastY, float x, float y, float rx, float ry, float theta, int 
largeArc, int sweepArc) { // todo - not implemented yet, may be very hard to do using Android drawing facilities. } }
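The class above notes that the path string M250,150L150,350L350,350Z draws a triangle. As a minimal usage sketch, assuming the ParserHelper companion class from the same package is available and the code runs on Android (android.graphics.Path), parsing that example could look like the following; the class and variable names are illustrative only:

import android.graphics.Path;
import com.svgandroid.PathParser;

// Hypothetical usage of the parser above; not part of the dataset row.
class PathParserDemo {
    static Path triangle() {
        Path p = new Path();
        PathParser.parse("M250,150L150,350L350,350Z", p);
        return p; // closed triangle through (250,150), (150,350), (350,350)
    }
}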
token_count: 4,769

max_stars_count: 365
<gh_stars>100-1000
# This example assumes we have a mesh object selected

import bpy
import bmesh

# Get the active mesh
me = bpy.context.object.data

# Get a BMesh representation
bm = bmesh.new()   # create an empty BMesh
bm.from_mesh(me)   # fill it in from a Mesh

# Modify the BMesh, can do anything here...
for v in bm.verts:
    v.co.x += 1.0

# Finish up, write the bmesh back to the mesh
bm.to_mesh(me)
bm.free()  # free and prevent further access
token_count: 165

max_stars_count: 793
// RUN: %clang_cc1 -std=c99 -Wmissing-field-initializers -Wmissing-braces -verify %s // Tests that using {0} in struct initialization or assignment is supported struct foo { int x; int y; }; struct bar { struct foo a; struct foo b; }; struct A { int a; }; struct B { struct A a; }; struct C { struct B b; }; struct D { struct C c; int n; }; int main(void) { struct foo f = { 0 }; // no-warning struct foo g = { 9 }; // expected-warning {{missing field 'y' initializer}} struct foo h = { 9, 9 }; // no-warning struct bar i = { 0 }; // no-warning struct bar j = { 0, 0 }; // expected-warning {{suggest braces around initialization of subobject}} expected-warning {{missing field 'b' initializer}} struct bar k = { { 9, 9 }, { 9, 9 } }; // no-warning struct bar l = { { 9, 9 }, { 0 } }; // no-warning struct bar m = { { 0 }, { 0 } }; // no-warning struct bar n = { { 0 }, { 9, 9 } }; // no-warning struct bar o = { { 9 }, { 9, 9 } }; // expected-warning {{missing field 'y' initializer}} struct C p = { 0 }; // no-warning struct C q = { 9 }; // warning suppressed for struct with single element struct D r = { 9 }; // expected-warning {{suggest braces around initialization of subobject}} expected-warning {{missing field 'n' initializer}} f = (struct foo ) { 0 }; // no-warning g = (struct foo ) { 9 }; // expected-warning {{missing field 'y' initializer}} h = (struct foo ) { 9, 9 }; // no-warning i = (struct bar) { 0 }; // no-warning j = (struct bar) { 0, 0 }; // expected-warning {{suggest braces around initialization of subobject}} expected-warning {{missing field 'b' initializer}} k = (struct bar) { { 9, 9 }, { 9, 9 } }; // no-warning l = (struct bar) { { 9, 9 }, { 0 } }; // no-warning m = (struct bar) { { 0 }, { 0 } }; // no-warning n = (struct bar) { { 0 }, { 9, 9 } }; // no-warning o = (struct bar) { { 9 }, { 9, 9 } }; // expected-warning {{missing field 'y' initializer}} p = (struct C) { 0 }; // no-warning q = (struct C) { 9 }; // warning suppressed for struct with single element r = (struct D) { 9 }; // expected-warning {{suggest braces around initialization of subobject}} expected-warning {{missing field 'n' initializer}} return 0; }
token_count: 698

max_stars_count: 778
<reponame>clazaro/Kratos<filename>kratos/modeler/serial_model_part_combinator_modeler.cpp<gh_stars>100-1000 // | / | // ' / __| _` | __| _ \ __| // . \ | ( | | ( |\__ ` // _|\_\_| \__,_|\__|\___/ ____/ // Multi-Physics // // License: BSD License // Kratos default license: kratos/license.txt // // Main authors: <NAME> // // System includes // External includes // Project includes #include "includes/define.h" #include "modeler/serial_model_part_combinator_modeler.h" #include "utilities/single_import_model_part.h" #include "utilities/model_part_combination_utilities.h" namespace Kratos { Modeler::Pointer SerialModelPartCombinatorModeler::Create( Model& rModel, const Parameters ModelParameters ) const { return Kratos::make_shared<SerialModelPartCombinatorModeler>(rModel, ModelParameters); } /***********************************************************************************/ /***********************************************************************************/ void SerialModelPartCombinatorModeler::SetupModelPart() { // Import parameters const auto& r_model_import_settings = mParameters["model_import_settings"]; const auto& r_input_type = r_model_import_settings["input_type"]; // Multiple import auto combine_param = Parameters(R"({ "model_parts_list" : [] })"); combine_param.AddValue("combined_model_part_name", mParameters["model_part_name"]); const auto filenames_list = r_model_import_settings["input_filename"].GetStringArray(); auto copy_model_part_import_settings = Parameters(r_model_import_settings); copy_model_part_import_settings.RemoveValue("input_filename"); copy_model_part_import_settings.AddString("input_filename", ""); for (std::size_t i = 0; i < filenames_list.size(); ++i) { const std::string aux_name = "AUX_MODELPART" + std::to_string(i); combine_param["model_parts_list"].Append(aux_name); auto& r_aux_model_part = mpModel->CreateModelPart(aux_name); copy_model_part_import_settings["input_filename"].SetString(filenames_list[i]); const std::string& input_type = r_input_type.IsArray() ? r_input_type[i].GetString() : r_input_type.GetString(); SingleImportModelPart::Import(r_aux_model_part, r_model_import_settings, input_type); } ModelPartCombinationUtilities(*mpModel).CombineModelParts(combine_param); } } // namespace Kratos
token_count: 934

max_stars_count: 1,451
<filename>pandora-core/src/main/java/tech/linjiang/pandora/ui/fragment/FileFragment.java package tech.linjiang.pandora.ui.fragment; import android.app.Activity; import android.content.Intent; import android.os.Bundle; import androidx.annotation.Nullable; import androidx.appcompat.widget.Toolbar; import android.view.MenuItem; import android.view.View; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.Locale; import tech.linjiang.pandora.core.R; import tech.linjiang.pandora.sandbox.Sandbox; import tech.linjiang.pandora.ui.GeneralDialog; import tech.linjiang.pandora.ui.item.FileItem; import tech.linjiang.pandora.ui.item.TitleItem; import tech.linjiang.pandora.ui.recyclerview.BaseItem; import tech.linjiang.pandora.ui.recyclerview.UniversalAdapter; import tech.linjiang.pandora.util.FileUtil; import tech.linjiang.pandora.util.Utils; /** * Created by linjiang on 04/06/2018. */ public class FileFragment extends BaseListFragment { private File file; @Override public void onViewCreated(View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); file = (File) getArguments().getSerializable(PARAM1); getToolbar().setTitle(file.getName()); getToolbar().getMenu().add(0,0,0,R.string.pd_name_delete_key) .setIcon(R.drawable.pd_delete) .setShowAsAction(MenuItem.SHOW_AS_ACTION_ALWAYS); getToolbar().setOnMenuItemClickListener(new Toolbar.OnMenuItemClickListener() { @Override public boolean onMenuItemClick(MenuItem item) { if (item.getOrder() == 0) { GeneralDialog.build(CODE2) .title(R.string.pd_help_title) .message(R.string.pd_make_sure, true) .positiveButton(R.string.pd_ok) .negativeButton(R.string.pd_cancel) .show(FileFragment.this); } return true; } }); refresh(); } @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (resultCode == Activity.RESULT_OK) { if (requestCode == CODE1) { refresh(); getTargetFragment().onActivityResult(getTargetRequestCode(), Activity.RESULT_OK, null); } else if (requestCode == CODE2) { FileUtil.deleteDirectory(file); getTargetFragment().onActivityResult(getTargetRequestCode(), Activity.RESULT_OK, null); onBackPressed(); } } } private void refresh() { List<File> files = Sandbox.getFiles(file); if (Utils.isNotEmpty(files)) { List<BaseItem> data = new ArrayList<>(); data.add(new TitleItem(String.format(Locale.getDefault(), "%d FILES", files.size()))); for (int i = 0; i < files.size(); i++) { data.add(new FileItem(files.get(i))); } getAdapter().setItems(data); getAdapter().setListener(new UniversalAdapter.OnItemClickListener() { @Override public void onItemClick(int position, BaseItem item) { Bundle bundle = new Bundle(); if (item instanceof FileItem) { bundle.putSerializable(PARAM1, (File) item.data); if (((File) item.data).isDirectory()) { launch(FileFragment.class, bundle, CODE1); } else { launch(FileAttrFragment.class, bundle, CODE1); } } } }); } else { showError(null); } } }
token_count: 1,880

max_stars_count: 930
<filename>mate-core/mate-starter-gray/src/main/java/vip/mate/core/gray/config/GrayConfig.java
package vip.mate.core.gray.config;

import feign.RequestInterceptor;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.cloud.loadbalancer.annotation.LoadBalancerClients;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import vip.mate.core.gray.fegin.GrayFeignRequestInterceptor;

/**
 * Gray-release (canary routing) configuration class.
 *
 * @author madi
 * @date 2021-03-02 18:30
 */
@Configuration
@ConditionalOnProperty(value = "gray.rule.enabled", havingValue = "true")
@LoadBalancerClients(defaultConfiguration = GrayLoadbalancerConfig.class)
public class GrayConfig {

    @Bean
    public RequestInterceptor grayFeignRequestInterceptor() {
        return new GrayFeignRequestInterceptor();
    }
}
token_count: 278

max_stars_count: 7,737
extern zend_class_entry *phalcon_mvc_model_relation_ce; ZEPHIR_INIT_CLASS(Phalcon_Mvc_Model_Relation); PHP_METHOD(Phalcon_Mvc_Model_Relation, __construct); PHP_METHOD(Phalcon_Mvc_Model_Relation, getFields); PHP_METHOD(Phalcon_Mvc_Model_Relation, getForeignKey); PHP_METHOD(Phalcon_Mvc_Model_Relation, getIntermediateFields); PHP_METHOD(Phalcon_Mvc_Model_Relation, getIntermediateModel); PHP_METHOD(Phalcon_Mvc_Model_Relation, getIntermediateReferencedFields); PHP_METHOD(Phalcon_Mvc_Model_Relation, getOption); PHP_METHOD(Phalcon_Mvc_Model_Relation, getOptions); PHP_METHOD(Phalcon_Mvc_Model_Relation, getParams); PHP_METHOD(Phalcon_Mvc_Model_Relation, getType); PHP_METHOD(Phalcon_Mvc_Model_Relation, getReferencedFields); PHP_METHOD(Phalcon_Mvc_Model_Relation, getReferencedModel); PHP_METHOD(Phalcon_Mvc_Model_Relation, isForeignKey); PHP_METHOD(Phalcon_Mvc_Model_Relation, isThrough); PHP_METHOD(Phalcon_Mvc_Model_Relation, isReusable); PHP_METHOD(Phalcon_Mvc_Model_Relation, setIntermediateRelation); zend_object *zephir_init_properties_Phalcon_Mvc_Model_Relation(zend_class_entry *class_type); ZEND_BEGIN_ARG_INFO_EX(arginfo_phalcon_mvc_model_relation___construct, 0, 0, 4) ZEND_ARG_TYPE_INFO(0, type, IS_LONG, 0) ZEND_ARG_TYPE_INFO(0, referencedModel, IS_STRING, 0) ZEND_ARG_INFO(0, fields) ZEND_ARG_INFO(0, referencedFields) #if PHP_VERSION_ID >= 80000 ZEND_ARG_TYPE_INFO_WITH_DEFAULT_VALUE(0, options, IS_ARRAY, 0, "[]") #else ZEND_ARG_ARRAY_INFO(0, options, 0) #endif ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_phalcon_mvc_model_relation_getfields, 0, 0, 0) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_phalcon_mvc_model_relation_getforeignkey, 0, 0, 0) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_phalcon_mvc_model_relation_getintermediatefields, 0, 0, 0) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_mvc_model_relation_getintermediatemodel, 0, 0, IS_STRING, 0) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_phalcon_mvc_model_relation_getintermediatereferencedfields, 0, 0, 0) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_phalcon_mvc_model_relation_getoption, 0, 0, 1) ZEND_ARG_TYPE_INFO(0, name, IS_STRING, 0) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_mvc_model_relation_getoptions, 0, 0, IS_ARRAY, 0) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_phalcon_mvc_model_relation_getparams, 0, 0, 0) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_mvc_model_relation_gettype, 0, 0, IS_LONG, 0) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_phalcon_mvc_model_relation_getreferencedfields, 0, 0, 0) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_mvc_model_relation_getreferencedmodel, 0, 0, IS_STRING, 0) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_mvc_model_relation_isforeignkey, 0, 0, _IS_BOOL, 0) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_mvc_model_relation_isthrough, 0, 0, _IS_BOOL, 0) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_WITH_RETURN_TYPE_INFO_EX(arginfo_phalcon_mvc_model_relation_isreusable, 0, 0, _IS_BOOL, 0) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_phalcon_mvc_model_relation_setintermediaterelation, 0, 0, 3) ZEND_ARG_INFO(0, intermediateFields) ZEND_ARG_TYPE_INFO(0, intermediateModel, IS_STRING, 0) ZEND_ARG_INFO(0, intermediateReferencedFields) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_phalcon_mvc_model_relation_zephir_init_properties_phalcon_mvc_model_relation, 0, 0, 0) 
ZEND_END_ARG_INFO() ZEPHIR_INIT_FUNCS(phalcon_mvc_model_relation_method_entry) { PHP_ME(Phalcon_Mvc_Model_Relation, __construct, arginfo_phalcon_mvc_model_relation___construct, ZEND_ACC_PUBLIC|ZEND_ACC_CTOR) #if PHP_VERSION_ID >= 80000 PHP_ME(Phalcon_Mvc_Model_Relation, getFields, arginfo_phalcon_mvc_model_relation_getfields, ZEND_ACC_PUBLIC) #else PHP_ME(Phalcon_Mvc_Model_Relation, getFields, NULL, ZEND_ACC_PUBLIC) #endif #if PHP_VERSION_ID >= 80000 PHP_ME(Phalcon_Mvc_Model_Relation, getForeignKey, arginfo_phalcon_mvc_model_relation_getforeignkey, ZEND_ACC_PUBLIC) #else PHP_ME(Phalcon_Mvc_Model_Relation, getForeignKey, NULL, ZEND_ACC_PUBLIC) #endif #if PHP_VERSION_ID >= 80000 PHP_ME(Phalcon_Mvc_Model_Relation, getIntermediateFields, arginfo_phalcon_mvc_model_relation_getintermediatefields, ZEND_ACC_PUBLIC) #else PHP_ME(Phalcon_Mvc_Model_Relation, getIntermediateFields, NULL, ZEND_ACC_PUBLIC) #endif PHP_ME(Phalcon_Mvc_Model_Relation, getIntermediateModel, arginfo_phalcon_mvc_model_relation_getintermediatemodel, ZEND_ACC_PUBLIC) #if PHP_VERSION_ID >= 80000 PHP_ME(Phalcon_Mvc_Model_Relation, getIntermediateReferencedFields, arginfo_phalcon_mvc_model_relation_getintermediatereferencedfields, ZEND_ACC_PUBLIC) #else PHP_ME(Phalcon_Mvc_Model_Relation, getIntermediateReferencedFields, NULL, ZEND_ACC_PUBLIC) #endif PHP_ME(Phalcon_Mvc_Model_Relation, getOption, arginfo_phalcon_mvc_model_relation_getoption, ZEND_ACC_PUBLIC) PHP_ME(Phalcon_Mvc_Model_Relation, getOptions, arginfo_phalcon_mvc_model_relation_getoptions, ZEND_ACC_PUBLIC) #if PHP_VERSION_ID >= 80000 PHP_ME(Phalcon_Mvc_Model_Relation, getParams, arginfo_phalcon_mvc_model_relation_getparams, ZEND_ACC_PUBLIC) #else PHP_ME(Phalcon_Mvc_Model_Relation, getParams, NULL, ZEND_ACC_PUBLIC) #endif PHP_ME(Phalcon_Mvc_Model_Relation, getType, arginfo_phalcon_mvc_model_relation_gettype, ZEND_ACC_PUBLIC) #if PHP_VERSION_ID >= 80000 PHP_ME(Phalcon_Mvc_Model_Relation, getReferencedFields, arginfo_phalcon_mvc_model_relation_getreferencedfields, ZEND_ACC_PUBLIC) #else PHP_ME(Phalcon_Mvc_Model_Relation, getReferencedFields, NULL, ZEND_ACC_PUBLIC) #endif PHP_ME(Phalcon_Mvc_Model_Relation, getReferencedModel, arginfo_phalcon_mvc_model_relation_getreferencedmodel, ZEND_ACC_PUBLIC) PHP_ME(Phalcon_Mvc_Model_Relation, isForeignKey, arginfo_phalcon_mvc_model_relation_isforeignkey, ZEND_ACC_PUBLIC) PHP_ME(Phalcon_Mvc_Model_Relation, isThrough, arginfo_phalcon_mvc_model_relation_isthrough, ZEND_ACC_PUBLIC) PHP_ME(Phalcon_Mvc_Model_Relation, isReusable, arginfo_phalcon_mvc_model_relation_isreusable, ZEND_ACC_PUBLIC) PHP_ME(Phalcon_Mvc_Model_Relation, setIntermediateRelation, arginfo_phalcon_mvc_model_relation_setintermediaterelation, ZEND_ACC_PUBLIC) PHP_FE_END };
token_count: 2,779

max_stars_count: 1,253
<filename>dp/Tree Distances Cses/Tree_Distances_II.cpp
/*
 In this problem we have to calculate for a given tree,
 the sum of distances of every node to all other nodes in the tree
*/
#include<bits/stdc++.h>
using namespace std;
#define int long long int
#define SZ 200005

int n, m, k, x;
vector<int> adj[SZ];
int S[SZ];
int in[SZ];
int out[SZ];

// Calculating Subtree DP
void dfs_in(int u, int p) {
    S[u] = 1;
    for (int v : adj[u]) {
        if (v != p) {
            dfs_in(v, u);
            S[u] += S[v];
            in[u] += in[v] + S[v];
        }
    }
}

// Applying Rerooting Technique
void dfs_out(int u, int p) {
    int store = 0;
    for (int v : adj[u]) {
        if (v != p)
            store += in[v] + S[v] * 2;
    }
    for (int v : adj[u]) {
        if (v != p) {
            out[v] = (out[u] + 1 * (n - S[u] + 1)) + (store - (in[v] + S[v] * 2));
            dfs_out(v, u);
        }
    }
}

signed main() {
    int u, v;
    // Input number of nodes
    cin >> n;
    for (int i = 0; i < n - 1; i++) {
        // Input Bidirectional edge u->v
        cin >> u >> v;
        adj[u].push_back(v);
        adj[v].push_back(u);
    }
    dfs_in(1, 0);
    dfs_out(1, 0);
    for (int i = 1; i <= n; i++)
        cout << in[i] + out[i] << " ";
}
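A brief note on the rerooting step above, stated in general terms because this file derives the answer through the intermediate in/out arrays rather than a closed form: when the root moves from a node u to an adjacent child v, every node inside v's subtree gets one edge closer and the remaining n − S(v) nodes get one edge farther, so the classical identity is ans(v) = ans(u) + n − 2·S(v), with S(v) the subtree size computed in dfs_in.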
token_count: 573

max_stars_count: 1,091
<reponame>Shinkirou/onos<gh_stars>1000+ /* * Copyright 2016-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.store.service; import java.util.Collection; import java.util.Collections; import java.util.concurrent.CompletableFuture; import java.util.function.Consumer; import org.onosproject.core.ApplicationId; /** * Interface for all distributed primitives. */ public interface DistributedPrimitive { /** * Type of distributed primitive. */ enum Type { /** * Map with strong consistency semantics. */ CONSISTENT_MAP, /** * Map with eventual consistency semantics. */ EVENTUALLY_CONSISTENT_MAP, /** * Consistent Multimap. */ CONSISTENT_MULTIMAP, /** * Distributed set. */ SET, /** * Tree map. */ CONSISTENT_TREEMAP, /** * Atomic counter. */ COUNTER, /** * Numeric ID generator. */ ID_GENERATOR, /** * Atomic counter map. */ COUNTER_MAP, /** * Atomic value. */ VALUE, /** * Distributed work queue. */ WORK_QUEUE, /** * Document tree. */ DOCUMENT_TREE, /** * Distributed topic. */ TOPIC, /** * Leader elector. */ LEADER_ELECTOR, /** * Transaction Context. */ TRANSACTION_CONTEXT, /** * Distributed lock. */ LOCK, } /** * Status of distributed primitive. */ enum Status { /** * Signifies a state wherein the primitive is operating correctly and is capable of meeting the advertised * consistency and reliability guarantees. */ ACTIVE, /** * Signifies a state wherein the primitive is temporarily incapable of providing the advertised * consistency properties. */ SUSPENDED, /** * Signifies a state wherein the primitive has been shutdown and therefore cannot perform its functions. */ INACTIVE } /** * Default timeout for primitive operations. */ long DEFAULT_OPERATION_TIMEOUT_MILLIS = 15000L; /** * Returns the name of this primitive. * @return name */ String name(); /** * Returns the type of primitive. * @return primitive type */ Type primitiveType(); /** * Returns the application owning this primitive. * @return application id */ default ApplicationId applicationId() { return null; } /** * Purges state associated with this primitive. * <p> * Implementations can override and provide appropriate clean up logic for purging * any state state associated with the primitive. Whether modifications made within the * destroy method have local or global visibility is left unspecified. * @return {@code CompletableFuture} that is completed when the operation completes */ default CompletableFuture<Void> destroy() { return CompletableFuture.completedFuture(null); } /** * Registers a listener to be called when the primitive's status changes. * @param listener The listener to be called when the status changes. */ default void addStatusChangeListener(Consumer<Status> listener) {} /** * Unregisters a previously registered listener to be called when the primitive's status changes. * @param listener The listener to unregister */ default void removeStatusChangeListener(Consumer<Status> listener) {} /** * Returns the collection of status change listeners previously registered. 
* @return collection of status change listeners */ default Collection<Consumer<Status>> statusChangeListeners() { return Collections.emptyList(); } }
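Since almost every method in the interface above has a default body, a concrete primitive only needs to supply name() and primitiveType(). A minimal, hypothetical sketch follows (NoOpPrimitive is an illustrative name, not an ONOS class; the org.onosproject.store.service package shown above is assumed to be on the classpath):

import org.onosproject.store.service.DistributedPrimitive;

// Hypothetical minimal implementation; all other methods keep their default bodies.
final class NoOpPrimitive implements DistributedPrimitive {
    @Override
    public String name() {
        return "no-op";
    }

    @Override
    public Type primitiveType() {
        return Type.VALUE;
    }
}

A caller could then register a listener with addStatusChangeListener(status -> ...) to react to ACTIVE, SUSPENDED and INACTIVE transitions.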
token_count: 1,788

max_stars_count: 879
package org.zstack.storage.ceph.primary; import org.zstack.header.query.ExpandedQueries; import org.zstack.header.query.ExpandedQuery; import org.zstack.header.search.Inventory; import org.zstack.header.search.Parent; import org.zstack.header.storage.primary.PrimaryStorageInventory; import org.zstack.header.tag.SystemTagInventory; import org.zstack.storage.ceph.CephConstants; import java.util.ArrayList; import java.util.Collection; import java.util.List; /** * Created by frank on 7/28/2015. */ @Inventory(mappingVOClass = CephPrimaryStorageVO.class, collectionValueOfMethod = "valueOf1", parent = {@Parent(inventoryClass = PrimaryStorageInventory.class, type = CephConstants.CEPH_PRIMARY_STORAGE_TYPE)}) @ExpandedQueries({ @ExpandedQuery(expandedField = "mons", inventoryClass = CephPrimaryStorageMonInventory.class, foreignKey = "uuid", expandedInventoryKey = "primaryStorageUuid"), @ExpandedQuery(expandedField = "pools", inventoryClass = CephPrimaryStoragePoolInventory.class, foreignKey = "uuid", expandedInventoryKey = "primaryStorageUuid") }) public class CephPrimaryStorageInventory extends PrimaryStorageInventory { private List<CephPrimaryStorageMonInventory> mons; private List<CephPrimaryStoragePoolInventory> pools; private String fsid; public List<CephPrimaryStorageMonInventory> getMons() { return mons; } public void setMons(List<CephPrimaryStorageMonInventory> mons) { this.mons = mons; } public List<CephPrimaryStoragePoolInventory> getPools() { return pools; } public void setPools(List<CephPrimaryStoragePoolInventory> pools) { this.pools = pools; } public CephPrimaryStorageInventory() { } public CephPrimaryStorageInventory(CephPrimaryStorageVO vo) { super(vo); setMons(CephPrimaryStorageMonInventory.valueOf(vo.getMons())); setPools(CephPrimaryStoragePoolInventory.valueOf(vo.getPools())); setFsid(vo.getFsid()); } public static CephPrimaryStorageInventory valueOf(CephPrimaryStorageVO vo) { return new CephPrimaryStorageInventory(vo); } public static List<CephPrimaryStorageInventory> valueOf1(Collection<CephPrimaryStorageVO> vos) { List<CephPrimaryStorageInventory> invs = new ArrayList<CephPrimaryStorageInventory>(); for (CephPrimaryStorageVO vo : vos) { invs.add(valueOf(vo)); } return invs; } public String getFsid() { return fsid; } public void setFsid(String fsid) { this.fsid = fsid; } }
token_count: 981

max_stars_count: 2,151
{
  // chrome-extension://fbjakikfhfdajcamjleinfciajelkpek/
  "key": "<KEY>",
  "name": "Chrome Webstore Gallery Widget app",
  "description": "App for showing Chrome Webstore Gallery widget and handling extension installs from the widget",
  "version": "0.2",
  "manifest_version": 2,
  "display_in_launcher": false,
  "web_accessible_resources": [
    "cws_widget/app_installer.js",
    "cws_widget/cws_webview_client.js",
    "cws_widget/cws_widget_container.css",
    "cws_widget/cws_widget_container.js",
    "cws_widget/cws_widget_container_error_dialog.js"
  ],
  "icons": {
    "16": "app/icons/icon_16.png",
    "32": "app/icons/icon_32.png",
    "128": "app/icons/icon_128.png"
  },
  "app": {
    "background": {
      "scripts": ["app/background.js"]
    },
    "content_security_policy": "style-src 'self' blob: filesystem: chrome://resources; img-src 'self' blob: filesystem: chrome://theme chrome://resources;"
  },
  "permissions": [
    "chrome://resources/",
    "chrome://theme/",
    "fileManagerPrivate",
    "metricsPrivate",
    "webstoreWidgetPrivate",
    "webview"
  ]
}
token_count: 431

max_stars_count: 381
<filename>jgiven-tests/src/main/java/com/tngtech/jgiven/tags/IssueDescriptionGenerator.java
package com.tngtech.jgiven.tags;

import java.lang.annotation.Annotation;

import com.tngtech.jgiven.annotation.TagDescriptionGenerator;
import com.tngtech.jgiven.config.TagConfiguration;

public class IssueDescriptionGenerator implements TagDescriptionGenerator {
    private static final String ISSUE_URL = "https://github.com/TNG/JGiven/issues/";

    @Override
    public String generateDescription( TagConfiguration tagConfiguration, Annotation annotation, Object value ) {
        String valueAsString = String.valueOf( value );
        return String.format( "Scenarios of <a href='%s%s'>Issue %s</a>",
                ISSUE_URL, valueAsString.substring( 1 ), valueAsString );
    }
}
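From the implementation above, the description embeds the issue URL and drops the first character of the tag value (typically a leading '#'). A hypothetical stand-alone check, assumed to live in the same package as the generator; the generator ignores its first two parameters, so null is passed purely for illustration:

public class IssueDescriptionGeneratorDemo {
    public static void main(String[] args) {
        IssueDescriptionGenerator gen = new IssueDescriptionGenerator();
        // Prints: Scenarios of <a href='https://github.com/TNG/JGiven/issues/123'>Issue #123</a>
        System.out.println(gen.generateDescription(null, null, "#123"));
    }
}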
token_count: 244

max_stars_count: 1,056
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.csl.api; import java.util.Set; import javax.swing.Icon; import org.netbeans.api.annotations.common.CheckForNull; import org.netbeans.api.annotations.common.NonNull; import org.netbeans.api.annotations.common.NullAllowed; import org.netbeans.api.project.Project; import org.netbeans.modules.parsing.spi.indexing.support.QuerySupport; import org.openide.filesystems.FileObject; /** * Helper to locate types for Goto Type, Go To Symbol, etc. * * @author <NAME> */ public interface IndexSearcher { @NonNull Set<? extends Descriptor> getTypes(@NullAllowed Project project, @NonNull String textForQuery, @NonNull QuerySupport.Kind searchType, @NonNull Helper helper); @NonNull Set<? extends Descriptor> getSymbols(@NullAllowed Project project, @NonNull String textForQuery, @NonNull QuerySupport.Kind searchType, @NonNull Helper helper); public abstract class Descriptor { @NonNull public abstract ElementHandle getElement(); /** * Return the simple name of the type (not including qualifiers). The entries * will typically be sorted by this key. * * @return The name of this type, e.g. for java.util.List it would be "List" */ public abstract String getSimpleName(); /** * <p>Return the "outer" name of the type, if any. For Java for example, this would be * the outer class if this type is an inner class.</p> * <p>Do not confuse with {@link #getContextName}!</p> * * @return The name of the outer class of this type, if any, otherwise return null */ public abstract String getOuterName(); /** * Return the name of this type, along with the outer name. This might * for example be "Entry in Map" for java.util.Map.Entry * * @return The outer and inner name of this type, e.g. for java.util.Map.Entry it would be "Entry in Map" */ public abstract String getTypeName(); /** * Provide additional context for the type name. This would typically be * the fully qualified name, minus the name part. Return null if there is * no applicable context. For example, "java.util.List" would return "java.util" * here. * * @return A description of the context of the type, such as the fully qualified name * minus the name part */ public abstract String getContextName(); /** * Return an icon that should be shown for this type descriptor. The icon * should give a visual indication of the type of match, e.g. class versus * module. A default icon will be supplied if this method returns null. * * @return An Icon to be shown on the left hand side with the type entry */ public abstract Icon getIcon(); /** * Return the display name of the project containing this type (if any). 
* * @return The display name of the project containing the type declaration */ public abstract String getProjectName(); /** * Return an icon that is applicable for the project defining the type. * Generally, this should be the same as the project icon. This method will only * be calld if {@link #getProjectName} returned a non-null value. * * @return A project icon corresponding to the project defining this type */ public abstract Icon getProjectIcon(); /** * Return a FileObject for this type. * This will only be called when the dialog is opening the type or when * the user selects the file, so it does not have to be as fast as the other * descriptor attributes. * * @return The file object where the type is defined */ public abstract FileObject getFileObject(); /** * Return the document offset corresponding to the type. * This will only be called when the dialog is opening the type, so * does not have to be as fast as the other descriptor attributes. * * @todo This method is intended to replace the open() call below. * * @return The document offset of the type declaration in the declaration file */ public abstract int getOffset(); /** * Open the type declaration in the editor. * @todo Should we nuke this method and only have type declarations return * their offsets? I looked at the Java implementation and it's leveraging * some utility methods to open the type declaration; I have similar methods * in Ruby. It might be more convenient */ public abstract void open(); @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof Descriptor)) { return false; } final Descriptor other = (Descriptor) obj; final FileObject thisFo = getFileObject(); final FileObject otherFo = other.getFileObject(); return thisFo == null ? otherFo == null : thisFo.equals(otherFo); } @Override public int hashCode() { final FileObject fo = getFileObject(); return fo == null ? 0 : fo.hashCode(); } } public interface Helper { @CheckForNull Icon getIcon(@NonNull ElementHandle element); void open(@NonNull FileObject fileObject, @NonNull ElementHandle element); } }
token_count: 2,427

max_stars_count: 2,151
<reponame>rio-31/android_frameworks_base-1
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.android.systemui.recents.events.activity;

import com.android.systemui.recents.events.EventBus;

/**
 * This is sent when the Recents activity configuration has changed.
 */
public class ConfigurationChangedEvent extends EventBus.AnimatedEvent {

    public final boolean fromMultiWindow;
    public final boolean fromDeviceOrientationChange;
    public final boolean fromDisplayDensityChange;
    public final boolean hasStackTasks;

    public ConfigurationChangedEvent(boolean fromMultiWindow, boolean fromDeviceOrientationChange,
            boolean fromDisplayDensityChange, boolean hasStackTasks) {
        this.fromMultiWindow = fromMultiWindow;
        this.fromDeviceOrientationChange = fromDeviceOrientationChange;
        this.fromDisplayDensityChange = fromDisplayDensityChange;
        this.hasStackTasks = hasStackTasks;
    }
}
token_count: 435

max_stars_count: 6,989
<gh_stars>1000+
#include "executor.h"
token_count: 17

max_stars_count: 8,747
<gh_stars>1000+
// Copyright 2015-2021 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

/**
 * This file will be included in `tasks.c` file, thus, it must NOT be included
 * by any (other) file.
 * The functions below only consist in getters for the static variables in
 * `tasks.c` file.
 * The only source files that should call these functions are the ones in
 * `/additions` directory.
 */

#if ( configENABLE_TASK_SNAPSHOT == 1 )

    UBaseType_t pxTCBGetSize ( void )
    {
        return sizeof(TCB_t);
    }

    ListItem_t* pxTCBGetStateListItem ( void *pxTCB )
    {
        return &(((TCB_t*)pxTCB)->xStateListItem);
    }

    StackType_t* pxTCBGetStartOfStack ( void *pxTCB )
    {
        return (StackType_t*) ((TCB_t*)pxTCB)->pxStack;
    }

    StackType_t* pxTCBGetTopOfStack ( void *pxTCB )
    {
        return (StackType_t*) ((TCB_t*)pxTCB)->pxTopOfStack;
    }

    StackType_t* pxTCBGetEndOfStack ( void *pxTCB )
    {
        return (StackType_t*) ((TCB_t*)pxTCB)->pxEndOfStack;
    }

    List_t* pxListGetReadyTask ( UBaseType_t idx )
    {
        return &( pxReadyTasksLists[idx] );
    }

    List_t* pxListGetReadyPendingTask ( UBaseType_t idx )
    {
        return &( xPendingReadyList[idx] );
    }

    List_t* pxGetDelayedTaskList ( void )
    {
        return pxDelayedTaskList;
    }

    List_t* pxGetOverflowDelayedTaskList ( void )
    {
        return pxOverflowDelayedTaskList;
    }

    List_t* pxGetTasksWaitingTermination ( void )
    {
        return &xTasksWaitingTermination;
    }

    List_t* pxGetSuspendedTaskList ( void )
    {
        return &xSuspendedTaskList;
    }

#endif
token_count: 754

max_stars_count: 318
<filename>src/vw/Core/tests/TestTypeDeduction.cxx // __BEGIN_LICENSE__ // Copyright (c) 2006-2013, United States Government as represented by the // Administrator of the National Aeronautics and Space Administration. All // rights reserved. // // The NASA Vision Workbench is licensed under the Apache License, // Version 2.0 (the "License"); you may not use this file except in // compliance with the License. You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // __END_LICENSE__ #include <test/Helpers.h> #include <vw/Core/TypeDeduction.h> using namespace vw; using namespace std; // These tests generated with this python script (requires >= python 2.7): #if 0 import itertools import sys assert len(sys.argv) > 1, 'Usage: %s <yes|no>\nArgument determines whether the standard types are used' assert sys.argv[1] in ('yes', 'no'), 'Usage: %s <yes|no>\nArgument determines whether the standard types are used' USE_STANDARD = True if sys.argv[1] == 'yes' else False # In increasing order of preference if USE_STANDARD: typelist = ['char', 'signed char', 'unsigned char', 'short', 'unsigned short', 'int', 'unsigned int', 'long', 'unsigned long', 'long long', 'unsigned long long', 'float', 'double', 'long double'] else: typelist = ['%sint%s' % (u,s) for s in 8, 16, 32, 64 for u in '', 'u'] + ['float32', 'float64', 'user_t'] def prefer(a,b): return a if typelist.index(a) > typelist.index(b) else b def note(a,b): if USE_STANDARD: p = prefer(a,b) return ' /*int*/' if p != 'int' and prefer('int', p) == 'int' else '' else: return '' pad = 1 + max(map(len, typelist)) for l,r in itertools.combinations_with_replacement(typelist, 2): print 'TRIAL(%s,%s,%s%s);' % tuple([i.rjust(pad) for i in (l,r,prefer(l,r))] + [note(l,r)]) #endif #define DEDUCE(a,b,c) (boost::is_same<TypeDeductionHelper<a,b>::type, c>::value) #define TRIAL_(a,b,c) do {\ EXPECT_TRUE(DEDUCE(a,b,c)) \ << "[" << #a << "] and [" << #b << "] produced [" << gi::GetTypeName<TypeDeductionHelper<a,b>::type>() \ << "] expected [" << #c << "]"; \ } while (0) #define TRIAL(a,b,c) do {\ TRIAL_(a,b,c); \ TRIAL_(b,a,c); \ } while (0) struct user_t {}; struct user2_t {}; // This should be a compile error, so it's commented out (duplicate index) //namespace vw { namespace core { namespace detail { // _VW_INTERNAL_TYPE_DEDUCTION(user_t, TypeDeductionIndex<int8>::value); //}}} TEST(TypeDeduction, VW) { TRIAL( int8, int8, int8); TRIAL( int8, uint8, uint8); TRIAL( int8, int16, int16); TRIAL( int8, uint16, uint16); TRIAL( int8, int32, int32); TRIAL( int8, uint32, uint32); TRIAL( int8, int64, int64); TRIAL( int8, uint64, uint64); TRIAL( int8, float32, float32); TRIAL( int8, float64, float64); TRIAL( int8, user_t, user_t); TRIAL( uint8, uint8, uint8); TRIAL( uint8, int16, int16); TRIAL( uint8, uint16, uint16); TRIAL( uint8, int32, int32); TRIAL( uint8, uint32, uint32); TRIAL( uint8, int64, int64); TRIAL( uint8, uint64, uint64); TRIAL( uint8, float32, float32); TRIAL( uint8, float64, float64); TRIAL( uint8, user_t, user_t); TRIAL( int16, int16, int16); TRIAL( int16, uint16, uint16); TRIAL( int16, int32, int32); TRIAL( int16, uint32, uint32); TRIAL( int16, int64, int64); TRIAL( int16, uint64, uint64); TRIAL( int16, float32, 
float32); TRIAL( int16, float64, float64); TRIAL( int16, user_t, user_t); TRIAL( uint16, uint16, uint16); TRIAL( uint16, int32, int32); TRIAL( uint16, uint32, uint32); TRIAL( uint16, int64, int64); TRIAL( uint16, uint64, uint64); TRIAL( uint16, float32, float32); TRIAL( uint16, float64, float64); TRIAL( uint16, user_t, user_t); TRIAL( int32, int32, int32); TRIAL( int32, uint32, uint32); TRIAL( int32, int64, int64); TRIAL( int32, uint64, uint64); TRIAL( int32, float32, float32); TRIAL( int32, float64, float64); TRIAL( int32, user_t, user_t); TRIAL( uint32, uint32, uint32); TRIAL( uint32, int64, int64); TRIAL( uint32, uint64, uint64); TRIAL( uint32, float32, float32); TRIAL( uint32, float64, float64); TRIAL( uint32, user_t, user_t); TRIAL( int64, int64, int64); TRIAL( int64, uint64, uint64); TRIAL( int64, float32, float32); TRIAL( int64, float64, float64); TRIAL( int64, user_t, user_t); TRIAL( uint64, uint64, uint64); TRIAL( uint64, float32, float32); TRIAL( uint64, float64, float64); TRIAL( uint64, user_t, user_t); TRIAL( float32, float32, float32); TRIAL( float32, float64, float64); TRIAL( float32, user_t, user_t); TRIAL( float64, float64, float64); TRIAL( float64, user_t, user_t); TRIAL( user_t, user_t, user_t); // and finally two different user-made types. this should be a compile error, so it's commented out. //TRIAL( user_t, user2_t, user_t); } TEST(TypeDeduction, Standard) { // The commented out ints are the places where C++ does automatic int promotion, and we don't TRIAL( char, char, char /*int*/); TRIAL( char, signed char, signed char /*int*/); TRIAL( char, unsigned char, unsigned char /*int*/); TRIAL( char, short, short /*int*/); TRIAL( char, unsigned short, unsigned short /*int*/); TRIAL( char, int, int); TRIAL( char, unsigned int, unsigned int); TRIAL( char, long, long); TRIAL( char, unsigned long, unsigned long); TRIAL( char, float, float); TRIAL( char, double, double); TRIAL( char, long double, long double); TRIAL( signed char, signed char, signed char /*int*/); TRIAL( signed char, unsigned char, unsigned char /*int*/); TRIAL( signed char, short, short /*int*/); TRIAL( signed char, unsigned short, unsigned short /*int*/); TRIAL( signed char, int, int); TRIAL( signed char, unsigned int, unsigned int); TRIAL( signed char, long, long); TRIAL( signed char, unsigned long, unsigned long); TRIAL( signed char, float, float); TRIAL( signed char, double, double); TRIAL( signed char, long double, long double); TRIAL( unsigned char, unsigned char, unsigned char /*int*/); TRIAL( unsigned char, short, short /*int*/); TRIAL( unsigned char, unsigned short, unsigned short /*int*/); TRIAL( unsigned char, int, int); TRIAL( unsigned char, unsigned int, unsigned int); TRIAL( unsigned char, long, long); TRIAL( unsigned char, unsigned long, unsigned long); TRIAL( unsigned char, float, float); TRIAL( unsigned char, double, double); TRIAL( unsigned char, long double, long double); TRIAL( short, short, short /*int*/); TRIAL( short, unsigned short, unsigned short /*int*/); TRIAL( short, int, int); TRIAL( short, unsigned int, unsigned int); TRIAL( short, long, long); TRIAL( short, unsigned long, unsigned long); TRIAL( short, float, float); TRIAL( short, double, double); TRIAL( short, long double, long double); TRIAL( unsigned short, unsigned short, unsigned short /*int*/); TRIAL( unsigned short, int, int); TRIAL( unsigned short, unsigned int, unsigned int); TRIAL( unsigned short, long, long); TRIAL( unsigned short, unsigned long, unsigned long); TRIAL( unsigned short, float, float); TRIAL( unsigned 
short, double, double); TRIAL( unsigned short, long double, long double); TRIAL( int, int, int); TRIAL( int, unsigned int, unsigned int); TRIAL( int, long, long); TRIAL( int, unsigned long, unsigned long); TRIAL( int, float, float); TRIAL( int, double, double); TRIAL( int, long double, long double); TRIAL( unsigned int, unsigned int, unsigned int); TRIAL( unsigned int, long, long); TRIAL( unsigned int, unsigned long, unsigned long); TRIAL( unsigned int, float, float); TRIAL( unsigned int, double, double); TRIAL( unsigned int, long double, long double); TRIAL( long, long, long); TRIAL( long, unsigned long, unsigned long); TRIAL( long, float, float); TRIAL( long, double, double); TRIAL( long, long double, long double); TRIAL( unsigned long, unsigned long, unsigned long); TRIAL( unsigned long, float, float); TRIAL( unsigned long, double, double); TRIAL( unsigned long, long double, long double); TRIAL( float, float, float); TRIAL( float, double, double); TRIAL( float, long double, long double); TRIAL( double, double, double); TRIAL( double, long double, long double); TRIAL( long double, long double, long double); #if defined(BOOST_HAS_LONG_LONG) TRIAL( char, long long, long long); TRIAL( char, unsigned long long, unsigned long long); TRIAL( signed char, long long, long long); TRIAL( signed char, unsigned long long, unsigned long long); TRIAL( unsigned char, long long, long long); TRIAL( unsigned char, unsigned long long, unsigned long long); TRIAL( short, long long, long long); TRIAL( short, unsigned long long, unsigned long long); TRIAL( unsigned short, long long, long long); TRIAL( unsigned short, unsigned long long, unsigned long long); TRIAL( int, long long, long long); TRIAL( int, unsigned long long, unsigned long long); TRIAL( unsigned int, long long, long long); TRIAL( unsigned int, unsigned long long, unsigned long long); TRIAL( long, long long, long long); TRIAL( long, unsigned long long, unsigned long long); TRIAL( unsigned long, long long, long long); TRIAL( unsigned long, unsigned long long, unsigned long long); TRIAL( long long, long long, long long); TRIAL( long long, unsigned long long, unsigned long long); TRIAL( long long, float, float); TRIAL( long long, double, double); TRIAL( long long, long double, long double); TRIAL( unsigned long long, unsigned long long, unsigned long long); TRIAL( unsigned long long, float, float); TRIAL( unsigned long long, double, double); TRIAL( unsigned long long, long double, long double); #endif }
token_count: 6,549

max_stars_count: 572
import os
import os.path
import shutil
from pathlib import Path

# Create the directory if it does not already exist
if not os.path.exists('aulinha_1'):
    os.mkdir('aulinha_1')
os.chdir('aulinha_1')

# Create the xpto file
Path('xpto.txt').touch()

for el in range(1, 4):
    shutil.copy('xpto.txt', f'xpto_{el}.txt')

# print(os.getcwd())

# Assertion
assert len(os.listdir('.')) == 4
token_count: 166

max_stars_count: 335
<filename>axcell/scripts/guess_main.py
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved

from pathlib import Path
import re
import sys
import codecs

doccls = re.compile(r"\s*\\documentclass")
docbeg = re.compile(r"\s*\\begin\s*\{\s*document\s*\}")
title = re.compile(r"\s*\\(icml)?title\s*\{(?P<title>[^%}]*)")
aux = re.compile(r"(rebuttal\s+|instructions\s+(for\s+\\confname|.*proceedings)|(supplementary|supplemental)\s+materials?|appendix|author\s+guidelines|ieeetran\.cls|formatting\s+instructions)")


def aux_title(t):
    t = t.strip().lower()
    return bool(aux.search(t))


def calc_priority(path):
    priority = 0
    if path.name.lower() == "ms.tex":
        return 30
    with codecs.open(path, 'r', encoding='utf8', errors='ignore') as f:
        for line in f:
            if doccls.match(line):
                priority += 10
                break
        for line in f:
            m = title.match(line)
            if m:
                priority += 5
                t = m["title"]
                if aux_title(t):
                    priority = 5
                break
    return priority


def guess_main(path):
    path = Path(path)
    files = sorted(path.glob("*.tex"), key=lambda p: p.stem.lower())
    if len(files) > 1:
        with_priority = [(f, calc_priority(f)) for f in files]
        with_priority = sorted(with_priority, key=lambda fp: fp[1], reverse=True)
        files = [fp[0] for fp in with_priority]
    return files[0] if len(files) else None


if __name__ == '__main__':
    if len(sys.argv) != 2:
        print(f"Usage:\n\t{sys.argv[0]} DIR", file=sys.stderr)
        exit(1)
    main = guess_main(sys.argv[1])
    if not main:
        print("Unable to find any suitable tex file", file=sys.stderr)
        exit(1)
    else:
        print(main)
token_count: 863

max_stars_count: 1,121
<reponame>adamkulawik/factory<filename>shortages-prediction-model/src/main/java/io/dddbyexamples/factory/shortages/prediction/notification/QualityTasks.java
package io.dddbyexamples.factory.shortages.prediction.notification;

/**
 * Created by michal on 02.02.2017.
 */
public interface QualityTasks {
    void increasePriorityFor(String productRefNo);
}
token_count: 122

max_stars_count: 892
<gh_stars>100-1000
{
  "schema_version": "1.2.0",
  "id": "GHSA-hfgg-5j8v-8j94",
  "modified": "2022-05-01T07:45:16Z",
  "published": "2022-05-01T07:45:16Z",
  "aliases": [
    "CVE-2006-7148"
  ],
  "details": "PHP remote file inclusion vulnerability in includes/bb_usage_stats.php in maluinfo 206.2.38 for Brazilian PHPBB allows remote attackers to execute arbitrary PHP code via the phpbb_root_path parameter. NOTE: this might be the same issues as CVE-2006-4893.",
  "severity": [],
  "affected": [],
  "references": [
    {
      "type": "ADVISORY",
      "url": "https://nvd.nist.gov/vuln/detail/CVE-2006-7148"
    },
    {
      "type": "WEB",
      "url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/29516"
    },
    {
      "type": "WEB",
      "url": "http://securityreason.com/securityalert/2380"
    },
    {
      "type": "WEB",
      "url": "http://www.securityfocus.com/archive/1/448639/100/0/threaded"
    },
    {
      "type": "WEB",
      "url": "http://www.securityfocus.com/bid/20507"
    }
  ],
  "database_specific": {
    "cwe_ids": [],
    "severity": "HIGH",
    "github_reviewed": false
  }
}
token_count: 520

max_stars_count: 5,169
{
  "name": "FritzVision",
  "version": "7.0.1",
  "summary": "Official Fritz SDK for Swift 5.0 and Objective-C",
  "homepage": "https://fritz.ai",
  "license": {
    "type": "Apache 2.0",
    "file": "LICENSE.md"
  },
  "authors": {
    "<NAME>": "<EMAIL>"
  },
  "source": {
    "git": "https://github.com/fritzlabs/fritz-ai-ios-sdk.git",
    "tag": "7.0.1"
  },
  "requires_arc": true,
  "platforms": {
    "ios": "10.0"
  },
  "swift_versions": "5.0",
  "weak_frameworks": "CoreML",
  "pod_target_xcconfig": {
    "EXCLUDED_ARCHS[sdk=iphonesimulator*]": "arm64"
  },
  "user_target_xcconfig": {
    "EXCLUDED_ARCHS[sdk=iphonesimulator*]": "arm64"
  },
  "dependencies": {
    "FritzCore": [
      "7.0.1"
    ],
    "FritzManagedModel": [
      "7.0.1"
    ],
    "FritzCoreMLHelpers": [
      "7.0.1"
    ]
  },
  "source_files": "Source/FritzVision/**/*.{h,swift,c,mlmodel}",
  "resources": [
    "Source/FritzVision/ObjectDetection/Helpers/Anchors.csv"
  ],
  "swift_version": "5.0"
}
token_count: 484

max_stars_count: 1,125
<reponame>karenzone/elasticsearch<gh_stars>1000+ /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.painless; import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; import java.util.HashMap; import java.util.Map; import java.util.function.DoubleSupplier; import java.util.function.Function; /** * ScriptImpl can be used as a {@link SearchScript} * to run a previously compiled Painless script. */ final class ScriptImpl extends SearchScript { /** * The Painless script that can be run. */ private final GenericElasticsearchScript script; /** * A map that can be used to access input parameters at run-time. */ private final Map<String, Object> variables; /** * Looks up the {@code _score} from {@link #scorer} if {@code _score} is used, otherwise returns {@code 0.0}. */ private final DoubleSupplier scoreLookup; /** * Looks up the {@code ctx} from the {@link #variables} if {@code ctx} is used, otherwise return {@code null}. */ private final Function<Map<String, Object>, Map<?, ?>> ctxLookup; /** * Current _value for aggregation * @see #setNextAggregationValue(Object) */ private Object aggregationValue; /** * Creates a ScriptImpl for the a previously compiled Painless script. * @param script The previously compiled Painless script. * @param vars The initial variables to run the script with. * @param lookup The lookup to allow search fields to be available if this is run as a search script. */ ScriptImpl(GenericElasticsearchScript script, Map<String, Object> vars, SearchLookup lookup, LeafReaderContext leafContext) { super(null, lookup, leafContext); this.script = script; this.variables = new HashMap<>(); if (vars != null) { variables.putAll(vars); } LeafSearchLookup leafLookup = getLeafLookup(); if (leafLookup != null) { variables.putAll(leafLookup.asMap()); } scoreLookup = script.needs_score() ? this::getScore : () -> 0.0; ctxLookup = script.needsCtx() ? variables -> (Map<?, ?>) variables.get("ctx") : variables -> null; } @Override public Map<String, Object> getParams() { return variables; } @Override public void setNextVar(final String name, final Object value) { variables.put(name, value); } @Override public void setNextAggregationValue(Object value) { this.aggregationValue = value; } @Override public Object run() { return script.execute(variables, scoreLookup.getAsDouble(), getDoc(), aggregationValue, ctxLookup.apply(variables)); } @Override public double runAsDouble() { return ((Number)run()).doubleValue(); } @Override public long runAsLong() { return ((Number)run()).longValue(); } }
token_count: 1,279

max_stars_count: 877
import org.checkerframework.checker.regex.qual.Regex;
import org.checkerframework.checker.regex.util.RegexUtil;

public class Test {
    void foo(String simple) {
        if (RegexUtil.isRegex(simple)) {
            @Regex String in = simple;
        }
    }
}
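The RegexUtil.isRegex(simple) guard is what lets the @Regex assignment above type-check under the Checker Framework's Regex Checker. As a hedged counter-example (illustrative, not from the dataset): without that guard the same assignment is expected to be rejected, since an arbitrary String is not known to be a syntactically valid regular expression.

import org.checkerframework.checker.regex.qual.Regex;

public class TestUnchecked {
    void foo(String simple) {
        // Expected to be flagged by the Regex Checker: 'simple' has not been
        // verified to be a valid regex before the assignment.
        @Regex String in = simple;
    }
}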
token_count: 97

max_stars_count: 6,098
from builtins import range
import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.glm import H2OGeneralizedLinearEstimator

# check varimp for Binomial, Multinomial, Regression directly from model.output._varimp instead of the
# model._output._standardized_coefficients.
def testvarimp():
    print("Checking variable importance for binomials....")
    training_data = h2o.import_file(pyunit_utils.locate("smalldata/logreg/benign.csv"))
    Y = 3
    X = [0, 1, 2, 4, 5, 6, 7, 8, 9, 10]
    buildModelCheckVarimp(training_data, X, Y, "binomial")

    print("Checking variable importance for multinomials....")
    train = h2o.import_file(path=pyunit_utils.locate("smalldata/iris/iris_wheader.csv"))
    myY = "class"
    mX = list(range(0,4))
    buildModelCheckVarimp(train, mX, myY, "multinomial")

    print("Checking variable importance for regression....")
    h2o_data = h2o.import_file(path=pyunit_utils.locate("smalldata/prostate/prostate_complete.csv.zip"))
    myY = "GLEASON"
    myX = ["ID","AGE","RACE","CAPSULE","DCAPS","PSA","VOL","DPROS"]
    buildModelCheckVarimp(h2o_data, myX, myY, "gaussian")

def buildModelCheckVarimp(training_frame, x_indices, y_index, family):
    model = H2OGeneralizedLinearEstimator(family=family)
    model.train(training_frame=training_frame, x=x_indices, y=y_index)
    varimp = model.varimp()
    print(varimp)
    standardized_coeff = model._model_json["output"]["standardized_coefficient_magnitudes"]
    # check to make sure varimp and standardized coefficient magnitudes agree
    for ind in range(len(varimp)):
        assert abs(standardized_coeff.cell_values[ind][1]-varimp[ind][1]) < 1e-6, \
            "Expected value: {0}, actual: {1}".format(standardized_coeff.cell_values[ind][1], varimp[ind][1])

if __name__ == "__main__":
    pyunit_utils.standalone_test(testvarimp)
else:
    testvarimp()
token_count: 731

max_stars_count: 1,351
/** @file Stats of TLS @section license License Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ #pragma once #include <unordered_map> #include "records/I_RecProcess.h" #include "SSLDiags.h" /* Stats should only be accessed using these macros */ #define SSL_INCREMENT_DYN_STAT(x) RecIncrRawStat(ssl_rsb, nullptr, (int)x, 1) #define SSL_DECREMENT_DYN_STAT(x) RecIncrRawStat(ssl_rsb, nullptr, (int)x, -1) #define SSL_SET_COUNT_DYN_STAT(x, count) RecSetRawStatCount(ssl_rsb, x, count) #define SSL_INCREMENT_DYN_STAT_EX(x, y) RecIncrRawStat(ssl_rsb, nullptr, (int)x, y) #define SSL_CLEAR_DYN_STAT(x) \ do { \ RecSetRawStatSum(ssl_rsb, (x), 0); \ RecSetRawStatCount(ssl_rsb, (x), 0); \ } while (0) #define SSL_CLR_ERR_INCR_DYN_STAT(vc, x, fmt, ...) \ do { \ SSLVCDebug((vc), fmt, ##__VA_ARGS__); \ RecIncrRawStat(ssl_rsb, nullptr, (int)x, 1); \ } while (0) enum SSL_Stats { ssl_origin_server_expired_cert_stat, ssl_user_agent_expired_cert_stat, ssl_origin_server_revoked_cert_stat, ssl_user_agent_revoked_cert_stat, ssl_origin_server_unknown_cert_stat, ssl_user_agent_unknown_cert_stat, ssl_origin_server_cert_verify_failed_stat, ssl_user_agent_cert_verify_failed_stat, ssl_origin_server_bad_cert_stat, ssl_user_agent_bad_cert_stat, ssl_origin_server_decryption_failed_stat, ssl_user_agent_decryption_failed_stat, ssl_origin_server_wrong_version_stat, ssl_user_agent_wrong_version_stat, ssl_origin_server_other_errors_stat, ssl_user_agent_other_errors_stat, ssl_origin_server_unknown_ca_stat, ssl_user_agent_unknown_ca_stat, ssl_user_agent_sessions_stat, ssl_user_agent_session_hit_stat, ssl_user_agent_session_miss_stat, ssl_user_agent_session_timeout_stat, ssl_total_handshake_time_stat, ssl_total_attempts_handshake_count_in_stat, ssl_total_success_handshake_count_in_stat, ssl_total_tickets_created_stat, ssl_total_tickets_verified_stat, ssl_total_tickets_verified_old_key_stat, // verified with old key. ssl_total_ticket_keys_renewed_stat, // number of keys renewed. 
ssl_total_tickets_not_found_stat, ssl_total_tickets_renewed_stat, ssl_total_dyn_def_tls_record_count, ssl_total_dyn_max_tls_record_count, ssl_total_dyn_redo_tls_record_count, ssl_session_cache_hit, ssl_origin_session_cache_hit, ssl_session_cache_miss, ssl_origin_session_cache_miss, ssl_session_cache_eviction, ssl_session_cache_lock_contention, ssl_session_cache_new_session, ssl_early_data_received_count, // how many times we received early data ssl_origin_session_reused_count, /* error stats */ ssl_error_syscall, ssl_error_ssl, ssl_error_async, ssl_sni_name_set_failure, ssl_total_attempts_handshake_count_out_stat, ssl_total_success_handshake_count_out_stat, /* ocsp stapling stats */ ssl_ocsp_revoked_cert_stat, ssl_ocsp_unknown_cert_stat, ssl_ocsp_refreshed_cert_stat, ssl_ocsp_refresh_cert_failure_stat, /* SSL/TLS versions */ ssl_total_sslv3, ssl_total_tlsv1, ssl_total_tlsv11, ssl_total_tlsv12, ssl_total_tlsv13, ssl_cipher_stats_start = 100, ssl_cipher_stats_end = 300, Ssl_Stat_Count }; extern RecRawStatBlock *ssl_rsb; extern std::unordered_map<std::string, intptr_t> cipher_map; // Initialize SSL statistics. void SSLInitializeStatistics();
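A minimal usage sketch for the macros above. The header name, function name, and call site are assumptions for illustration; it presumes the code runs inside Traffic Server after ssl_rsb has been registered.

#include "SSLStats.h" // assumed header name for the file above

// Hypothetical call site: record the outcome of an inbound TLS handshake
// using the enum values and macros defined above.
static void
record_inbound_handshake(bool success)
{
  SSL_INCREMENT_DYN_STAT(ssl_total_attempts_handshake_count_in_stat);
  if (success) {
    SSL_INCREMENT_DYN_STAT(ssl_total_success_handshake_count_in_stat);
  } else {
    SSL_INCREMENT_DYN_STAT(ssl_error_ssl);
  }
}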
<filename>Database/Equities/Countries/Luxembourg/Industries/Household & Personal Products.json
{
    "COC.F": {
        "short_name": "L'OCCITANE INTERNATIONAL",
        "long_name": "L'Occitane International S.A.",
        "summary": "L'Occitane International S.A., together with its subsidiaries, designs, manufactures, and markets various natural and organic ingredient based cosmetics and well-being products. It offers perfumes, soaps, and fragrant products. The company also provides skincare and haircare products. In addition, it engages in the general warehousing business. The company markets and sells its products under the L'OCCITANE en Provence, Melvita, Erborian, L'Occitane au Br\u00e9sil, LimeLife by Alcone, and ELEMIS brand names to final customers, as well as intermediates, such as distributors, wholesalers, TV show channels, and travel retailers. As of March 31, 2020, it operated 3,486 retail locations with 1,608 own retail stores. It also sells its products through online. L'Occitane International S.A. has operations in Japan, the United States, Hong Kong, China, France, Russia, the United Kingdom, Luxembourg, Brazil, Taiwan, and internationally. The company was founded in 1976 and is headquartered in Luxembourg City, Luxembourg. L'Occitane International S.A. is a subsidiary of L'Occitane Groupe S.A.",
        "currency": "EUR",
        "sector": "Consumer Defensive",
        "industry": "Household & Personal Products",
        "exchange": "FRA",
        "market": "dr_market",
        "country": "Luxembourg",
        "state": null,
        "city": "Luxembourg City",
        "zipcode": "1724",
        "website": "http://group.loccitane.com",
        "market_cap": "Mid Cap"
    },
    "LCCTF": {
        "short_name": "L'OCCITANE INTERNATIONAL S.A.",
        "long_name": "L'Occitane International S.A.",
        "summary": "L'Occitane International S.A., together with its subsidiaries, designs, manufactures, and markets various natural and organic ingredient based cosmetics and well-being products. It offers perfumes, soaps, and fragrant products. The company also provides skincare and haircare products. In addition, it engages in the general warehousing business. The company markets and sells its products under the L'OCCITANE en Provence, Melvita, Erborian, L'Occitane au Br\u00e9sil, LimeLife by Alcone, and ELEMIS brand names to final customers, as well as intermediates, such as distributors, wholesalers, TV show channels, and travel retailers. As of March 31, 2020, it operated 3,486 retail locations with 1,608 own retail stores. It also sells its products through online. L'Occitane International S.A. has operations in Japan, the United States, Hong Kong, China, France, Russia, the United Kingdom, Luxembourg, Brazil, Taiwan, and internationally. The company was founded in 1976 and is headquartered in Luxembourg City, Luxembourg. L'Occitane International S.A. is a subsidiary of L'Occitane Groupe S.A.",
        "currency": "USD",
        "sector": "Consumer Defensive",
        "industry": "Household & Personal Products",
        "exchange": "PNK",
        "market": "us_market",
        "country": "Luxembourg",
        "state": null,
        "city": "Luxembourg City",
        "zipcode": "1724",
        "website": "http://group.loccitane.com",
        "market_cap": "Mid Cap"
    }
}
/**************************************************************************** * libs/libnx/nxtk/nxtk_setvisibility.c * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. The * ASF licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * ****************************************************************************/ /**************************************************************************** * Included Files ****************************************************************************/ #include <nuttx/config.h> #include <stdlib.h> #include <errno.h> #include <debug.h> #include <nuttx/nx/nx.h> #include <nuttx/nx/nxtk.h> #include "nxtk.h" /**************************************************************************** * Public Functions ****************************************************************************/ /**************************************************************************** * Name: nxtk_setvisibility * * Description: * Select if the window is visible or hidden. A hidden window is still * present and will update normally, but will not be visible on the * display until it is unhidden. * * Input Parameters: * hfwnd - The window to be modified * hide - True: Window will be hidden; false: Window will be visible * * Returned Value: * OK on success; ERROR on failure with errno set appropriately * ****************************************************************************/ int nxtk_setvisibility(NXTKWINDOW hfwnd, bool hide) { return nx_setvisibility((NXWINDOW)hfwnd, hide); }
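A small usage sketch for the function above, assuming hfwnd was returned by an earlier nxtk_openwindow() call; the helper name and the redraw step are illustrative and not part of NuttX.

#include <nuttx/nx/nxtk.h>

/* Hide the framed window, update its content, then make it visible again.
 * Returns OK on success; ERROR (with errno set) if either call fails, as
 * documented for nxtk_setvisibility() above. */

static int redraw_while_hidden(NXTKWINDOW hfwnd)
{
  int ret = nxtk_setvisibility(hfwnd, true);   /* true: hide the window */
  if (ret < 0)
    {
      return ret;
    }

  /* ... redraw or resize the window content here ... */

  return nxtk_setvisibility(hfwnd, false);     /* false: show the window */
}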
package com.xiaojukeji.carrera.config.v4.pproxy; import com.google.common.collect.Maps; import com.xiaojukeji.carrera.config.ConfigurationValidator; import com.xiaojukeji.carrera.utils.ConfigUtils; import org.apache.commons.lang3.StringUtils; import java.util.Map; import java.util.Set; public class TopicConfiguration implements ConfigurationValidator { private static final int DEFAULT_MAX_TPS = ConfigUtils.getDefaultConfig( "com.xiaojukeji.carrera.config.v4.pproxy.TopicConfiguration.maxTps", 1024); private static final int DEFAULT_TOTAL_MAX_TPS = ConfigUtils.getDefaultConfig( "com.xiaojukeji.carrera.config.v4.pproxy.TopicConfiguration.totalMaxTps", 1024); private String brokerCluster; private Map<String/*proxyCluster*/, Set<String>> proxies = Maps.newHashMap(); private int totalMaxTps = DEFAULT_TOTAL_MAX_TPS; private int maxTps = DEFAULT_MAX_TPS; public int getMaxTps() { return maxTps; } public void setMaxTps(int maxTps) { this.maxTps = maxTps; } public int getTotalMaxTps() { return totalMaxTps; } public void setTotalMaxTps(int totalMaxTps) { this.totalMaxTps = totalMaxTps; } public String getBrokerCluster() { return brokerCluster; } public void setBrokerCluster(String brokerCluster) { this.brokerCluster = brokerCluster; } public Map<String, Set<String>> getProxies() { return proxies; } public void setProxies(Map<String, Set<String>> proxies) { this.proxies = proxies; } @Override public boolean validate() { return StringUtils.isNotEmpty(brokerCluster) && totalMaxTps > 0; } @Override public String toString() { return "TopicConfiguration{" + ", brokerCluster='" + brokerCluster + '\'' + ", proxies=" + proxies + ", totalMaxTps=" + totalMaxTps + ", maxTps=" + maxTps + '}'; } }
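A hedged wiring sketch for this configuration bean, shown as a fragment; the cluster name and TPS numbers are made-up values, and the code that would normally load them from a configuration source is omitted.

TopicConfiguration topicConf = new TopicConfiguration();
topicConf.setBrokerCluster("rmq-cluster-01");   // illustrative cluster name
topicConf.setTotalMaxTps(2048);
topicConf.setMaxTps(512);
// validate() requires a non-empty broker cluster and a positive total TPS.
if (!topicConf.validate()) {
    throw new IllegalArgumentException("invalid topic configuration: " + topicConf);
}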
/* ---------------------------------------------------------------------------- * GTSAM Copyright 2010, Georgia Tech Research Corporation, * Atlanta, Georgia 30332-0415 * All Rights Reserved * Authors: <NAME>, et al. (see THANKS for the full author list) * See LICENSE for the license information * -------------------------------------------------------------------------- */ /** * @file Cal3DS2.cpp * @date Feb 28, 2010 * @author ydjian * @author <NAME> */ #include <gtsam/base/Matrix.h> #include <gtsam/base/Vector.h> #include <gtsam/geometry/Cal3DS2.h> #include <gtsam/geometry/Point2.h> #include <gtsam/geometry/Point3.h> namespace gtsam { /* ************************************************************************* */ std::ostream& operator<<(std::ostream& os, const Cal3DS2& cal) { os << (Cal3DS2_Base&)cal; return os; } /* ************************************************************************* */ void Cal3DS2::print(const std::string& s_) const { Base::print(s_); } /* ************************************************************************* */ bool Cal3DS2::equals(const Cal3DS2& K, double tol) const { const Cal3DS2_Base* base = dynamic_cast<const Cal3DS2_Base*>(&K); return Cal3DS2_Base::equals(*base, tol); } /* ************************************************************************* */ Cal3DS2 Cal3DS2::retract(const Vector& d) const { return Cal3DS2(vector() + d); } /* ************************************************************************* */ Vector Cal3DS2::localCoordinates(const Cal3DS2& T2) const { return T2.vector() - vector(); } } /* ************************************************************************* */
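A minimal sketch of the retract/localCoordinates pair defined above, assuming GTSAM's usual nine-parameter Cal3DS2 constructor (fx, fy, s, u0, v0, k1, k2, p1, p2); the numbers are arbitrary.

#include <gtsam/geometry/Cal3DS2.h>
#include <iostream>

int main() {
  using namespace gtsam;

  Cal3DS2 K(500.0, 500.0, 0.0, 320.0, 240.0, 0.1, -0.05, 0.001, 0.002);

  Vector d(9);
  d << 1, -1, 0, 2, -2, 0, 0, 0, 0;       // perturbation in the 9-d tangent space

  Cal3DS2 K2 = K.retract(d);              // K "plus" d
  Vector back = K.localCoordinates(K2);   // should recover d, since both maps are linear here

  std::cout << (back - d).norm() << std::endl;  // expected to be ~0
  return 0;
}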
<reponame>Safal08/Hacktoberfest-1<gh_stars>100-1000 { "word": "Trip", "definitions": [ "A journey or excursion, especially for pleasure.", "The distance from start to finish of a race.", "A stumble or fall due to catching one's foot on something.", "A mistake.", "A hallucinatory experience caused by taking a psychedelic drug, especially LSD.", "An exciting or stimulating experience.", "A self-indulgent attitude or activity.", "A device that activates or disconnects a mechanism, circuit, etc.", "A light, lively movement of a person's feet." ], "parts-of-speech": "Noun" }
/* * Copyright (c) 2015 Spotify AB. * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.spotify.heroic.http; import com.google.common.collect.ImmutableList; import com.spotify.heroic.HeroicConfigurationContext; import com.spotify.heroic.HeroicModule; import com.spotify.heroic.dagger.LoadingComponent; public class Module implements HeroicModule { @Override public Runnable setup(final LoadingComponent loading) { final HeroicConfigurationContext config = loading.heroicConfigurationContext(); return () -> { config.resources(core -> { final HttpResourcesComponent w = DaggerHttpResourcesComponent.builder().coreComponent(core).build(); // @formatter:off return ImmutableList.of( w.heroicResource(), w.writeResource(), w.utilsResource(), w.statusResource(), w.renderResource(), w.queryResource(), w.metadataResource(), w.clusterResource(), w.parserResource() ); // @formatter:on }); }; } }
/* * Copyright (c) Baidu Inc. All rights reserved. * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.baidu.brcc.service.impl; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyVararg; import static org.mockito.Mockito.when; import java.util.Arrays; import java.util.Calendar; import java.util.GregorianCalendar; import java.util.HashMap; import org.junit.Before; import org.junit.Test; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.slf4j.Logger; import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; import com.baidu.brcc.dao.ConfigChangeLogMapper; import com.baidu.brcc.dao.base.BaseMapper; import com.baidu.brcc.dao.base.CommonMapper; import com.baidu.brcc.domain.ConfigChangeLogExample; import com.baidu.brcc.domain.ConfigChangeLogWithBLOBs; import com.baidu.brcc.domain.ConfigGroup; import com.baidu.brcc.service.ConfigGroupService; public class ConfigChangeLogServiceImplTest { @Mock Logger LOGGER; @Mock ThreadPoolTaskExecutor executor; @Mock ConfigChangeLogMapper configChangeLogMapper; @Mock ConfigGroupService configGroupService; @Mock CommonMapper commonMapper; @InjectMocks ConfigChangeLogServiceImpl configChangeLogServiceImpl; @Before public void setUp() { MockitoAnnotations.initMocks(this); } @Test public void testGetMapper() throws Exception { BaseMapper<ConfigChangeLogWithBLOBs, Long, ConfigChangeLogExample> result = configChangeLogServiceImpl.getMapper(); } @Test public void testNewExample() throws Exception { ConfigChangeLogExample result = configChangeLogServiceImpl.newExample(); } @Test public void testNewIdInExample() throws Exception { ConfigChangeLogExample result = configChangeLogServiceImpl.newIdInExample(Arrays.<Long>asList(Long.valueOf(1))); } @Test public void testSaveLogWithBackground() throws Exception { when(configGroupService.selectByPrimaryKey(any(), anyVararg())).thenReturn(new ConfigGroup()); configChangeLogServiceImpl .saveLogWithBackground(Long.valueOf(1), "operator", Long.valueOf(1), new HashMap<String, String>() {{ put("String", "String"); }}, new HashMap<String, String>() {{ put("String", "String"); }}, new GregorianCalendar(2021, Calendar.MARCH, 10, 14, 45).getTime()); } @Test public void testStop() throws Exception { configChangeLogServiceImpl.stop(); } @Test public void testCountByExample() throws Exception { when(configChangeLogMapper.countByExample(any())).thenReturn(0L); long result = configChangeLogServiceImpl.countByExample(null); } @Test public void testDeleteByPrimaryKey() throws Exception { when(configChangeLogMapper.deleteByPrimaryKey(any())).thenReturn(0); int result = configChangeLogServiceImpl.deleteByPrimaryKey(Long.valueOf(1)); } @Test public void testDeleteByExample() throws Exception { 
when(configChangeLogMapper.deleteByExample(any())).thenReturn(0); int result = configChangeLogServiceImpl.deleteByExample(null); } @Test public void testInsert() throws Exception { when(configChangeLogMapper.insert(any())).thenReturn(0); int result = configChangeLogServiceImpl.insert(new ConfigChangeLogWithBLOBs()); } @Test public void testInsertSelective() throws Exception { when(configChangeLogMapper.insertSelective(any())).thenReturn(0); int result = configChangeLogServiceImpl.insertSelective(new ConfigChangeLogWithBLOBs()); } @Test public void testUpdateByPrimaryKeySelective() throws Exception { when(configChangeLogMapper.updateByPrimaryKeySelective(any())).thenReturn(0); int result = configChangeLogServiceImpl.updateByPrimaryKeySelective(new ConfigChangeLogWithBLOBs()); } @Test public void testUpdateByPrimaryKey() throws Exception { when(configChangeLogMapper.updateByPrimaryKey(any())).thenReturn(0); int result = configChangeLogServiceImpl.updateByPrimaryKey(new ConfigChangeLogWithBLOBs()); } @Test public void testUpdateByExampleSelective() throws Exception { when(configChangeLogMapper.updateByExampleSelective(any(), any())).thenReturn(0); int result = configChangeLogServiceImpl.updateByExampleSelective(new ConfigChangeLogWithBLOBs(), null); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.dfareporting.model; /** * This message is for backends to pass their scotty media specific fields to ESF. Backend will * include this in their response message to ESF. Example: ExportFile is an rpc defined for upload * using scotty from ESF. rpc ExportFile(ExportFileRequest) returns (ExportFileResponse) Message * ExportFileResponse will include apiserving.MediaResponseInfo to tell ESF about data like * dynamic_dropzone it needs to pass to Scotty. message ExportFileResponse { optional gdata.Media * blob = 1; optional apiserving.MediaResponseInfo media_response_info = 2 } * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the DCM/DFA Reporting And Trafficking API. For a detailed * explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class MediaResponseInfo extends com.google.api.client.json.GenericJson { /** * Data to copy from backend response to the next backend requests. Custom data is returned to * Scotty in the agent_state field, which Scotty will then provide in subsequent upload * notifications. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String customData; /** * Specifies any transformation to be applied to data before persisting it or retrieving from * storage. E.g., encryption options for blobstore2. This should be of the form * uploader_service.DataStorageTransform. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String dataStorageTransform; /** * Specifies the Scotty Drop Target to use for uploads. If present in a media response, Scotty * does not upload to a standard drop zone. Instead, Scotty saves the upload directly to the * location specified in this drop target. Unlike drop zones, the drop target is the final storage * location for an upload. So, the agent does not need to clone the blob at the end of the upload. * The agent is responsible for garbage collecting any orphaned blobs that may occur due to * aborted uploads. For more information, see the drop target design doc here: * http://goto/ScottyDropTarget This field will be preferred to dynamicDropzone. If provided, the * identified field in the response must be of the type uploader.agent.DropTarget. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String dynamicDropTarget; /** * Specifies the Scotty dropzone to use for uploads. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String dynamicDropzone; /** * Request class to use for all Blobstore operations for this request. 
* The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String requestClass; /** * Specifies the TrafficClass that Scotty should use for any RPCs to fetch the response bytes. * Will override the traffic class GTOS of the incoming http request. This is a temporary field to * facilitate whitelisting and experimentation by the bigstore agent only. For instance, this does * not apply to RTMP reads. WARNING: DO NOT USE WITHOUT PERMISSION FROM THE SCOTTY TEAM. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String trafficClassField; /** * Tells Scotty to verify hashes on the agent's behalf by parsing out the X-Goog-Hash header. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean verifyHashFromHeader; /** * Data to copy from backend response to the next backend requests. Custom data is returned to * Scotty in the agent_state field, which Scotty will then provide in subsequent upload * notifications. * @return value or {@code null} for none */ public java.lang.String getCustomData() { return customData; } /** * Data to copy from backend response to the next backend requests. Custom data is returned to * Scotty in the agent_state field, which Scotty will then provide in subsequent upload * notifications. * @param customData customData or {@code null} for none */ public MediaResponseInfo setCustomData(java.lang.String customData) { this.customData = customData; return this; } /** * Specifies any transformation to be applied to data before persisting it or retrieving from * storage. E.g., encryption options for blobstore2. This should be of the form * uploader_service.DataStorageTransform. * @see #decodeDataStorageTransform() * @return value or {@code null} for none */ public java.lang.String getDataStorageTransform() { return dataStorageTransform; } /** * Specifies any transformation to be applied to data before persisting it or retrieving from * storage. E.g., encryption options for blobstore2. This should be of the form * uploader_service.DataStorageTransform. * @see #getDataStorageTransform() * @return Base64 decoded value or {@code null} for none * * @since 1.14 */ public byte[] decodeDataStorageTransform() { return com.google.api.client.util.Base64.decodeBase64(dataStorageTransform); } /** * Specifies any transformation to be applied to data before persisting it or retrieving from * storage. E.g., encryption options for blobstore2. This should be of the form * uploader_service.DataStorageTransform. * @see #encodeDataStorageTransform() * @param dataStorageTransform dataStorageTransform or {@code null} for none */ public MediaResponseInfo setDataStorageTransform(java.lang.String dataStorageTransform) { this.dataStorageTransform = dataStorageTransform; return this; } /** * Specifies any transformation to be applied to data before persisting it or retrieving from * storage. E.g., encryption options for blobstore2. This should be of the form * uploader_service.DataStorageTransform. * @see #setDataStorageTransform() * * <p> * The value is encoded Base64 or {@code null} for none. * </p> * * @since 1.14 */ public MediaResponseInfo encodeDataStorageTransform(byte[] dataStorageTransform) { this.dataStorageTransform = com.google.api.client.util.Base64.encodeBase64URLSafeString(dataStorageTransform); return this; } /** * Specifies the Scotty Drop Target to use for uploads. If present in a media response, Scotty * does not upload to a standard drop zone. 
Instead, Scotty saves the upload directly to the * location specified in this drop target. Unlike drop zones, the drop target is the final storage * location for an upload. So, the agent does not need to clone the blob at the end of the upload. * The agent is responsible for garbage collecting any orphaned blobs that may occur due to * aborted uploads. For more information, see the drop target design doc here: * http://goto/ScottyDropTarget This field will be preferred to dynamicDropzone. If provided, the * identified field in the response must be of the type uploader.agent.DropTarget. * @see #decodeDynamicDropTarget() * @return value or {@code null} for none */ public java.lang.String getDynamicDropTarget() { return dynamicDropTarget; } /** * Specifies the Scotty Drop Target to use for uploads. If present in a media response, Scotty * does not upload to a standard drop zone. Instead, Scotty saves the upload directly to the * location specified in this drop target. Unlike drop zones, the drop target is the final storage * location for an upload. So, the agent does not need to clone the blob at the end of the upload. * The agent is responsible for garbage collecting any orphaned blobs that may occur due to * aborted uploads. For more information, see the drop target design doc here: * http://goto/ScottyDropTarget This field will be preferred to dynamicDropzone. If provided, the * identified field in the response must be of the type uploader.agent.DropTarget. * @see #getDynamicDropTarget() * @return Base64 decoded value or {@code null} for none * * @since 1.14 */ public byte[] decodeDynamicDropTarget() { return com.google.api.client.util.Base64.decodeBase64(dynamicDropTarget); } /** * Specifies the Scotty Drop Target to use for uploads. If present in a media response, Scotty * does not upload to a standard drop zone. Instead, Scotty saves the upload directly to the * location specified in this drop target. Unlike drop zones, the drop target is the final storage * location for an upload. So, the agent does not need to clone the blob at the end of the upload. * The agent is responsible for garbage collecting any orphaned blobs that may occur due to * aborted uploads. For more information, see the drop target design doc here: * http://goto/ScottyDropTarget This field will be preferred to dynamicDropzone. If provided, the * identified field in the response must be of the type uploader.agent.DropTarget. * @see #encodeDynamicDropTarget() * @param dynamicDropTarget dynamicDropTarget or {@code null} for none */ public MediaResponseInfo setDynamicDropTarget(java.lang.String dynamicDropTarget) { this.dynamicDropTarget = dynamicDropTarget; return this; } /** * Specifies the Scotty Drop Target to use for uploads. If present in a media response, Scotty * does not upload to a standard drop zone. Instead, Scotty saves the upload directly to the * location specified in this drop target. Unlike drop zones, the drop target is the final storage * location for an upload. So, the agent does not need to clone the blob at the end of the upload. * The agent is responsible for garbage collecting any orphaned blobs that may occur due to * aborted uploads. For more information, see the drop target design doc here: * http://goto/ScottyDropTarget This field will be preferred to dynamicDropzone. If provided, the * identified field in the response must be of the type uploader.agent.DropTarget. * @see #setDynamicDropTarget() * * <p> * The value is encoded Base64 or {@code null} for none. 
* </p> * * @since 1.14 */ public MediaResponseInfo encodeDynamicDropTarget(byte[] dynamicDropTarget) { this.dynamicDropTarget = com.google.api.client.util.Base64.encodeBase64URLSafeString(dynamicDropTarget); return this; } /** * Specifies the Scotty dropzone to use for uploads. * @return value or {@code null} for none */ public java.lang.String getDynamicDropzone() { return dynamicDropzone; } /** * Specifies the Scotty dropzone to use for uploads. * @param dynamicDropzone dynamicDropzone or {@code null} for none */ public MediaResponseInfo setDynamicDropzone(java.lang.String dynamicDropzone) { this.dynamicDropzone = dynamicDropzone; return this; } /** * Request class to use for all Blobstore operations for this request. * @return value or {@code null} for none */ public java.lang.String getRequestClass() { return requestClass; } /** * Request class to use for all Blobstore operations for this request. * @param requestClass requestClass or {@code null} for none */ public MediaResponseInfo setRequestClass(java.lang.String requestClass) { this.requestClass = requestClass; return this; } /** * Specifies the TrafficClass that Scotty should use for any RPCs to fetch the response bytes. * Will override the traffic class GTOS of the incoming http request. This is a temporary field to * facilitate whitelisting and experimentation by the bigstore agent only. For instance, this does * not apply to RTMP reads. WARNING: DO NOT USE WITHOUT PERMISSION FROM THE SCOTTY TEAM. * @return value or {@code null} for none */ public java.lang.String getTrafficClassField() { return trafficClassField; } /** * Specifies the TrafficClass that Scotty should use for any RPCs to fetch the response bytes. * Will override the traffic class GTOS of the incoming http request. This is a temporary field to * facilitate whitelisting and experimentation by the bigstore agent only. For instance, this does * not apply to RTMP reads. WARNING: DO NOT USE WITHOUT PERMISSION FROM THE SCOTTY TEAM. * @param trafficClassField trafficClassField or {@code null} for none */ public MediaResponseInfo setTrafficClassField(java.lang.String trafficClassField) { this.trafficClassField = trafficClassField; return this; } /** * Tells Scotty to verify hashes on the agent's behalf by parsing out the X-Goog-Hash header. * @return value or {@code null} for none */ public java.lang.Boolean getVerifyHashFromHeader() { return verifyHashFromHeader; } /** * Tells Scotty to verify hashes on the agent's behalf by parsing out the X-Goog-Hash header. * @param verifyHashFromHeader verifyHashFromHeader or {@code null} for none */ public MediaResponseInfo setVerifyHashFromHeader(java.lang.Boolean verifyHashFromHeader) { this.verifyHashFromHeader = verifyHashFromHeader; return this; } @Override public MediaResponseInfo set(String fieldName, Object value) { return (MediaResponseInfo) super.set(fieldName, value); } @Override public MediaResponseInfo clone() { return (MediaResponseInfo) super.clone(); } }
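A hedged sketch, as a fragment, of how a backend handler might populate this model before returning it; every value below is illustrative, and the byte array is only there to show the generated Base64 encode helper.

MediaResponseInfo info = new MediaResponseInfo()
    .setDynamicDropzone("example-dropzone")      // illustrative dropzone name
    .setCustomData("resume-token-123")           // echoed back in later upload notifications
    .setVerifyHashFromHeader(true)
    .encodeDataStorageTransform(                 // stored Base64 URL-safe encoded
        "example-transform".getBytes(java.nio.charset.StandardCharsets.UTF_8));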
/* * Copyright (c) 2015-present, Parse, LLC. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.parse; /** * A {@code SaveCallback} is used to run code after saving a {@link ParseObject} in a background * thread. * * <p>The easiest way to use a {@code SaveCallback} is through an anonymous inner class. Override * the {@code done} function to specify what the callback should do after the save is complete. The * {@code done} function will be run in the UI thread, while the save happens in a background * thread. This ensures that the UI does not freeze while the save happens. * * <p>For example, this sample code saves the object {@code myObject} and calls a different function * depending on whether the save succeeded or not. * * <p> * * <pre> * myObject.saveInBackground(new SaveCallback() { * public void done(ParseException e) { * if (e == null) { * myObjectSavedSuccessfully(); * } else { * myObjectSaveDidNotSucceed(); * } * } * }); * </pre> */ public interface SaveCallback extends ParseCallback1<ParseException> { /** * Override this function with the code you want to run after the save is complete. * * @param e The exception raised by the save, or {@code null} if it succeeded. */ @Override void done(ParseException e); }
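Because SaveCallback declares a single abstract method, the anonymous-class example in the Javadoc above can also be written as a lambda on SDK and toolchain versions that target Java 8 or newer; myObject and the two handler methods are the same placeholders used in that Javadoc.

myObject.saveInBackground((ParseException e) -> {
    if (e == null) {
        myObjectSavedSuccessfully();
    } else {
        myObjectSaveDidNotSucceed();
    }
});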
<reponame>AhmedHussKhalifa/torchdistill from torchdistill.models.adaptation import ADAPTATION_CLASS_DICT from torchdistill.models.classification import CLASSIFICATION_MODEL_FUNC_DICT from torchdistill.models.custom import CUSTOM_MODEL_CLASS_DICT, CUSTOM_MODEL_FUNC_DICT from torchdistill.models.special import SPECIAL_CLASS_DICT MODEL_DICT = dict() MODEL_DICT.update(ADAPTATION_CLASS_DICT) MODEL_DICT.update(SPECIAL_CLASS_DICT) MODEL_DICT.update(CUSTOM_MODEL_CLASS_DICT) MODEL_DICT.update(CUSTOM_MODEL_FUNC_DICT) MODEL_DICT.update(CLASSIFICATION_MODEL_FUNC_DICT)
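A hedged lookup sketch for the merged registry above; the import path is an assumption (it depends on where this module sits inside the package), and 'resnet18' is only an example key, since the real key set is whatever the merged dictionaries register.

# Assumed import location for the MODEL_DICT built above.
from torchdistill.models import MODEL_DICT

model_builder = MODEL_DICT.get('resnet18')       # illustrative key
if model_builder is None:
    raise KeyError('model not registered in MODEL_DICT')
model = model_builder(num_classes=100)           # accepted kwargs depend on the chosen builder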
<filename>lldb/packages/Python/lldbsuite/test/functionalities/ubsan/user-expression/main.c<gh_stars>1000+ int foo() { int data[4]; int x = *(int *)(((char *)&data[0]) + 2); return 42; } int main() { return 0; // breakpoint line }
// Copyright (C) 2002-2012 <NAME> // Copyright (C) 2009-2012 <NAME> // This file is part of the "Irrlicht Engine". // For conditions of distribution and use, see copyright notice in irrlicht.h // Based on the NPK reader from Irrlicht #include "pch.h" #include "CNPKReader.h" #ifdef __IRR_COMPILE_WITH_NPK_ARCHIVE_LOADER_ #include "irrOS.h" #include "coreutil.h" #ifdef _DEBUG #define IRR_DEBUG_NPK_READER #endif namespace irr { namespace io { namespace { bool isHeaderValid(const SNPKHeader& header) { const c8* const tag = header.Tag; return tag[0] == '0' && tag[1] == 'K' && tag[2] == 'P' && tag[3] == 'N'; } } // end namespace //! Constructor CArchiveLoaderNPK::CArchiveLoaderNPK( io::IFileSystem* fs) : FileSystem(fs) { #ifdef _DEBUG setDebugName("CArchiveLoaderNPK"); #endif } //! returns true if the file maybe is able to be loaded by this class bool CArchiveLoaderNPK::isALoadableFileFormat(const io::path& filename) const { return core::hasFileExtension(filename, "npk"); } //! Check to see if the loader can create archives of this type. bool CArchiveLoaderNPK::isALoadableFileFormat(E_FILE_ARCHIVE_TYPE fileType) const { return fileType == EFAT_NPK; } //! Creates an archive from the filename /** \param file File handle to check. \return Pointer to newly created archive, or 0 upon error. */ IFileArchive* CArchiveLoaderNPK::createArchive(const io::path& filename, bool ignoreCase, bool ignorePaths) const { IFileArchive *archive = 0; io::IReadFile* file = FileSystem->createAndOpenFile(filename); if (file) { archive = createArchive(file, ignoreCase, ignorePaths); file->drop (); } return archive; } //! creates/loads an archive from the file. //! \return Pointer to the created archive. Returns 0 if loading failed. IFileArchive* CArchiveLoaderNPK::createArchive(io::IReadFile* file, bool ignoreCase, bool ignorePaths) const { IFileArchive *archive = 0; if ( file ) { file->seek ( 0 ); archive = new CNPKReader(file, ignoreCase, ignorePaths); } return archive; } //! Check if the file might be loaded by this class /** Check might look into the file. \param file File handle to check. \return True if file seems to be loadable. */ bool CArchiveLoaderNPK::isALoadableFileFormat(io::IReadFile* file) const { SNPKHeader header; file->read(&header, sizeof(header)); return isHeaderValid(header); } /*! NPK Reader */ CNPKReader::CNPKReader(IReadFile* file, bool ignoreCase, bool ignorePaths) : CFileList((file ? 
file->getFileName() : io::path("")), ignoreCase, ignorePaths), File(file) { #ifdef _DEBUG setDebugName("CNPKReader"); #endif if (File) { File->grab(); if (scanLocalHeader()) sort(); else os::Printer::log("Failed to load NPK archive."); } } CNPKReader::~CNPKReader() { if (File) File->drop(); } const IFileList* CNPKReader::getFileList() const { return this; } bool CNPKReader::scanLocalHeader() { SNPKHeader header; // Read and validate the header File->read(&header, sizeof(header)); if (!isHeaderValid(header)) return false; // Seek to the table of contents #ifdef __BIG_ENDIAN__ header.Offset = os::Byteswap::byteswap(header.Offset); header.Length = os::Byteswap::byteswap(header.Length); #endif header.Offset += 8; core::stringc dirName; bool inTOC=true; // Loop through each entry in the table of contents while (inTOC && (File->getPos() < File->getSize())) { // read an entry char tag[4]={0}; SNPKFileEntry entry; File->read(tag, 4); const int numTag = MAKE_IRR_ID(tag[3],tag[2],tag[1],tag[0]); int size; bool isDir=true; switch (numTag) { case MAKE_IRR_ID('D','I','R','_'): { File->read(&size, 4); readString(entry.Name); entry.Length=0; entry.Offset=0; #ifdef IRR_DEBUG_NPK_READER os::Printer::log("Dir", entry.Name); #endif } break; case MAKE_IRR_ID('F','I','L','E'): { File->read(&size, 4); File->read(&entry.Offset, 4); File->read(&entry.Length, 4); readString(entry.Name); isDir=false; #ifdef IRR_DEBUG_NPK_READER os::Printer::log("File", entry.Name); #endif #ifdef __BIG_ENDIAN__ entry.Offset = os::Byteswap::byteswap(entry.Offset); entry.Length = os::Byteswap::byteswap(entry.Length); #endif } break; case MAKE_IRR_ID('D','E','N','D'): { File->read(&size, 4); entry.Name=""; entry.Length=0; entry.Offset=0; const s32 pos = dirName.findLast('/', dirName.size()-2); if (pos==-1) dirName=""; else dirName=dirName.subString(0, pos); #ifdef IRR_DEBUG_NPK_READER os::Printer::log("Dirend", dirName); #endif } break; default: inTOC=false; } // skip root dir if (isDir) { if (!entry.Name.size() || (entry.Name==".") || (entry.Name=="<noname>")) continue; dirName += entry.Name; dirName += "/"; } #ifdef IRR_DEBUG_NPK_READER os::Printer::log("Name", entry.Name); #endif addItem((isDir?dirName:dirName+entry.Name), entry.Offset+header.Offset, entry.Length, isDir); } return true; } //! opens a file by file name IReadFile* CNPKReader::createAndOpenFile(const io::path& filename) { s32 index = findFile(filename, false); if (index != -1) return createAndOpenFile(index); return 0; } //! opens a file by index IReadFile* CNPKReader::createAndOpenFile(u32 index) { if (index >= Files.size() ) return 0; const SFileListEntry &entry = Files[index]; return createLimitReadFile( entry.FullName, File, entry.Offset, entry.Size ); } void CNPKReader::readString(core::stringc& name) { short stringSize; char buf[256]; File->read(&stringSize, 2); #ifdef __BIG_ENDIAN__ stringSize = os::Byteswap::byteswap(stringSize); #endif name.reserve(stringSize); while(stringSize) { const short next = core::min_(stringSize, (short)255); File->read(buf,next); buf[next]=0; name.append(buf); stringSize -= next; } } } // end namespace io } // end namespace irr #endif // __IRR_COMPILE_WITH_NPK_ARCHIVE_LOADER_
<reponame>Weissle/modern-cpp-tutorial<gh_stars>1000+
//
// 4.1.linear.container.cpp
// modern c++ tutorial
//
// created by changkun at changkun.de
// https://github.com/changkun/modern-cpp-tutorial
//

#include <iostream>
#include <array>
#include <vector>
#include <algorithm>  // needed for std::sort used below

void foo(int *p, int len) {
    for (int i = 0; i != len; ++i) {
        std::cout << p[i] << std::endl;
    }
}

int main() {
    std::vector<int> v;
    std::cout << "size:" << v.size() << std::endl;         // output 0
    std::cout << "capacity:" << v.capacity() << std::endl; // output 0

    // As you can see, the storage of std::vector is automatically managed and
    // automatically expanded as needed.
    // But if there is not enough space, you need to redistribute more memory,
    // and reallocating memory is usually a performance-intensive operation.
    v.push_back(1);
    v.push_back(2);
    v.push_back(3);
    std::cout << "size:" << v.size() << std::endl;         // output 3
    std::cout << "capacity:" << v.capacity() << std::endl; // output 4

    // The auto-expansion logic here is very similar to Golang's slice.
    v.push_back(4);
    v.push_back(5);
    std::cout << "size:" << v.size() << std::endl;         // output 5
    std::cout << "capacity:" << v.capacity() << std::endl; // output 8

    // As can be seen below, although the container empties the element,
    // the memory of the emptied element is not returned.
    v.clear();
    std::cout << "size:" << v.size() << std::endl;         // output 0
    std::cout << "capacity:" << v.capacity() << std::endl; // output 8

    // Additional memory can be returned to the system via the shrink_to_fit() call
    v.shrink_to_fit();
    std::cout << "size:" << v.size() << std::endl;         // output 0
    std::cout << "capacity:" << v.capacity() << std::endl; // output 0

    std::array<int, 4> arr = {1, 4, 3, 2};
    //int len = 4;
    //std::array<int, len> arr = {1,2,3,4}; // illegal, size of array must be constexpr

    // C style parameter passing
    // foo(arr, arr.size()); // illegal, cannot convert implicitly
    foo(&arr[0], arr.size());
    foo(arr.data(), arr.size());

    // more usage
    std::sort(arr.begin(), arr.end());
    for (auto &i : arr) std::cout << i << std::endl;

    return 0;
}
<filename>plecost_lib/libs/updaters/plugins.py #!/usr/bin/python # -*- coding: utf-8 -*- # # Plecost: Wordpress vulnerabilities finder # # @url: http://iniqua.com/labs/ # @url: https://github.com/iniqua/plecost # # @author:<NAME> aka ffranz (http://iniqua.com/) # @author:<NAME> aka cr0hn (http://www.cr0hn.com/me/) # # Copyright (c) 2015, Iniqua Team # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from this # software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, # OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # import csv import lxml.html from urllib.error import URLError from urllib.request import urlopen from os.path import join from ..utils import colorize, get_data_folder, update_progress # -------------------------------------------------------------------------- def update_plugins(log): """ Update data information. :param log: Log function as format: function(Message, Level) :type log: function(str, int) """ # -------------------------------------------------------------------------- # Config and vars # -------------------------------------------------------------------------- wp_plugins_url = "http://wordpress.org/plugins/browse/popular/page/%s/" max_plugins = 1400 file_out = join(get_data_folder(), "plugin_list_huge.txt") log("[*] Preparing for update plugins...\n") with open(file_out, "w") as out: already_processed = [] already_processed_append = already_processed.append csv_file = csv.writer(out) total_plugins = 1 searching = True # Looking for 85 * 14 (per page) = 1190 plugins for i in update_progress(range(1, 85), prefix_text="[*] Downloading plugins (slow): "): if searching is False: break # 6 tries for each request for x in range(1, 6): try: url = wp_plugins_url % i wpage = urlopen(url).read() break except URLError as e: log("[%s] %s" % ( colorize("!", "red"), colorize("Error while getting URL: %s. 
Attempt %s.\n" % (url, x)) )) # sleep(random()) # Maximum attempt reached if x == 6: log("[%s] %s" % ( colorize("!!", "red"), colorize("Maximum time exceeded"), ), 0) return else: continue # Parse parsed_main = lxml.html.fromstring(wpage) for section in parsed_main.xpath('//main/article'): plugin_info = section.xpath(".//h2/a")[0] plugin_url = plugin_info.attrib.get("href") plugin_name = plugin_info.text if not plugin_name: plugin_name = plugin_url # # Get plugins details # plugin_page = urlopen(plugin_url).read() plugin_parsed = lxml.html.fromstring(plugin_page) plugin_version = plugin_parsed.xpath("//div[contains(@class, 'plugin-meta')]/ul/li/strong") if plugin_version: plugin_version = plugin_version[0].text else: plugin_version = None # -------------------------------------------------------------------------- # We have all information to continue? # -------------------------------------------------------------------------- if plugin_url is None or plugin_version is None: log("[%s] Not enough information to store plugin for:\n%s\n" % ( colorize("ii", "red"), plugin_info ), 2) continue # Report status log(" |-- %s - Processing plugin: %s\n" % ( colorize(total_plugins), plugin_url ), log_level=1) # Write to file plugin_url_store = plugin_url.replace("https://wordpress.org/plugins/", "")[0:-1] try: csv_file.writerow([plugin_url_store, plugin_name, plugin_version]) except UnicodeEncodeError: csv_file.writerow([plugin_url_store, plugin_url_store, plugin_version]) # Save plugin already_processed_append(plugin_url) # Maximum number of plugins reached? total_plugins += 1 if total_plugins >= max_plugins: searching = False break # Creates split files with open(file_out, "r") as all_plugins, \ open(join(get_data_folder(), "plugin_list_10.txt"), 'w') as f_10, \ open(join(get_data_folder(), "plugin_list_50.txt"), 'w') as f_50, \ open(join(get_data_folder(), "plugin_list_100.txt"), 'w') as f_100, \ open(join(get_data_folder(), "plugin_list_250.txt"), 'w') as f_250, \ open(join(get_data_folder(), "plugin_list_1000.txt"), 'w') as f_1000: for i, line in enumerate(all_plugins.readlines(), start=1): _line = line if i < 11: f_10.write(_line) if i < 50: f_50.write(_line) if i < 100: f_100.write(_line) if i < 250: f_250.write(_line) if i < 1000: f_1000.write(_line) log("\n[*] Oks!\n")
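A minimal caller sketch for the updater above. The import path is an assumption based on the file location shown in the header, and the log function only needs to accept a message plus an optional verbosity level, matching how it is invoked in update_plugins().

from plecost_lib.libs.updaters.plugins import update_plugins  # assumed import path


def log(message, log_level=0):
    # Very small stand-in for Plecost's real logger: print everything, ignore the level.
    print(message, end="")


if __name__ == "__main__":
    update_plugins(log)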
<filename>3rdparty/nvtt/nvcore/defsgnucwin32.h #ifndef NV_CORE_H #error "Do not include this file directly." #endif //#include <cstddef> // size_t, NULL // Function linkage #define DLL_IMPORT __declspec(dllimport) #define DLL_EXPORT __declspec(dllexport) #define DLL_EXPORT_CLASS DLL_EXPORT // Function calling modes #if NV_CPU_X86 # define NV_CDECL __attribute__((cdecl)) # define NV_STDCALL __attribute__((stdcall)) #else # define NV_CDECL # define NV_STDCALL #endif #define NV_FASTCALL __attribute__((fastcall)) #define NV_FORCEINLINE inline #define NV_DEPRECATED __attribute__((deprecated)) #if __GNUC__ > 2 #define NV_PURE __attribute__((pure)) #define NV_CONST __attribute__((const)) #else #define NV_PURE #define NV_CONST #endif #define NV_NOINLINE __attribute__((noinline)) // Define __FUNC__ properly. #if defined(__STDC_VERSION__) && __STDC_VERSION__ < 199901L # if __GNUC__ >= 2 # define __FUNC__ __PRETTY_FUNCTION__ // __FUNCTION__ # else # define __FUNC__ "<unknown>" # endif #else # define __FUNC__ __PRETTY_FUNCTION__ #endif #define restrict __restrict__ /* // Type definitions typedef unsigned char uint8; typedef signed char int8; typedef unsigned short uint16; typedef signed short int16; typedef unsigned int uint32; typedef signed int int32; typedef unsigned long long uint64; typedef signed long long int64; // Aliases typedef uint32 uint; */
<reponame>arusinha/incubator-netbeans<filename>ide/localhistory/test/qa-functional/src/org/netbeans/test/localhistory/operators/OutlineViewOperator.java /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.test.localhistory.operators; import java.awt.Component; import java.awt.Point; import java.awt.Rectangle; import javax.swing.JPopupMenu; import javax.swing.JScrollPane; import javax.swing.JTable; import javax.swing.JTree; import javax.swing.tree.TreePath; import org.netbeans.jemmy.ComponentChooser; import org.netbeans.jemmy.ComponentSearcher; import org.netbeans.jemmy.EventTool; import org.netbeans.jemmy.Timeout; import org.netbeans.jemmy.drivers.DriverManager; import org.netbeans.jemmy.drivers.MouseDriver; import org.netbeans.jemmy.drivers.SupportiveDriver; import org.netbeans.jemmy.operators.ComponentOperator; import org.netbeans.jemmy.operators.ContainerOperator; import org.netbeans.jemmy.operators.JPopupMenuOperator; import org.netbeans.jemmy.operators.JScrollPaneOperator; import org.netbeans.jemmy.operators.JTableOperator; import org.netbeans.jemmy.operators.JTreeOperator; import org.netbeans.jemmy.util.EmptyVisualizer; import org.openide.explorer.view.Visualizer; /** * * @author peter */ /** * Handle IDE's org.openide.explorer.view.TreeTable component * which is used instead of JTree in Options dialog, ... */ public class OutlineViewOperator extends JTableOperator { private JTreeOperator _tree; /** Creates new instance. * @param view JTable representing requested TreeTable */ public OutlineViewOperator(JTable view) { super(view); } /** Creates new instance for the first TreeTable in container. * @param contOper container where to find TreeTable */ public OutlineViewOperator(ContainerOperator contOper) { this(contOper, 0); } /** Creates new instance for the first TreeTable in container. * @param contOper container where to find TreeTable * @param index int index */ public OutlineViewOperator(ContainerOperator contOper, int index) { super(contOper); // this((JTable) // waitComponent(contOper, // new OutlineViewFinder(ComponentSearcher. // getTrueChooser("Any OutlineView")), // index)); // copyEnvironment(contOper); waitComponent(contOper, new OutlineViewFinder(ComponentSearcher.getTrueChooser("Any OutlineView")), index); } /** Returns operator for a tree which is showed as a part of * the table. * @return JTreeOperator instance */ public JTreeOperator tree() { if(_tree == null) { // Need to wait until TreeTable is populated. Otherwise it can throw // NPE from getValueAt(0, 0). 
waitState(new ComponentChooser() { public boolean checkComponent(Component comp) { return getColumnCount() > 0 && getRowCount() > 0; } public String getDescription() { return "OutlineView contains any rows."; } }); // cell renderer component for first column is JTree Object value = getValueAt(0, 0); JTree jTree = (JTree)getCellRenderer(0, 0).getTableCellRendererComponent((JTable)this.getSource(), value, false, false, 0, 0); // Need to set EmptyVisualizer because found JTree doesn't have any parent Container // and calling makeComponentVisible() throws NPE // _tree = new JTreeOperator(jTree); _tree = new RenderedTreeOperator(this, jTree); _tree.setVisualizer(new EmptyVisualizer()); } // Everytime make parent container visible because tree has EmptyVisualizer // and it is need for example for popup menu operations on JTree makeComponentVisible(); return _tree; } /** Selects node in this TreeTable. * @param path path to node (e.g. "System|Settings") * @return row number of selected node */ public int selectNode(String path) { TreePath treePath = tree().findPath(path, "|"); if(!tree().isPathSelected(treePath)) { tree().selectPath(treePath); } int result = tree().getRowForPath(treePath); scrollToCell(result, 0); new EventTool().waitNoEvent(500); return result; } /** Registers RenderedMouseDriver to be used by RenderedTreeOperator. */ static { DriverManager.setDriver(DriverManager.MOUSE_DRIVER_ID, new RenderedMouseDriver(), RenderedTreeOperator.class); } /** Finder to search for "org.openide.explorer.view.TreeTable". */ static class OutlineViewFinder implements ComponentChooser { ComponentChooser subFinder; public OutlineViewFinder(ComponentChooser sf) { subFinder = sf; } public boolean checkComponent(Component comp) { Class cls = comp.getClass(); do { if(cls.getName().equals("org.openide.explorer.view.OutlineView")) { return(subFinder.checkComponent(comp)); } } while((cls = cls.getSuperclass()) != null); return(false); } public String getDescription() { return(subFinder.getDescription()); } } /** MouseDriver used to process events not on JTree but on TreeTable * component which is used to handle events in real. */ public static class RenderedMouseDriver extends SupportiveDriver implements MouseDriver { /** Creates new instance of RenderedMouseDriver. */ public RenderedMouseDriver() { super(new Class[] {RenderedTreeOperator.class}); } /** Presses mouse on operator given by {@link TreeTableOperator.RenderedTreeOperator#getRealOperator getRealOperator()} method. * @param oper operator which delegates events on an real operator * @param x x position of mouse operation * @param y y position of mouse operation * @param mouseButton mouse button identification * @param modifiers modifiers pressed during mouse click */ public void pressMouse(ComponentOperator oper, int x, int y, int mouseButton, int modifiers) { checkSupported(oper); ComponentOperator realOper = ((RenderedTreeOperator)oper).getRealOperator(); DriverManager.getMouseDriver(realOper).pressMouse(realOper, x, y, mouseButton, modifiers); } /** Releases mouse on operator given by {@link TreeTableOperator.RenderedTreeOperator#getRealOperator getRealOperator()} method. 
 * @param oper operator which delegates events to the real operator
 * @param x x position of mouse operation
 * @param y y position of mouse operation
 * @param mouseButton mouse button identification
 * @param modifiers modifiers pressed during mouse click */
public void releaseMouse(ComponentOperator oper, int x, int y, int mouseButton, int modifiers) {
    checkSupported(oper);
    ComponentOperator realOper = ((RenderedTreeOperator)oper).getRealOperator();
    DriverManager.getMouseDriver(realOper).releaseMouse(realOper, x, y, mouseButton, modifiers);
}

/** Clicks mouse on operator given by {@link TreeTableOperator.RenderedTreeOperator#getRealOperator getRealOperator()} method.
 * @param oper operator which delegates events to the real operator
 * @param x x position of mouse operation
 * @param y y position of mouse operation
 * @param clickCount how many times to be clicked
 * @param mouseButton mouse button identification
 * @param modifiers modifiers pressed during mouse click
 * @param mouseClick timeout of mouse click */
public void clickMouse(ComponentOperator oper, int x, int y, int clickCount, int mouseButton, int modifiers, Timeout mouseClick) {
    checkSupported(oper);
    ComponentOperator realOper = ((RenderedTreeOperator)oper).getRealOperator();
    DriverManager.getMouseDriver(realOper).clickMouse(realOper, x, y, clickCount, mouseButton, modifiers, mouseClick);
}

/** Moves mouse on operator given by {@link TreeTableOperator.RenderedTreeOperator#getRealOperator getRealOperator()} method.
 * @param oper operator which delegates events to the real operator
 * @param x x position of mouse operation
 * @param y y position of mouse operation */
public void moveMouse(ComponentOperator oper, int x, int y) {
    checkSupported(oper);
    ComponentOperator realOper = ((RenderedTreeOperator)oper).getRealOperator();
    DriverManager.getMouseDriver(realOper).moveMouse(realOper, x, y);
}

/** Drags mouse on operator given by {@link TreeTableOperator.RenderedTreeOperator#getRealOperator getRealOperator()} method.
 * @param oper operator which delegates events to the real operator
 * @param x x position of mouse operation
 * @param y y position of mouse operation
 * @param mouseButton mouse button identification
 * @param modifiers modifiers pressed during mouse click */
public void dragMouse(ComponentOperator oper, int x, int y, int mouseButton, int modifiers) {
    checkSupported(oper);
    ComponentOperator realOper = ((RenderedTreeOperator)oper).getRealOperator();
    DriverManager.getMouseDriver(realOper).dragMouse(realOper, x, y, mouseButton, modifiers);
}

/** Provides drag and drop operation on operator given by
 * {@link TreeTableOperator.RenderedTreeOperator#getRealOperator getRealOperator()} method.
 * @param oper operator which delegates events to the real operator
 * @param start_x x position where to drag
 * @param start_y y position where to drag
 * @param end_x x position where to drop
 * @param end_y y position where to drop
 * @param mouseButton mouse button identification
 * @param modifiers modifiers pressed during mouse click
 * @param before timeout before drag
 * @param after timeout after drop */
public void dragNDrop(ComponentOperator oper, int start_x, int start_y, int end_x, int end_y, int mouseButton, int modifiers, Timeout before, Timeout after) {
    checkSupported(oper);
    ComponentOperator realOper = ((RenderedTreeOperator)oper).getRealOperator();
    DriverManager.getMouseDriver(realOper).dragNDrop(realOper, start_x, start_y, end_x, end_y, mouseButton, modifiers, before, after);
}

/** Enters mouse on operator given by {@link TreeTableOperator.RenderedTreeOperator#getRealOperator getRealOperator()} method.
 * @param oper operator which delegates events to the real operator */
public void enterMouse(ComponentOperator oper){
    checkSupported(oper);
    ComponentOperator realOper = ((RenderedTreeOperator)oper).getRealOperator();
    DriverManager.getMouseDriver(realOper).enterMouse(realOper);
}

/** Exits mouse on operator given by {@link TreeTableOperator.RenderedTreeOperator#getRealOperator getRealOperator()} method.
 * @param oper operator which delegates events to the real operator */
public void exitMouse(ComponentOperator oper) {
    checkSupported(oper);
    ComponentOperator realOper = ((RenderedTreeOperator)oper).getRealOperator();
    DriverManager.getMouseDriver(realOper).exitMouse(realOper);
}
}

/** Operator used to process events not on the JTree but on the TreeTable
 * component which actually handles the events. */
public static class RenderedTreeOperator extends JTreeOperator {

    OutlineViewOperator oper;

    /** Creates new instance of RenderedTreeOperator.
     * @param oper TreeTable operator to handle events
     * @param tree JTree representing nodes */
    public RenderedTreeOperator(OutlineViewOperator oper, JTree tree) {
        super(tree);
        this.oper = oper;
    }

    /** Returns TreeTableOperator instance which is used to handle events.
     * @return TreeTableOperator instance */
    public ComponentOperator getRealOperator() {
        return(oper);
    }

    /** Calls popup menu on specified tree paths.
     * @param paths an array of TreePath instances
     * @param mouseButton mouse button identification
     * @return JPopupMenu instance opened by this method */
    public JPopupMenu callPopupOnPaths(TreePath[] paths, int mouseButton) {
        oper.makeComponentVisible();
        for(int i = 0; i < paths.length; i++) {
            if(paths[i].getParentPath() != null) {
                expandPath(paths[i].getParentPath());
            }
        }
        selectPaths(paths);
        scrollToPath(paths[paths.length - 1]);
        Point point = getPointToClick(paths[paths.length - 1]);
        return(JPopupMenuOperator.callPopup(oper.getSource(), (int)point.getX(), (int)point.getY(), mouseButton));
    }

    /**
     * Scrolls to a path if the tree is on a JScrollPane component.
     * @param path TreePath where to scroll */
    public void scrollToPath(TreePath path) {
        makeComponentVisible();
        //try to find JScrollPane under.
        JScrollPane scroll = (JScrollPane)getContainer(new JScrollPaneOperator.
                                                       JScrollPaneFinder(ComponentSearcher.
                                                       getTrueChooser("JScrollPane")));
        if(scroll == null) {
            return;
        }
        JScrollPaneOperator scroller = new JScrollPaneOperator(scroll);
        scroller.copyEnvironment(this);
        scroller.setVisualizer(new EmptyVisualizer());
        Rectangle rect = getPathBounds(path);
        if(rect != null) {
            scroller.scrollToComponentRectangle(getRealOperator().getSource(), (int)rect.getX(), (int)rect.getY(), (int)rect.getWidth(), (int)rect.getHeight());
        } else {
            throw(new NoSuchPathException(path));
        }
    }

    /** Expands path and waits until all children are ready. This method
     * is used in JTreeOperator.findPathPrimitive, so we need to override it here.
     * @param treePath tree path to be expanded */
    public void expandPath(final TreePath treePath) {
        super.expandPath(treePath);
        Visualizer.findNode(treePath.getLastPathComponent()).getChildren().getNodes(true);
    }
}

/** Performs verification by accessing all sub-components */
public void verify() {
    tree();
}
}
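Editorial sketch (not part of the original sources): a minimal, hypothetical example of driving the RenderedTreeOperator defined above. It assumes an OutlineViewOperator and its rendered JTree are already in hand (their project-specific imports are omitted), uses only methods shown in the class plus standard Swing/AWT API, and the nested-class reference and the BUTTON3_MASK popup button are illustrative choices, not mandated by the library.

import java.awt.event.InputEvent;
import javax.swing.JPopupMenu;
import javax.swing.JTree;
import javax.swing.tree.TreePath;

class RenderedTreeOperatorSketch {
    // Opens the context menu on the first row of the rendered tree.
    static JPopupMenu openPopupOnFirstRow(OutlineViewOperator outlineOper, JTree tree) {
        // Mouse events on this wrapper are delegated to the real outline component.
        OutlineViewOperator.RenderedTreeOperator treeOper =
                new OutlineViewOperator.RenderedTreeOperator(outlineOper, tree);
        TreePath firstRow = tree.getPathForRow(0);
        treeOper.scrollToPath(firstRow);
        return treeOper.callPopupOnPaths(new TreePath[] { firstRow }, InputEvent.BUTTON3_MASK);
    }
}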
<reponame>kthoms/code<gh_stars>100-1000 /* * Camunda Platform REST API * OpenApi Spec for Camunda Platform REST API. * * The version of the OpenAPI document: 7.16.0 * * * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). * https://openapi-generator.tech * Do not edit the class manually. */ package com.camunda.consulting.openapi.client.model; import java.util.Objects; import java.util.Arrays; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonTypeName; import com.fasterxml.jackson.annotation.JsonValue; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import com.fasterxml.jackson.annotation.JsonPropertyOrder; /** * ConditionQueryParameterDto */ @JsonPropertyOrder({ ConditionQueryParameterDto.JSON_PROPERTY_OPERATOR, ConditionQueryParameterDto.JSON_PROPERTY_VALUE }) @JsonTypeName("ConditionQueryParameterDto") @javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", date = "2021-11-19T11:53:20.948992+01:00[Europe/Berlin]") public class ConditionQueryParameterDto { /** * Comparison operator to be used. &#x60;notLike&#x60; is not supported by all endpoints. */ public enum OperatorEnum { EQ("eq"), NEQ("neq"), GT("gt"), GTEQ("gteq"), LT("lt"), LTEQ("lteq"), LIKE("like"), NOTLIKE("notLike"); private String value; OperatorEnum(String value) { this.value = value; } @JsonValue public String getValue() { return value; } @Override public String toString() { return String.valueOf(value); } @JsonCreator public static OperatorEnum fromValue(String value) { for (OperatorEnum b : OperatorEnum.values()) { if (b.value.equals(value)) { return b; } } return null; } } public static final String JSON_PROPERTY_OPERATOR = "operator"; private OperatorEnum operator; public static final String JSON_PROPERTY_VALUE = "value"; private Object value = null; public ConditionQueryParameterDto operator(OperatorEnum operator) { this.operator = operator; return this; } /** * Comparison operator to be used. &#x60;notLike&#x60; is not supported by all endpoints. * @return operator **/ @javax.annotation.Nullable @ApiModelProperty(value = "Comparison operator to be used. `notLike` is not supported by all endpoints.") @JsonProperty(JSON_PROPERTY_OPERATOR) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public OperatorEnum getOperator() { return operator; } public void setOperator(OperatorEnum operator) { this.operator = operator; } public ConditionQueryParameterDto value(Object value) { this.value = value; return this; } /** * Can be any value - string, number, boolean, array or object. **Note**: Not every endpoint supports every type. * @return value **/ @javax.annotation.Nullable @ApiModelProperty(value = "Can be any value - string, number, boolean, array or object. 
**Note**: Not every endpoint supports every type.") @JsonProperty(JSON_PROPERTY_VALUE) @JsonInclude(value = JsonInclude.Include.USE_DEFAULTS) public Object getValue() { return value; } public void setValue(Object value) { this.value = value; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } ConditionQueryParameterDto conditionQueryParameterDto = (ConditionQueryParameterDto) o; return Objects.equals(this.operator, conditionQueryParameterDto.operator) && Objects.equals(this.value, conditionQueryParameterDto.value); } @Override public int hashCode() { return Objects.hash(operator, value); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class ConditionQueryParameterDto {\n"); sb.append(" operator: ").append(toIndentedString(operator)).append("\n"); sb.append(" value: ").append(toIndentedString(value)).append("\n"); sb.append("}"); return sb.toString(); } /** * Convert the given object to string with each line indented by 4 spaces * (except the first line). */ private String toIndentedString(Object o) { if (o == null) { return "null"; } return o.toString().replace("\n", "\n "); } }
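Editorial sketch (not in the generated sources): a small, hypothetical example of how the fluent setters above are typically combined, here expressing a "value >= 42" condition; it uses only methods defined in this DTO.

import com.camunda.consulting.openapi.client.model.ConditionQueryParameterDto;

class ConditionQueryParameterDtoSketch {
    // Builds a condition meaning "variable value >= 42".
    static ConditionQueryParameterDto greaterOrEqual42() {
        return new ConditionQueryParameterDto()
                .operator(ConditionQueryParameterDto.OperatorEnum.GTEQ)
                .value(42);
    }
}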
<reponame>cclauss/archai<filename>archai/algos/didarts/didarts_exp_runner.py # Copyright (c) Microsoft Corporation. # Licensed under the MIT license. from typing import Type from overrides import overrides from archai.nas.exp_runner import ExperimentRunner from archai.nas.arch_trainer import TArchTrainer from archai.algos.darts.darts_model_desc_builder import DartsModelDescBuilder from archai.algos.didarts.didarts_arch_trainer import DidartsArchTrainer class DiDartsExperimentRunner(ExperimentRunner): @overrides def model_desc_builder(self)->DartsModelDescBuilder: return DartsModelDescBuilder() @overrides def trainer_class(self)->TArchTrainer: return DidartsArchTrainer
<reponame>clayne/drltrace<gh_stars>100-1000 import os import sys content = open(sys.argv[1], 'r').readlines() names = dict() for line in content: if "|" not in line or "#" in line: continue line = line.split("|") name = line[1] try: names[name] += 1 except: #print name names[name] = 1 result = sorted( ((v,k) for k,v in names.iteritems()), reverse=True) for count, element in result: if count > 1: print "\"%s\":\"%s\", " % (count, element)
<reponame>rhencke/engine<gh_stars>100-1000 // Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef IOS_CHROME_BROWSER_METRICS_TAB_USAGE_RECORDER_DELEGATE_H_ #define IOS_CHROME_BROWSER_METRICS_TAB_USAGE_RECORDER_DELEGATE_H_ #import <Foundation/Foundation.h> // A delegate which provides to the TabUsageRecorder a count of how many alive // tabs it is monitoring. @protocol TabUsageRecorderDelegate // A count of how many alive tabs the TabUsageRecorder is monitoring. // NOTE: This should be used for metrics-gathering only; for any other purpose // callers should not know or care how many tabs are alive. - (NSUInteger)liveTabsCount; @end #endif // IOS_CHROME_BROWSER_METRICS_TAB_USAGE_RECORDER_DELEGATE_H_
<reponame>michaelfolkson/lnhw #include "config.h" #include <assert.h> #include <ccan/mem/mem.h> int main(void) { const char *haystack = "abcd\0efgh"; char *p; #ifdef FAIL #if !HAVE_TYPEOF #error "Can't fail without typeof" #else /* Should catch const discard errors. */ p = memcheck(haystack, sizeof(haystack)); #endif #else p = memcheck((char *)haystack, sizeof(haystack)); #endif return p == haystack ? 0 : 1; }
//https://github.com/Tyilo/insert_dylib/blob/master/insert_dylib/main.c #include "insert_dylib.h" #include <stdlib.h> #include <stdio.h> #include <stdbool.h> #include <stdarg.h> #include <string.h> #include <unistd.h> #include <getopt.h> #include <sys/stat.h> #include <sys/param.h> #include <copyfile.h> #include <mach-o/loader.h> #include <mach-o/fat.h> #define IS_64_BIT(x) ((x) == MH_MAGIC_64 || (x) == MH_CIGAM_64) #define IS_LITTLE_ENDIAN(x) ((x) == FAT_CIGAM || (x) == MH_CIGAM_64 || (x) == MH_CIGAM) #define SWAP32(x, magic) (IS_LITTLE_ENDIAN(magic)? OSSwapInt32(x): (x)) #define SWAP64(x, magic) (IS_LITTLE_ENDIAN(magic)? OSSwapInt64(x): (x)) #define ROUND_UP(x, y) (((x) + (y) - 1) & -(y)) #define ABSDIFF(x, y) ((x) > (y)? (uintmax_t)(x) - (uintmax_t)(y): (uintmax_t)(y) - (uintmax_t)(x)) #define BUFSIZE 512 void fbzero(FILE *f, off_t offset, size_t len) { static unsigned char zeros[BUFSIZE] = {0}; fseeko(f, offset, SEEK_SET); while(len != 0) { size_t size = MIN(len, sizeof(zeros)); fwrite(zeros, size, 1, f); len -= size; } } void fmemmove(FILE *f, off_t dst, off_t src, size_t len) { static unsigned char buf[BUFSIZE]; while(len != 0) { size_t size = MIN(len, sizeof(buf)); fseeko(f, src, SEEK_SET); fread(&buf, size, 1, f); fseeko(f, dst, SEEK_SET); fwrite(buf, size, 1, f); len -= size; src += size; dst += size; } } int inplace_flag = false; int weak_flag = false; int overwrite_flag = false; int codesig_flag = 0; int yes_flag = false; static struct option long_options[] = { {"inplace", no_argument, &inplace_flag, true}, {"weak", no_argument, &weak_flag, true}, {"overwrite", no_argument, &overwrite_flag, true}, {"strip-codesig", no_argument, &codesig_flag, 1}, {"no-strip-codesig", no_argument, &codesig_flag, 2}, {"all-yes", no_argument, &yes_flag, true}, {NULL, 0, NULL, 0} }; __attribute__((noreturn)) void usage(void) { printf("Usage: insert_dylib dylib_path binary_path [new_binary_path]\n"); printf("Option flags:"); struct option *opt = long_options; while(opt->name != NULL) { printf(" --%s", opt->name); opt++; } printf("\n"); exit(1); } __attribute__((format(printf, 1, 2))) bool ask(const char *format, ...) 
{ char *question; asprintf(&question, "%s [y/n] ", format); va_list args; va_start(args, format); vprintf(question, args); va_end(args); free(question); while(true) { char *line = NULL; size_t size; if(yes_flag) { puts("y"); line = "y"; } else { getline(&line, &size, stdin); } switch(line[0]) { case 'y': case 'Y': return true; break; case 'n': case 'N': return false; break; default: printf("Please enter y or n: "); } } } size_t fpeek(void *restrict ptr, size_t size, size_t nitems, FILE *restrict stream) { off_t pos = ftello(stream); size_t result = fread(ptr, size, nitems, stream); fseeko(stream, pos, SEEK_SET); return result; } void *read_load_command(FILE *f, uint32_t cmdsize) { void *lc = malloc(cmdsize); fpeek(lc, cmdsize, 1, f); return lc; } bool check_load_commands(FILE *f, struct mach_header *mh, size_t header_offset, size_t commands_offset, const char *dylib_path, off_t *slice_size) { fseeko(f, commands_offset, SEEK_SET); uint32_t ncmds = SWAP32(mh->ncmds, mh->magic); off_t linkedit_32_pos = -1; off_t linkedit_64_pos = -1; struct segment_command linkedit_32; struct segment_command_64 linkedit_64; off_t symtab_pos = -1; uint32_t symtab_size = 0; for(int i = 0; i < ncmds; i++) { struct load_command lc; fpeek(&lc, sizeof(lc), 1, f); uint32_t cmdsize = SWAP32(lc.cmdsize, mh->magic); uint32_t cmd = SWAP32(lc.cmd, mh->magic); switch(cmd) { case LC_CODE_SIGNATURE: if(i == ncmds - 1) { if(codesig_flag == 2) { return true; } if(codesig_flag == 0 && !ask("LC_CODE_SIGNATURE load command found. Remove it?")) { return true; } struct linkedit_data_command *cmd = read_load_command(f, cmdsize); fbzero(f, ftello(f), cmdsize); uint32_t dataoff = SWAP32(cmd->dataoff, mh->magic); uint32_t datasize = SWAP32(cmd->datasize, mh->magic); free(cmd); uint64_t linkedit_fileoff = 0; uint64_t linkedit_filesize = 0; if(linkedit_32_pos != -1) { linkedit_fileoff = SWAP32(linkedit_32.fileoff, mh->magic); linkedit_filesize = SWAP32(linkedit_32.filesize, mh->magic); } else if(linkedit_64_pos != -1) { linkedit_fileoff = SWAP64(linkedit_64.fileoff, mh->magic); linkedit_filesize = SWAP64(linkedit_64.filesize, mh->magic); } else { fprintf(stderr, "Warning: __LINKEDIT segment not found.\n"); } if(linkedit_32_pos != -1 || linkedit_64_pos != -1) { if(linkedit_fileoff + linkedit_filesize != *slice_size) { fprintf(stderr, "Warning: __LINKEDIT segment is not at the end of the file, so codesign will not work on the patched binary.\n"); } else { if(dataoff + datasize != *slice_size) { fprintf(stderr, "Warning: Codesignature is not at the end of __LINKEDIT segment, so codesign will not work on the patched binary.\n"); } else { *slice_size -= datasize; //int64_t diff_size = 0; if(symtab_pos == -1) { fprintf(stderr, "Warning: LC_SYMTAB load command not found. codesign might not work on the patched binary.\n"); } else { fseeko(f, symtab_pos, SEEK_SET); struct symtab_command *symtab = read_load_command(f, symtab_size); uint32_t strsize = SWAP32(symtab->strsize, mh->magic); int64_t diff_size = SWAP32(symtab->stroff, mh->magic) + strsize - (int64_t)*slice_size; if(-0x10 <= diff_size && diff_size <= 0) { symtab->strsize = SWAP32((uint32_t)(strsize - diff_size), mh->magic); fwrite(symtab, symtab_size, 1, f); } else { fprintf(stderr, "Warning: String table doesn't appear right before code signature. codesign might not work on the patched binary. 
(0x%llx)\n", diff_size); } free(symtab); } linkedit_filesize -= datasize; uint64_t linkedit_vmsize = ROUND_UP(linkedit_filesize, 0x1000); if(linkedit_32_pos != -1) { linkedit_32.filesize = SWAP32((uint32_t)linkedit_filesize, mh->magic); linkedit_32.vmsize = SWAP32((uint32_t)linkedit_vmsize, mh->magic); fseeko(f, linkedit_32_pos, SEEK_SET); fwrite(&linkedit_32, sizeof(linkedit_32), 1, f); } else { linkedit_64.filesize = SWAP64(linkedit_filesize, mh->magic); linkedit_64.vmsize = SWAP64(linkedit_vmsize, mh->magic); fseeko(f, linkedit_64_pos, SEEK_SET); fwrite(&linkedit_64, sizeof(linkedit_64), 1, f); } goto fix_header; } } } // If we haven't truncated the file, zero out the code signature fbzero(f, header_offset + dataoff, datasize); fix_header: mh->ncmds = SWAP32(ncmds - 1, mh->magic); mh->sizeofcmds = SWAP32(SWAP32(mh->sizeofcmds, mh->magic) - cmdsize, mh->magic); return true; } else { printf("LC_CODE_SIGNATURE is not the last load command, so couldn't remove.\n"); } break; case LC_LOAD_DYLIB: case LC_LOAD_WEAK_DYLIB: { struct dylib_command *dylib_command = read_load_command(f, cmdsize); union lc_str offset = dylib_command->dylib.name; char *name = &((char *)dylib_command)[SWAP32(offset.offset, mh->magic)]; int cmp = strcmp(name, dylib_path); free(dylib_command); if(cmp == 0) { if(!ask("Binary already contains a load command for that dylib. Continue anyway?")) { return false; } } break; } case LC_SEGMENT: case LC_SEGMENT_64: if(cmd == LC_SEGMENT) { struct segment_command *cmd = read_load_command(f, cmdsize); if(strcmp(cmd->segname, "__LINKEDIT") == 0) { linkedit_32_pos = ftello(f); linkedit_32 = *cmd; } free(cmd); } else { struct segment_command_64 *cmd = read_load_command(f, cmdsize); if(strcmp(cmd->segname, "__LINKEDIT") == 0) { linkedit_64_pos = ftello(f); linkedit_64 = *cmd; } free(cmd); } case LC_SYMTAB: symtab_pos = ftello(f); symtab_size = cmdsize; } fseeko(f, SWAP32(lc.cmdsize, mh->magic), SEEK_CUR); } return true; } bool insert_dylib(FILE *f, size_t header_offset, const char *dylib_path, off_t *slice_size) { fseeko(f, header_offset, SEEK_SET); struct mach_header mh; fread(&mh, sizeof(struct mach_header), 1, f); if(mh.magic != MH_MAGIC_64 && mh.magic != MH_CIGAM_64 && mh.magic != MH_MAGIC && mh.magic != MH_CIGAM) { printf("Unknown magic: 0x%x\n", mh.magic); return false; } size_t commands_offset = header_offset + (IS_64_BIT(mh.magic)? sizeof(struct mach_header_64): sizeof(struct mach_header)); bool cont = check_load_commands(f, &mh, header_offset, commands_offset, dylib_path, slice_size); if(!cont) { return true; } // Even though a padding of 4 works for x86_64, codesign doesn't like it size_t path_padding = 8; size_t dylib_path_len = strlen(dylib_path); size_t dylib_path_size = (dylib_path_len & ~(path_padding - 1)) + path_padding; uint32_t cmdsize = (uint32_t)(sizeof(struct dylib_command) + dylib_path_size); struct dylib_command dylib_command = { .cmd = SWAP32(weak_flag? LC_LOAD_WEAK_DYLIB: LC_LOAD_DYLIB, mh.magic), .cmdsize = SWAP32(cmdsize, mh.magic), .dylib = { .name = SWAP32(sizeof(struct dylib_command), mh.magic), .timestamp = 0, .current_version = 0, .compatibility_version = 0 } }; uint32_t sizeofcmds = SWAP32(mh.sizeofcmds, mh.magic); fseeko(f, commands_offset + sizeofcmds, SEEK_SET); char space[cmdsize]; fread(&space, cmdsize, 1, f); bool empty = true; for(int i = 0; i < cmdsize; i++) { if(space[i] != 0) { empty = false; break; } } if(!empty) { if(!ask("It doesn't seem like there is enough empty space. 
Continue anyway?")) { return false; } } fseeko(f, -((off_t)cmdsize), SEEK_CUR); char *dylib_path_padded = calloc(dylib_path_size, 1); memcpy(dylib_path_padded, dylib_path, dylib_path_len); fwrite(&dylib_command, sizeof(dylib_command), 1, f); fwrite(dylib_path_padded, dylib_path_size, 1, f); free(dylib_path_padded); mh.ncmds = SWAP32(SWAP32(mh.ncmds, mh.magic) + 1, mh.magic); sizeofcmds += cmdsize; mh.sizeofcmds = SWAP32(sizeofcmds, mh.magic); fseeko(f, header_offset, SEEK_SET); fwrite(&mh, sizeof(mh), 1, f); return true; } int add_dylib(int argc, const char *argv[]) { while(true) { int option_index = 0; int c = getopt_long(argc, (char *const *)argv, "", long_options, &option_index); if(c == -1) { break; } switch(c) { case 0: break; case '?': usage(); break; default: abort(); break; } } argv = &argv[optind - 1]; argc -= optind - 1; if(argc < 3 || argc > 4) { usage(); } const char *lc_name = weak_flag? "LC_LOAD_WEAK_DYLIB": "LC_LOAD_DYLIB"; const char *dylib_path = argv[1]; const char *binary_path = argv[2]; struct stat s; if(stat(binary_path, &s) != 0) { perror(binary_path); return 1; } if(dylib_path[0] != '@' && stat(dylib_path, &s) != 0) { if(!ask("The provided dylib path doesn't exist. Continue anyway?")) { return 1; } } bool binary_path_was_malloced = false; if(!inplace_flag) { char *new_binary_path; if(argc == 4) { new_binary_path = (char *)argv[3]; } else { asprintf(&new_binary_path, "%s_patched", binary_path); binary_path_was_malloced = true; } if(copyfile(binary_path, new_binary_path, NULL, COPYFILE_DATA | COPYFILE_UNLINK)) { printf("Failed to create %s\n", new_binary_path); return 1; } binary_path = new_binary_path; } FILE *f = fopen(binary_path, "r+"); if(!f) { printf("Couldn't open file %s\n", binary_path); return 1; } bool success = true; fseeko(f, 0, SEEK_END); off_t file_size = ftello(f); rewind(f); uint32_t magic; fread(&magic, sizeof(uint32_t), 1, f); switch(magic) { case FAT_MAGIC: case FAT_CIGAM: { fseeko(f, 0, SEEK_SET); struct fat_header fh; fread(&fh, sizeof(fh), 1, f); uint32_t nfat_arch = SWAP32(fh.nfat_arch, magic); printf("Binary is a fat binary with %d archs.\n", nfat_arch); struct fat_arch archs[nfat_arch]; fread(archs, sizeof(archs), 1, f); int fails = 0; uint32_t offset = 0; if(nfat_arch > 0) { offset = SWAP32(archs[0].offset, magic); } for(int i = 0; i < nfat_arch; i++) { off_t orig_offset = SWAP32(archs[i].offset, magic); off_t orig_slice_size = SWAP32(archs[i].size, magic); offset = ROUND_UP(offset, 1 << SWAP32(archs[i].align, magic)); if(orig_offset != offset) { fmemmove(f, offset, orig_offset, orig_slice_size); fbzero(f, MIN(offset, orig_offset) + orig_slice_size, ABSDIFF(offset, orig_offset)); archs[i].offset = SWAP32(offset, magic); } off_t slice_size = orig_slice_size; bool r = insert_dylib(f, offset, dylib_path, &slice_size); if(!r) { printf("Failed to add %s to arch #%d!\n", lc_name, i + 1); fails++; } if(slice_size < orig_slice_size && i < nfat_arch - 1) { fbzero(f, offset + slice_size, orig_slice_size - slice_size); } file_size = offset + slice_size; offset += slice_size; archs[i].size = SWAP32((uint32_t)slice_size, magic); } rewind(f); fwrite(&fh, sizeof(fh), 1, f); fwrite(archs, sizeof(archs), 1, f); // We need to flush before truncating fflush(f); ftruncate(fileno(f), file_size); if(fails == 0) { printf("Added %s to all archs in %s\n", lc_name, binary_path); } else if(fails == nfat_arch) { printf("Failed to add %s to any archs.\n", lc_name); success = false; } else { printf("Added %s to %d/%d archs in %s\n", lc_name, nfat_arch - fails, nfat_arch, 
binary_path); } break; } case MH_MAGIC_64: case MH_CIGAM_64: case MH_MAGIC: case MH_CIGAM: if(insert_dylib(f, 0, dylib_path, &file_size)) { ftruncate(fileno(f), file_size); printf("Added %s to %s\n", lc_name, binary_path); } else { printf("Failed to add %s!\n", lc_name); success = false; } break; default: printf("Unknown magic: 0x%x\n", magic); return 1; } fclose(f); if(!success) { if(!inplace_flag) { unlink(binary_path); } return 1; } if(binary_path_was_malloced) { free((void *)binary_path); } return 0; }
<reponame>libc16/azure-rest-api-specs<filename>specification/apimanagement/control-plane/Microsoft.ApiManagement/preview/2017-03-01/examples/ApiManagementGetIdentityProviders.json { "parameters": { "apimBaseUrl": "apimservice1.management.azure-api.net", "identityProviderName": "google", "api-version": "2017-03-01", "Accept": "application/json" }, "responses": { "200": { "body": { "id": "/identityProviders/Google", "clientId": "FOLBTMWPUI", "clientSecret": "<clientSecret>", "type": "google" } } } }
# Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================= """Tests for python.compiler.mlir.""" from tensorflow.python.compiler.mlir import mlir from tensorflow.python.eager import def_function from tensorflow.python.framework import dtypes from tensorflow.python.framework import errors from tensorflow.python.framework import tensor_spec from tensorflow.python.framework import test_util from tensorflow.python.ops import logging_ops from tensorflow.python.ops import math_ops from tensorflow.python.platform import test from tensorflow.python.pywrap_mlir import import_graphdef class MLIRGraphDefImportTest(test.TestCase): def testImport(self): """Tests the basic flow of `tf.mlir.experimental.convert_graph_def`.""" mlir_module = mlir.convert_graph_def('') # An empty graph should contain at least an empty main function. self.assertIn('func @main', mlir_module) def testInvalidPbtxt(self): with self.assertRaisesRegex(errors.InvalidArgumentError, 'Could not parse input proto'): mlir.convert_graph_def('some invalid proto') def testGraphDefToTf(self): """Tests the basic flow of `tf.mlir.experimental.convert_graph_def` with tf-standard-pipeline converting all the way to the TF dialect. """ tensor_shape = (10, 10) @def_function.function( input_signature=( tensor_spec.TensorSpec(shape=tensor_shape, dtype=dtypes.float32), tensor_spec.TensorSpec(shape=tensor_shape, dtype=dtypes.float32), )) def add_func(lhs, rhs): return math_ops.add(lhs, rhs) tf_graph_def = add_func.get_concrete_function().graph.as_graph_def() mlir_tf = import_graphdef( tf_graph_def, "tf-standard-pipeline", False, input_names=["lhs", "rhs"], input_data_types=["DT_FLOAT", "DT_FLOAT"], input_data_shapes=["10,10", "10,10"], output_names=["Add"]) # Check whether the mlir-function signature has the mentioned # inputs and outputs. self.assertRegex( mlir_tf, r"func @main\(%arg0: tensor<10x10xf32>, %arg1: tensor<10x10xf32>") self.assertRegex(mlir_tf, r'inputs = "lhs,rhs"') self.assertRegex(mlir_tf, r'outputs = "Add"') # Same check with scalar input (empty input shape). mlir_tf = import_graphdef( tf_graph_def, "tf-standard-pipeline", False, input_names=["lhs", "rhs"], input_data_types=["DT_FLOAT", "DT_FLOAT"], input_data_shapes=["", ""], output_names=["Add"]) self.assertRegex(mlir_tf, r"func @main\(%arg0: tensor<f32>, %arg1: tensor<f32>") # Test invalid test cases where no. of input names is invalid/wrong. with self.assertRaisesRegex( errors.InvalidArgumentError, "Length of input node array and data type doesn't match"): import_graphdef( tf_graph_def, "tf-standard-pipeline", False, input_names=["lhs"], input_data_types=["DT_FLOAT", "DT_FLOAT"], input_data_shapes=["10,10", "10,10"], output_names=["Add"]) # Test invalid test cases where the input shapes argument is wrong. 
with self.assertRaisesRegex(errors.InvalidArgumentError, "Dimensions must be equal"): import_graphdef( tf_graph_def, "tf-standard-pipeline", False, input_names=["lhs", "rhs"], input_data_types=["DT_FLOAT", "DT_FLOAT"], input_data_shapes=["10,11", "10,10"], output_names=["Add"]) class MLIRConcreteFunctionImportTest(test.TestCase): @test_util.run_v2_only def testImport(self): @def_function.function def sqr(i): return i * i concrete_function = sqr.get_concrete_function( tensor_spec.TensorSpec(None, dtypes.float32)) mlir_module = mlir.convert_function(concrete_function, show_debug_info=True) self.assertRegex(mlir_module, r'func @.*sqr.*\(') self.assertRegex(mlir_module, r'callsite\(".*mlir_test.py":') @test_util.run_v2_only def testImportWithCall(self): @def_function.function def callee(i): return i @def_function.function def caller(i): return callee(i) concrete_function = caller.get_concrete_function( tensor_spec.TensorSpec(None, dtypes.float32)) mlir_module = mlir.convert_function(concrete_function) self.assertRegex(mlir_module, r'func @.*caller.*\(') self.assertRegex(mlir_module, r'func private @.*callee.*\(') @test_util.run_v2_only def testImportWithControlRet(self): @def_function.function def logging(): logging_ops.print_v2('some message') concrete_function = logging.get_concrete_function() mlir_module = mlir.convert_function(concrete_function, pass_pipeline='') self.assertRegex(mlir_module, r'tf\.PrintV2') self.assertRegex(mlir_module, r'tf_executor.fetch.*: !tf_executor.control') if __name__ == '__main__': test.main()
<reponame>tobireinhard/cbmc<gh_stars>100-1000 public class Main { public void constantCharAt() { StringBuilder sb = new StringBuilder("abc"); char c = sb.charAt(1); assert c == 'b'; } }
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_BROWSER_DOWNLOAD_NOTIFICATION_DOWNLOAD_NOTIFICATION_MANAGER_H_ #define CHROME_BROWSER_DOWNLOAD_NOTIFICATION_DOWNLOAD_NOTIFICATION_MANAGER_H_ #include <memory> #include <set> #include "chrome/browser/download/download_ui_controller.h" #include "chrome/browser/download/notification/download_item_notification.h" #include "chrome/browser/profiles/profile.h" #include "components/download/public/common/download_item.h" class DownloadNotificationManagerForProfile; class DownloadNotificationManager : public DownloadUIController::Delegate { public: explicit DownloadNotificationManager(Profile* profile); ~DownloadNotificationManager() override; void OnAllDownloadsRemoving(Profile* profile); // DownloadUIController::Delegate: void OnNewDownloadReady(download::DownloadItem* item) override; DownloadNotificationManagerForProfile* GetForProfile(Profile* profile) const; private: friend class test::DownloadItemNotificationTest; Profile* main_profile_ = nullptr; std::map<Profile*, std::unique_ptr<DownloadNotificationManagerForProfile>> manager_for_profile_; }; class DownloadNotificationManagerForProfile : public download::DownloadItem::Observer { public: DownloadNotificationManagerForProfile( Profile* profile, DownloadNotificationManager* parent_manager); ~DownloadNotificationManagerForProfile() override; // DownloadItem::Observer overrides: void OnDownloadUpdated(download::DownloadItem* download) override; void OnDownloadOpened(download::DownloadItem* download) override; void OnDownloadRemoved(download::DownloadItem* download) override; void OnDownloadDestroyed(download::DownloadItem* download) override; void OnNewDownloadReady(download::DownloadItem* item); DownloadItemNotification* GetNotificationItemByGuid(const std::string& guid); private: friend class test::DownloadItemNotificationTest; Profile* profile_ = nullptr; DownloadNotificationManager* parent_manager_; // weak std::set<download::DownloadItem*> downloading_items_; std::map<download::DownloadItem*, std::unique_ptr<DownloadItemNotification>> items_; }; #endif // CHROME_BROWSER_DOWNLOAD_NOTIFICATION_DOWNLOAD_NOTIFICATION_MANAGER_H_
import json from typing import Callable, List import networkx import pytest from beagle.backends.networkx import NetworkX from beagle.nodes import Process, File from io import BytesIO from scapy.all import Ether, PcapWriter, Packet, IP, UDP, TCP, DNS, DNSQR, DNSRR from scapy.layers.http import HTTPRequest, HTTP from beagle.datasources.pcap import PCAP def packets_to_datasource_events(packets: List[Packet]) -> PCAP: f = BytesIO() PcapWriter(f).write(packets) f.seek(0) return PCAP(f) # type: ignore @pytest.fixture() def nx() -> Callable[..., NetworkX]: def _backend(*args, **kwargs) -> networkx.Graph: return NetworkX(*args, consolidate_edges=True, **kwargs).graph() # type: ignore return _backend def test_one_node(nx): node = Process("1", "1", "1", "1", "1", "1") G = nx(nodes=[node]) assert len(G.nodes()) == 1 def test_one_edge(nx): proc = Process(process_id=10, process_image="test.exe", command_line="test.exe /c foobar") other_proc = Process(process_id=12, process_image="best.exe", command_line="best.exe /c 123456") proc.launched[other_proc].append(timestamp=1) G = nx(nodes=[proc, other_proc]) assert len(G.nodes()) == 2 assert len(G.edges()) == 1 u = hash(proc) v = hash(other_proc) assert networkx.has_path(G, u, v) assert "Launched" in G[u][v] assert {"timestamp": 1} == G[u][v]["Launched"]["data"][0] def test_node_updated(nx): """After pushing in the first process, the second process which has the same hash should cause the command line attribute to update""" proc = Process(process_id=10, process_image="test.exe", command_line=None) next_proc = Process(process_id=10, process_image="test.exe", command_line="best.exe /c 123456") G = nx(nodes=[proc, next_proc]) in_graph_proc = G.nodes(data=True)[hash(proc)]["data"] assert in_graph_proc.command_line == "best.exe /c 123456" assert in_graph_proc.process_id == 10 assert in_graph_proc.process_image == "test.exe" # Should only have one node, since both nodes inserted are equal assert len(G.nodes()) == 1 def test_edge_has_no_name(nx): proc = Process(process_id=10, process_image="test.exe", command_line=None) other_proc = Process(process_id=12, process_image="best.exe", command_line="best.exe /c 123456") # append never called proc.launched[other_proc] # This shouldn't error. G = nx(nodes=[proc, other_proc]) len(G.nodes()) == 2 len(G.edges()) == 1 def test_empty_graph(nx): backend = NetworkX(nodes=[], consolidate_edges=True) backend.graph() assert backend.is_empty() def test_from_json_object(nx): proc = Process(process_id=10, process_image="test.exe", command_line=None) other_proc = Process(process_id=12, process_image="best.exe", command_line="best.exe /c 123456") proc.launched[other_proc] G = nx(nodes=[proc, other_proc]) _json_output = NetworkX.graph_to_json(G) assert isinstance(_json_output, dict) G2 = NetworkX.from_json(_json_output) # Graphs should be equal. assert networkx.is_isomorphic(G, G2) def test_from_json_path(nx, tmpdir): proc = Process(process_id=10, process_image="test.exe", command_line=None) other_proc = Process(process_id=12, process_image="best.exe", command_line="best.exe /c 123456") proc.launched[other_proc] G = nx(nodes=[proc, other_proc]) _json_output = NetworkX.graph_to_json(G) # Save to file p = tmpdir.mkdir("networkx").join("data.json") p.write(json.dumps(_json_output)) G2 = NetworkX.from_json(p) # Graphs should be equal. 
assert networkx.is_isomorphic(G, G2) def test_from_json_fails_on_invalid(nx, tmpdir): with pytest.raises(ValueError): NetworkX.from_json({}) with pytest.raises(ValueError): NetworkX.from_json({"nodes": []}) with pytest.raises(ValueError): NetworkX.from_json({"links": []}) def test_add_nodes_no_overlap(nx): proc = Process(process_id=10, process_image="test.exe", command_line="test.exe /c foobar") other_proc = Process(process_id=12, process_image="best.exe", command_line="best.exe /c 123456") proc.launched[other_proc].append(timestamp=1) backend = NetworkX(consolidate_edges=True, nodes=[proc, other_proc]) G = backend.graph() assert len(G.nodes()) == 2 assert len(G.edges()) == 1 # Add in a new pair of nodes. proc2 = Process(process_id=4, process_image="malware.exe", command_line="malware.exe /c foobar") f = File(file_name="foo", file_path="bar") proc2.wrote[f] G = backend.add_nodes([proc2, f]) # Graph grew assert len(G.nodes()) == 4 assert len(G.edges()) == 2 def test_add_node_overlaps_existing(nx): proc = Process(process_id=10, process_image="test.exe", command_line="test.exe /c foobar") other_proc = Process(process_id=12, process_image="best.exe", command_line="best.exe /c 123456") proc.launched[other_proc].append(timestamp=1) backend = NetworkX(consolidate_edges=True, nodes=[proc, other_proc]) G = backend.graph() assert len(G.nodes()) == 2 assert len(G.edges()) == 1 # Add a new node that *overlaps* an existing node (note - not the same node object.) proc2 = Process(process_id=10, process_image="test.exe", command_line="test.exe /c foobar") f = File(file_name="foo", file_path="bar") proc2.wrote[f] G = backend.add_nodes([proc2, f]) # Graph grew, but only 3 nodes. assert len(G.nodes()) == 3 assert len(G.edges()) == 2 # Process should have both write and launched edges. u = hash(proc2) v = hash(other_proc) v2 = hash(f) assert networkx.has_path(G, u, v) assert networkx.has_path(G, u, v2) assert "Launched" in G[u][v] assert "Wrote" in G[u][v2] def test_from_datasources(): packets_1 = [ Ether(src="ab:ab:ab:ab:ab:ab", dst="12:12:12:12:12:12") / IP(src="127.0.0.1", dst="192.168.1.1") / TCP(sport=12345, dport=80) / HTTP() / HTTPRequest(Method="GET", Path="/foo", Host="https://google.com") ] packets_2 = [ # HTTP Packet Ether(src="ab:ab:ab:ab:ab:ab", dst="12:12:12:12:12:12") / IP(src="127.0.0.1", dst="192.168.1.1") / TCP(sport=12345, dport=80) / HTTP() / HTTPRequest(Method="GET", Path="/foo", Host="https://google.com"), # DNS Packet Ether(src="ab:ab:ab:ab:ab:ab", dst="12:12:12:12:12:12") / IP(src="127.0.0.1", dst="192.168.1.1") / UDP(sport=80, dport=53) / DNS(rd=1, qd=DNSQR(qtype="A", qname="google.com"), an=DNSRR(rdata="192.168.3.11")), # TCP Packet Ether(src="ab:ab:ab:ab:ab:ab", dst="12:12:12:12:12:12") / IP(src="127.0.0.1", dst="192.168.1.1") / TCP(sport=80, dport=5355), ] nx = NetworkX.from_datasources( [packets_to_datasource_events(packets) for packets in [packets_1, packets_2]] ) # Make the graph nx.graph() assert not nx.is_empty()
<filename>AppleIntelWifiAdapter/mvm/IWLMvmPhy.hpp // // IWLMvmPhy.hpp // AppleIntelWifiAdapter // // Created by <NAME> on 2/19/20. // Copyright © 2020 IntelWifi for MacOS authors. All rights reserved. // #ifndef APPLEINTELWIFIADAPTER_MVM_IWLMVMPHY_HPP_ #define APPLEINTELWIFIADAPTER_MVM_IWLMVMPHY_HPP_ #include "IWLMvmDriver.hpp" int iwl_phy_ctxt_add(IWLMvmDriver *drv, struct iwl_phy_ctx *ctxt, struct apple80211_channel *chan, uint8_t chains_static, uint8_t chains_dynamic); int iwl_phy_ctxt_changed(IWLMvmDriver *drv, struct iwl_phy_ctx *ctxt, struct apple80211_channel *chan, uint8_t chains_static, uint8_t chains_dynamic); int iwl_phy_ctxt_apply(IWLMvmDriver *drv, struct iwl_phy_ctx *ctxt, uint8_t chains_static, uint8_t chains_dynamic, uint32_t action, uint32_t apply_time); void iwl_phy_ctxt_cmd_data(IWLMvmDriver *drv, struct iwl_phy_context_cmd *cmd, struct apple80211_channel *chan, uint8_t chains_static, uint8_t chains_dynamic); void iwl_phy_ctxt_cmd_hdr(IWLMvmDriver *drv, struct iwl_phy_ctx *ctxt, struct iwl_phy_context_cmd *cmd, uint32_t action, uint32_t apply_time); #endif // APPLEINTELWIFIADAPTER_MVM_IWLMVMPHY_HPP_
<reponame>herrphon/Algorithms_Example /** ** Java Program to Implement Ternary Search Algorithm **/ import java.util.Scanner; /** Class TernarySearch **/ public class TernarySearch { /** call function **/ public static int ternarySearch (int[] A, int value) { return ternarySearch(A, value, 0, A.length - 1); } /** TernarySearch function **/ public static int ternarySearch (int[] A, int value, int start, int end) { if (start > end) return -1; /** First boundary: add 1/3 of length to start **/ int mid1 = start + (end-start) / 3; /** Second boundary: add 2/3 of length to start **/ int mid2 = start + 2*(end-start) / 3; if (A[mid1] == value) return mid1; else if (A[mid2] == value) return mid2; /** Search 1st third **/ else if (value < A[mid1]) return ternarySearch (A, value, start, mid1-1); /** Search 3rd third **/ else if (value > A[mid2]) return ternarySearch (A, value, mid2+1, end); /** Search middle third **/ else return ternarySearch (A, value, mid1,mid2); } /** Main method **/ public static void main(String[] args) { Scanner scan = new Scanner( System.in ); System.out.println("Ternary Search Test\n"); int n, i; /** Accept number of elements **/ System.out.println("Enter number of integer elements"); n = scan.nextInt(); /** Create integer array on n elements **/ int arr[] = new int[ n ]; /** Accept elements **/ System.out.println("\nEnter "+ n +" sorted integer elements"); for (i = 0; i < n; i++) arr[i] = scan.nextInt(); System.out.println("\nEnter element to search for : "); int key = scan.nextInt(); int result = ternarySearch(arr, key); if (result == -1) System.out.println("\n"+ key +" element not found"); else System.out.println("\n"+ key +" element found at position "+ result); } }
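Editorial sketch (not part of the original file): the class above drives the search interactively from stdin; for a quick non-interactive check, the public two-argument overload can be called directly on a sorted array, as in this hypothetical snippet.

class TernarySearchSketch {
    public static void main(String[] args) {
        int[] sorted = {2, 5, 8, 12, 16, 23, 38, 56, 72, 91};
        // Expected output: "found at position 5" (value 23 sits at index 5).
        int index = TernarySearch.ternarySearch(sorted, 23);
        System.out.println(index == -1 ? "not found" : "found at position " + index);
    }
}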
/* * Copyright (c) 2007, Novell Inc. * * This program is licensed under the BSD license, read LICENSE.BSD * for further information */ #ifndef COMMON_WRITE_H #define COMMON_WRITE_H #include "repo.h" void tool_write(Repo *repo, FILE *fp); #endif
package com.ucar.datalink.manager.core.flinker.cron.entity;

import com.ucar.datalink.biz.service.JobService;
import com.ucar.datalink.biz.utils.DataLinkFactory;
import com.ucar.datalink.domain.job.JobExecutionInfo;
import com.ucar.datalink.domain.job.JobExecutionState;
import com.ucar.datalink.manager.core.flinker.cron.QuartzManager;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Created by yang.wang09 on 2019-02-13 15:45.
 */
public class EntityCronCheck implements Job {

    private static final Logger logger = LoggerFactory.getLogger(EntityCronCheck.class);

    private JobService jobService = DataLinkFactory.getObject(JobService.class);

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        String job_name = context.getTrigger().getJobKey().getName();
        logger.info("execute -> "+job_name);
        EntityQuartzJob job = (EntityQuartzJob)context.getJobDetail().getJobDataMap().get(job_name);
        long executionId = job.getExecuteId();
        JobExecutionInfo executionInfo = jobService.getJobExecutionById(executionId);
        // If the execution succeeded, this check job deletes itself
        if(JobExecutionState.SUCCEEDED.equalsIgnoreCase(executionInfo.getState())) {
            QuartzManager.getInstance().deleteCheckJob(job);
            return;
        }
        // If the state is not FAILED either, also delete this job
        if( !JobExecutionState.FAILED.equalsIgnoreCase(executionInfo.getState()) ) {
            QuartzManager.getInstance().deleteCheckJob(job);
            return;
        }
        // Create a retry scheduled job, then delete this check job itself
        QuartzManager.getInstance().deleteCheckJob(job);
        EntityQuartzJob retryJob = EntityCronUtil.cloneWithExecuteId(job, executionId);
    }
}
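Editorial sketch (not part of the project): a generic Quartz 2.x registration of a check job like the one above, shown because the project's own QuartzManager/EntityQuartzJob wiring is not visible here. The identity names, group, cron expression, and the Object payload are hypothetical; in the real code the JobDataMap entry keyed by the job name is expected to hold an EntityQuartzJob.

import org.quartz.CronScheduleBuilder;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;
import org.quartz.impl.StdSchedulerFactory;

class EntityCronCheckSchedulingSketch {
    // Registers the check job to fire at second 0 of every minute.
    static void schedule(Object checkPayload) throws SchedulerException {
        Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
        JobDetail jobDetail = JobBuilder.newJob(EntityCronCheck.class)
                .withIdentity("entity-cron-check", "flinker")
                .build();
        // EntityCronCheck.execute() looks the payload up under the job's name.
        jobDetail.getJobDataMap().put("entity-cron-check", checkPayload);
        Trigger trigger = TriggerBuilder.newTrigger()
                .withIdentity("entity-cron-check-trigger", "flinker")
                .withSchedule(CronScheduleBuilder.cronSchedule("0 * * * * ?"))
                .build();
        scheduler.start();
        scheduler.scheduleJob(jobDetail, trigger);
    }
}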
<filename>gengine/app/tasks.py<gh_stars>100-1000 from .tasksystem import EngineTask def includeme(config): # @EngineTask( # name="demo_task", # description="print sth", # config_scheme=None, # default_config=None, # default_cron="* * * * *", # default_activated=True # ) # def demo_task(config): # return { # 'log': None, # 'success': True # } from gengine.app.registries import get_task_registration_points_registry get_task_registration_points_registry().run_extensions() @EngineTask( name="import_test_data", description="import the test demo data", config_scheme=None, default_config=None, default_cron="0 0 5 31 2", # 31st feb will never exec default_activated=True ) def demo_task(config): from gengine.app.tests.helpers import create_subjecttypes, create_subjects, create_achievements, create_variables create_subjecttypes() create_subjects() create_variables() create_achievements() return { 'log': None, 'success': True }
<gh_stars>100-1000 #include "stdafx.h" #include "SettingsDialog.h" namespace et { namespace edit { //==================== // Settings Dialog //==================== //--------------------------------- // SettingsDialog::SettingsDialog // // Settings Dialog default constructor // SettingsDialog::SettingsDialog(BaseObjectType* cobject, const Glib::RefPtr<Gtk::Builder>& refBuilder) : Gtk::Dialog(cobject) , m_RefBuilder(refBuilder) , m_Settings() { m_Settings = Gio::Settings::create("com.leah-lindner.editor"); } //static //--------------------------------- // SettingsDialog::create // // Create a settings dialog from the generated source in prefs.ui // SettingsDialog* SettingsDialog::create(Gtk::Window& parent) { // Load the Builder file and instantiate its widgets. Glib::RefPtr<Gtk::Builder> refBuilder = Gtk::Builder::create_from_resource("/com/leah-lindner/editor/ui/prefs.ui"); SettingsDialog* dialog = nullptr; refBuilder->get_widget_derived("prefs_dialog", dialog); if (!dialog) { throw std::runtime_error("No 'prefs_dialog' object in prefs.ui"); } return dialog; } } // namespace edit } // namespace et
<gh_stars>1000+ // ------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License (MIT). See License.txt in the repo root for license information. // ------------------------------------------------------------ namespace Hosting2 { class DummyStatelessServiceFactory : public IFabricStatelessServiceFactory, public Common::ComUnknownBase { COM_INTERFACE_LIST1( StatelessServiceFactory, IID_IFabricStatelessServiceFactory, IFabricStatelessServiceFactory) public: DummyStatelessServiceFactory(std::wstring supportedServiceType); virtual ~DummyStatelessServiceFactory(); virtual HRESULT STDMETHODCALLTYPE CreateInstance( /* [in] */ LPCWSTR serviceType, /* [in] */ FABRIC_URI serviceName, /* [in] */ ULONG initializationDataLength, /* [size_is][in] */ const byte *initializationData, /* [in] */ FABRIC_PARTITION_ID partitionId, /* [in] */ FABRIC_INSTANCE_ID instanceId, /* [retval][out] */ IFabricStatelessServiceInstance **serviceInstance); std::wstring const & get_SupportedServiceType() { return supportedServiceType_; } private: std::wstring supportedServiceType_; }; }
/* * Copyright 2021 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.camera.camera2; import android.content.Context; import android.hardware.camera2.CameraCharacteristics; import android.os.Build; import androidx.camera.core.CameraSelector; import androidx.camera.core.CameraX; import androidx.camera.core.CameraXConfig; import androidx.test.core.app.ApplicationProvider; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.RobolectricTestRunner; import org.robolectric.annotation.Config; import org.robolectric.annotation.internal.DoNotInstrument; import org.robolectric.shadow.api.Shadow; import org.robolectric.shadows.ShadowCameraCharacteristics; import org.robolectric.shadows.ShadowCameraManager; import java.util.concurrent.TimeUnit; @RunWith(RobolectricTestRunner.class) @DoNotInstrument @Config(minSdk = Build.VERSION_CODES.LOLLIPOP) public class CameraXConfigTest { @Test public void canInitializeWithGetAvailableCamerasLimiterWhenNoCamera() throws Exception { CameraXConfig cameraXConfig = CameraXConfig.Builder.fromConfig(Camera2Config.defaultConfig()) .setAvailableCamerasLimiter(CameraSelector.DEFAULT_BACK_CAMERA) .build(); CameraX.initialize(ApplicationProvider.getApplicationContext(), cameraXConfig).get(3, TimeUnit.SECONDS); CameraX.shutdown().get(3, TimeUnit.SECONDS); } private void initCharacterisics(String cameraId, int lensFacing) { CameraCharacteristics characteristics = ShadowCameraCharacteristics.newCameraCharacteristics(); ShadowCameraCharacteristics shadowCharacteristics = Shadow.extract(characteristics); shadowCharacteristics.set(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL); shadowCharacteristics.set(CameraCharacteristics.LENS_FACING, lensFacing); ((ShadowCameraManager) Shadow.extract( ApplicationProvider.getApplicationContext() .getSystemService(Context.CAMERA_SERVICE))) .addCamera(cameraId, characteristics); } }
// Copyright 2020 The Defold Foundation // Licensed under the Defold License version 1.0 (the "License"); you may not use // this file except in compliance with the License. // // You may obtain a copy of the License, together with FAQs at // https://www.defold.com/license // // Unless required by applicable law or agreed to in writing, software distributed // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR // CONDITIONS OF ANY KIND, either express or implied. See the License for the // specific language governing permissions and limitations under the License. package com.defold.sound; import com.defold.sound.SoundManager; import android.content.Context; import android.media.AudioManager; import android.os.Build; import android.util.Log; public class Sound { public static int getSampleRate(Context context) { final int default_rate = 44100; if (Build.VERSION.SDK_INT >= 17) { AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); String sampleRate = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE); if (sampleRate == null) { return default_rate; } return Integer.parseInt(sampleRate); } else { Log.w(SoundManager.TAG, "Android version < 17. Unable to determine hardware sample rate."); return default_rate; } } public static int getFramesPerBuffer(Context context) { final int default_frames = 1024; if (Build.VERSION.SDK_INT >= 17) { AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); String framesPerBuffer = audioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER); if (framesPerBuffer == null) { return default_frames; } return Integer.parseInt(framesPerBuffer); } else { Log.w(SoundManager.TAG, "Android version < 17. Unable to determine hardware frame count."); return default_frames; } } }
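Editorial sketch (not part of the Defold sources): a hypothetical helper showing how the two static queries above would typically be combined to read the device's preferred output sample rate and buffer size; any valid Android Context works, and the values fall back to 44100 Hz / 1024 frames on API < 17 or when a property is unavailable.

import android.content.Context;
import com.defold.sound.Sound;

class SoundConfigSketch {
    // Returns {sampleRateHz, framesPerBuffer} for the device's preferred output config.
    static int[] preferredOutputConfig(Context context) {
        int sampleRate = Sound.getSampleRate(context);
        int framesPerBuffer = Sound.getFramesPerBuffer(context);
        return new int[] { sampleRate, framesPerBuffer };
    }
}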
<gh_stars>1000+ #include <escher/even_odd_cell_with_ellipsis.h> namespace Escher { EvenOddCellWithEllipsis::EvenOddCellWithEllipsis() : EvenOddCell() { } void EvenOddCellWithEllipsis::layoutSubviews(bool force) { m_ellipsisView.setFrame(bounds(), force); } }
<gh_stars>100-1000 extern "C" { # include <dopenssl/rsa.h> }
 #pragma once #include "base/mappable.hpp" #include "geometry/frame.hpp" #include "geometry/grassmann.hpp" #include "geometry/linear_map.hpp" #include "geometry/quaternion.hpp" #include "geometry/r3_element.hpp" #include "geometry/rotation.hpp" #include "geometry/sign.hpp" #include "geometry/signature.hpp" #include "serialization/geometry.pb.h" namespace principia { namespace physics { class RigidMotionTest; } // namespace physics namespace geometry { FORWARD_DECLARE_FROM(identity, TEMPLATE(typename FromFrame, typename ToFrame) class, Identity); FORWARD_DECLARE_FROM(permutation, TEMPLATE(typename FromFrame, typename ToFrame) class, Permutation); FORWARD_DECLARE_FROM(rotation, TEMPLATE(typename FromFrame, typename ToFrame) class, Rotation); FORWARD_DECLARE_FROM(signature, TEMPLATE(typename FromFrame, typename ToFrame) class, Signature); FORWARD_DECLARE_FROM( symmetric_bilinear_form, TEMPLATE(typename Scalar, typename Frame, template<typename, typename> typename Multivector) class, SymmetricBilinearForm); namespace internal_orthogonal_map { using base::not_null; // An orthogonal map between the inner product spaces |FromFrame| and // |ToFrame|, as well as the induced maps on the exterior algebra. // The orthogonal map is modeled as a rotoinversion. template<typename FromFrame, typename ToFrame> class OrthogonalMap : public LinearMap<FromFrame, ToFrame> { public: Sign Determinant() const override; template<typename F = FromFrame, typename T = ToFrame, typename = std::enable_if_t<F::handedness == T::handedness>> Rotation<FromFrame, ToFrame> AsRotation() const; OrthogonalMap<ToFrame, FromFrame> Inverse() const; template<typename Scalar> Vector<Scalar, ToFrame> operator()( Vector<Scalar, FromFrame> const& vector) const; template<typename Scalar> Bivector<Scalar, ToFrame> operator()( Bivector<Scalar, FromFrame> const& bivector) const; template<typename Scalar> Trivector<Scalar, ToFrame> operator()( Trivector<Scalar, FromFrame> const& trivector) const; template<typename Scalar, template<typename, typename> typename Multivector> SymmetricBilinearForm<Scalar, ToFrame, Multivector> operator()( SymmetricBilinearForm<Scalar, FromFrame, Multivector> const& form) const; template<typename T> typename base::Mappable<OrthogonalMap, T>::type operator()(T const& t) const; template<typename F = FromFrame, typename T = ToFrame, typename = std::enable_if_t<F::handedness == T::handedness>> static OrthogonalMap Identity(); void WriteToMessage(not_null<serialization::LinearMap*> message) const; template<typename F = FromFrame, typename T = ToFrame, typename = std::enable_if_t<base::is_serializable_v<F> && base::is_serializable_v<T>>> static OrthogonalMap ReadFromMessage(serialization::LinearMap const& message); void WriteToMessage(not_null<serialization::OrthogonalMap*> message) const; template<typename F = FromFrame, typename T = ToFrame, typename = std::enable_if_t<base::is_serializable_v<F> && base::is_serializable_v<T>>> static OrthogonalMap ReadFromMessage( serialization::OrthogonalMap const& message); private: explicit OrthogonalMap(Quaternion const& quaternion); using IntermediateFrame = Frame<enum class IntermediateFrameTag, ToFrame::motion, ToFrame::handedness>; static constexpr Signature<FromFrame, IntermediateFrame> MakeSignature(); Rotation<IntermediateFrame, ToFrame> MakeRotation() const; Quaternion quaternion_; static constexpr Sign determinant_ = FromFrame::handedness == ToFrame::handedness ? 
Sign::Positive() : Sign::Negative(); template<typename From, typename To> friend class internal_identity::Identity; template<typename From, typename To> friend class OrthogonalMap; template<typename From, typename To> friend class internal_permutation::Permutation; template<typename From, typename To> friend class internal_rotation::Rotation; template<typename From, typename To> friend class internal_signature::Signature; template<typename From, typename Through, typename To> friend OrthogonalMap<From, To> operator*( OrthogonalMap<Through, To> const& left, OrthogonalMap<From, Through> const& right); template<typename From, typename To> friend std::ostream& operator<<( std::ostream& out, OrthogonalMap<From, To> const& orthogonal_map); friend class OrthogonalMapTest; }; template<typename FromFrame, typename ThroughFrame, typename ToFrame> OrthogonalMap<FromFrame, ToFrame> operator*( OrthogonalMap<ThroughFrame, ToFrame> const& left, OrthogonalMap<FromFrame, ThroughFrame> const& right); template<typename FromFrame, typename ToFrame> std::ostream& operator<<( std::ostream& out, OrthogonalMap<FromFrame, ToFrame> const& orthogonal_map); } // namespace internal_orthogonal_map using internal_orthogonal_map::OrthogonalMap; } // namespace geometry } // namespace principia #include "geometry/orthogonal_map_body.hpp"
<reponame>SanchitMisal/aiyprojects-raspbian # Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest from aiy.vision.inference import ImageInference from aiy.vision.models import object_detection as od from .test_util import define_test_case, TestImage def crop_center(image): width, height = image.size size = min(width, height) x, y = (width - size) / 2, (height - size) / 2 return image.crop((x, y, x + size, y + size)), (x, y) class ObjectDetectionTest: THRESHOLD = 0.3 def __init__(self, image_file, sparse): self.image_file = image_file self.sparse = sparse self.check = {'dog.jpg': self.check_dog, 'cat.jpg': self.check_cat}[image_file] def check_dog(self, objects): self.assertEqual(1, len(objects)) self.assertEqual(od.Object.DOG, objects[0].kind) self.assertAlmostEqual(0.914, objects[0].score, delta=0.001) self.assertEqual((52, 116, 570, 485), objects[0].bounding_box) def check_cat(self, objects): self.assertEqual(1, len(objects)) self.assertEqual(od.Object.CAT, objects[0].kind) self.assertAlmostEqual(0.672, objects[0].score, delta=0.001) self.assertEqual((575, 586, 2187, 1758), objects[0].bounding_box) def test_detection(self): with TestImage(self.image_file) as image: image_center, offset = crop_center(image) with ImageInference(od.model()) as inference: if self.sparse: sparse_configs = od.sparse_configs(threshold=self.THRESHOLD) result = inference.run(image_center, sparse_configs=sparse_configs) objects = od.get_objects_sparse(result, offset) else: result = inference.run(image_center) objects = od.get_objects(result, self.THRESHOLD, offset) self.check(objects) define_test_case(globals(), ObjectDetectionTest, 'dog.jpg', False) define_test_case(globals(), ObjectDetectionTest, 'dog.jpg', True) define_test_case(globals(), ObjectDetectionTest, 'cat.jpg', False) define_test_case(globals(), ObjectDetectionTest, 'cat.jpg', True) if __name__ == '__main__': unittest.main(verbosity=2)
<filename>src/xenia/ui/window_android.h<gh_stars>1000+ /** ****************************************************************************** * Xenia : Xbox 360 Emulator Research Project * ****************************************************************************** * Copyright 2021 <NAME>. All rights reserved. * * Released under the BSD license - see LICENSE in the root for more details. * ****************************************************************************** */ #ifndef XENIA_UI_WINDOW_ANDROID_H_ #define XENIA_UI_WINDOW_ANDROID_H_ #include "xenia/ui/menu_item.h" #include "xenia/ui/window.h" namespace xe { namespace ui { class AndroidWindow : public Window { public: // Many functions are left unimplemented because the layout is configured from // XML and Java. AndroidWindow(WindowedAppContext& app_context, const std::string& title) : Window(app_context, title) {} ~AndroidWindow(); NativePlatformHandle native_platform_handle() const override { return nullptr; } // TODO(Triang3l): ANativeWindow for Vulkan surface creation. NativeWindowHandle native_handle() const override { return nullptr; } void EnableMainMenu() override {} void DisableMainMenu() override {} bool SetIcon(const void* buffer, size_t size) override { return false; } bool CaptureMouse() override { return false; } bool ReleaseMouse() override { return false; } int get_medium_dpi() const override { return 160; } // TODO(Triang3l): Call the close event, which may finish the activity. void Close() override {} }; // Dummy for the menu item - menus are controlled by the layout. // TODO(Triang3l): Make something like MenuItem work as the basic common action // interface for Java buttons. class AndroidMenuItem final : public MenuItem { public: AndroidMenuItem(Type type, const std::string& text, const std::string& hotkey, std::function<void()> callback) : MenuItem(type, text, hotkey, callback) {} void EnableMenuItem(Window& window) override {} void DisableMenuItem(Window& window) override {} }; } // namespace ui } // namespace xe #endif // XENIA_UI_WINDOW_ANDROID_H_
<filename>main/svtools/source/edit/textundo.cxx /************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ // MARKER(update_precomp.py): autogen include statement, do not remove #include "precompiled_svtools.hxx" #include <svtools/texteng.hxx> #include <svtools/textview.hxx> #include <textundo.hxx> #include <textund2.hxx> #include <svtools/textdata.hxx> #include <textdoc.hxx> #include <textdat2.hxx> TYPEINIT1( TextUndo, SfxUndoAction ); TYPEINIT1( TextUndoDelPara, TextUndo ); TYPEINIT1( TextUndoConnectParas, TextUndo ); TYPEINIT1( TextUndoSplitPara, TextUndo ); TYPEINIT1( TextUndoInsertChars, TextUndo ); TYPEINIT1( TextUndoRemoveChars, TextUndo ); TYPEINIT1( TextUndoSetAttribs, TextUndo ); TextUndoManager::TextUndoManager( TextEngine* p ) { mpTextEngine = p; } TextUndoManager::~TextUndoManager() { } sal_Bool __EXPORT TextUndoManager::Undo() { if ( GetUndoActionCount() == 0 ) return sal_False; UndoRedoStart(); mpTextEngine->SetIsInUndo( sal_True ); sal_Bool bDone = SfxUndoManager::Undo(); mpTextEngine->SetIsInUndo( sal_False ); UndoRedoEnd(); return bDone; } sal_Bool __EXPORT TextUndoManager::Redo() { if ( GetRedoActionCount() == 0 ) return sal_False; UndoRedoStart(); mpTextEngine->SetIsInUndo( sal_True ); sal_Bool bDone = SfxUndoManager::Redo(); mpTextEngine->SetIsInUndo( sal_False ); UndoRedoEnd(); return bDone; } void TextUndoManager::UndoRedoStart() { DBG_ASSERT( GetView(), "Undo/Redo: Active View?" 
); // if ( GetView() ) // GetView()->HideSelection(); } void TextUndoManager::UndoRedoEnd() { if ( GetView() ) { TextSelection aNewSel( GetView()->GetSelection() ); aNewSel.GetStart() = aNewSel.GetEnd(); GetView()->ImpSetSelection( aNewSel ); } mpTextEngine->UpdateSelections(); mpTextEngine->FormatAndUpdate( GetView() ); } TextUndo::TextUndo( TextEngine* p ) { mpTextEngine = p; } TextUndo::~TextUndo() { } XubString __EXPORT TextUndo::GetComment() const { // return mpTextEngine->GetUndoComment( this ); return String(); } void TextUndo::SetSelection( const TextSelection& rSel ) { if ( GetView() ) GetView()->ImpSetSelection( rSel ); } TextUndoDelPara::TextUndoDelPara( TextEngine* pTextEngine, TextNode* pNode, sal_uLong nPara ) : TextUndo( pTextEngine ) { mpNode = pNode; mnPara = nPara; mbDelObject = sal_True; } TextUndoDelPara::~TextUndoDelPara() { if ( mbDelObject ) delete mpNode; } void __EXPORT TextUndoDelPara::Undo() { GetTextEngine()->InsertContent( mpNode, mnPara ); mbDelObject = sal_False; // owned by the engine again if ( GetView() ) { TextSelection aSel( TextPaM( mnPara, 0 ), TextPaM( mnPara, mpNode->GetText().Len() ) ); SetSelection( aSel ); } } void __EXPORT TextUndoDelPara::Redo() { // pNode is no longer valid if intervening undos merged paragraphs in the meantime. mpNode = GetDoc()->GetNodes().GetObject( mnPara ); delete GetTEParaPortions()->GetObject( mnPara ); GetTEParaPortions()->Remove( mnPara ); // Do not delete the node, it is still referenced by the undo action! GetDoc()->GetNodes().Remove( mnPara ); GetTextEngine()->ImpParagraphRemoved( mnPara ); mbDelObject = sal_True; // owned by the undo action again sal_uLong nParas = GetDoc()->GetNodes().Count(); sal_uLong n = mnPara < nParas ? mnPara : (nParas-1); TextNode* pN = GetDoc()->GetNodes().GetObject( n ); TextPaM aPaM( n, pN->GetText().Len() ); SetSelection( aPaM ); } // ----------------------------------------------------------------------- // TextUndoConnectParas // ------------------------------------------------------------------------ TextUndoConnectParas::TextUndoConnectParas( TextEngine* pTextEngine, sal_uLong nPara, sal_uInt16 nPos ) : TextUndo( pTextEngine ) { mnPara = nPara; mnSepPos = nPos; } TextUndoConnectParas::~TextUndoConnectParas() { } void __EXPORT TextUndoConnectParas::Undo() { TextPaM aPaM = GetTextEngine()->SplitContent( mnPara, mnSepPos ); SetSelection( aPaM ); } void __EXPORT TextUndoConnectParas::Redo() { TextPaM aPaM = GetTextEngine()->ConnectContents( mnPara ); SetSelection( aPaM ); } TextUndoSplitPara::TextUndoSplitPara( TextEngine* pTextEngine, sal_uLong nPara, sal_uInt16 nPos ) : TextUndo( pTextEngine ) { mnPara = nPara; mnSepPos = nPos; } TextUndoSplitPara::~TextUndoSplitPara() { } void __EXPORT TextUndoSplitPara::Undo() { TextPaM aPaM = GetTextEngine()->ConnectContents( mnPara ); SetSelection( aPaM ); } void __EXPORT TextUndoSplitPara::Redo() { TextPaM aPaM = GetTextEngine()->SplitContent( mnPara, mnSepPos ); SetSelection( aPaM ); } TextUndoInsertChars::TextUndoInsertChars( TextEngine* pTextEngine, const TextPaM& rTextPaM, const XubString& rStr ) : TextUndo( pTextEngine ), maTextPaM( rTextPaM ), maText( rStr ) { } void __EXPORT TextUndoInsertChars::Undo() { TextSelection aSel( maTextPaM, maTextPaM ); aSel.GetEnd().GetIndex() = aSel.GetEnd().GetIndex() + maText.Len(); TextPaM aPaM = GetTextEngine()->ImpDeleteText( aSel ); SetSelection( aPaM ); } void __EXPORT TextUndoInsertChars::Redo() { TextSelection aSel( maTextPaM, maTextPaM ); GetTextEngine()->ImpInsertText( aSel, maText ); TextPaM aNewPaM( maTextPaM );
aNewPaM.GetIndex() = aNewPaM.GetIndex() + maText.Len(); SetSelection( TextSelection( aSel.GetStart(), aNewPaM ) ); } sal_Bool __EXPORT TextUndoInsertChars::Merge( SfxUndoAction* pNextAction ) { if ( !pNextAction->ISA( TextUndoInsertChars ) ) return sal_False; TextUndoInsertChars* pNext = (TextUndoInsertChars*)pNextAction; if ( maTextPaM.GetPara() != pNext->maTextPaM.GetPara() ) return sal_False; if ( ( maTextPaM.GetIndex() + maText.Len() ) == pNext->maTextPaM.GetIndex() ) { maText += pNext->maText; return sal_True; } return sal_False; } TextUndoRemoveChars::TextUndoRemoveChars( TextEngine* pTextEngine, const TextPaM& rTextPaM, const XubString& rStr ) : TextUndo( pTextEngine ), maTextPaM( rTextPaM ), maText( rStr ) { } void __EXPORT TextUndoRemoveChars::Undo() { TextSelection aSel( maTextPaM, maTextPaM ); GetTextEngine()->ImpInsertText( aSel, maText ); aSel.GetEnd().GetIndex() = aSel.GetEnd().GetIndex() + maText.Len(); SetSelection( aSel ); } void __EXPORT TextUndoRemoveChars::Redo() { TextSelection aSel( maTextPaM, maTextPaM ); aSel.GetEnd().GetIndex() = aSel.GetEnd().GetIndex() + maText.Len(); TextPaM aPaM = GetTextEngine()->ImpDeleteText( aSel ); SetSelection( aPaM ); } TextUndoSetAttribs::TextUndoSetAttribs( TextEngine* pTextEngine, const TextSelection& rSel ) : TextUndo( pTextEngine ), maSelection( rSel ) { maSelection.Justify(); // aNewAttribs.Set( rNewItems ); // mbSetIsRemove = sal_False; // mnRemoveWhich = 0; // mnSpecial = 0; } TextUndoSetAttribs::~TextUndoSetAttribs() { // ............... } void __EXPORT TextUndoSetAttribs::Undo() { for ( sal_uLong nPara = maSelection.GetStart().GetPara(); nPara <= maSelection.GetEnd().GetPara(); nPara++ ) { // ContentAttribsInfo* pInf = aPrevAttribs[ (sal_uInt16)(nPara-aESel.nStartPara) ]; // GetTextEngine()->RemoveCharAttribs( nPara ); // TextNode* pNode = GetTextEngine()->GetTextDoc().GetObject( nPara ); // for ( sal_uInt16 nAttr = 0; nAttr < pInf->GetPrevCharAttribs().Count(); nAttr++ ) // { // GetTextEngine()->GetTextDoc().InsertAttrib( pNode, pX->GetStart(), pX->GetEnd(), *pX->GetItem() ); // } } SetSelection( maSelection ); } void __EXPORT TextUndoSetAttribs::Redo() { // if ( !bSetIsRemove ) // GetTextEngine()->SetAttribs( aSel, aNewAttribs, nSpecial ); // else // GetTextEngine()->RemoveCharAttribs( aSel, bRemoveParaAttribs, nRemoveWhich ); SetSelection( maSelection ); }
<filename>java/yb-client/src/main/java/org/yb/client/YBTable.java // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. // // The following only applies to changes made to this file as part of YugaByte development. // // Portions Copyright (c) YugaByte, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except // in compliance with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distributed under the License // is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express // or implied. See the License for the specific language governing permissions and limitations // under the License. // package org.yb.client; import static org.yb.Common.TableType; import org.yb.Schema; import org.yb.annotations.InterfaceAudience; import org.yb.annotations.InterfaceStability; import com.stumbleupon.async.Deferred; import java.util.List; import java.util.Map; import java.util.HashMap; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; /** * A YBTable represents a table on a particular cluster. It holds the current * schema of the table. Any given YBTable instance belongs to a specific AsyncYBClient * instance. * * Upon construction, the table is looked up in the catalog (or catalog cache), * and the schema fetched for introspection. The schema is not kept in sync with the master. * * This class is thread-safe. */ @InterfaceAudience.Public @InterfaceStability.Evolving public class YBTable { private final Schema schema; private final PartitionSchema partitionSchema; private final AsyncYBClient client; private final String name; private final String keyspace; private final String tableId; private final TableType tableType; private static final String OBSERVER = "OBSERVER"; private static final String PRE_OBSERVER = "PRE_OBSERVER"; /** * Package-private constructor, use {@link YBClient#openTable(String)} to get an instance. * @param client the client this instance belongs to * @param name this table's name * @param schema this table's schema */ YBTable(AsyncYBClient client, String name, String tableId, Schema schema, PartitionSchema partitionSchema, TableType tableType, String keyspace) { this.schema = schema; this.partitionSchema = partitionSchema; this.client = client; this.name = name; this.tableId = tableId; this.tableType = tableType; this.keyspace = keyspace; } YBTable(AsyncYBClient client, String name, String tableId, Schema schema, PartitionSchema partitionSchema) { this(client, name, tableId, schema, partitionSchema, TableType.YQL_TABLE_TYPE, null); } /** * Get this table's schema, as of the moment this instance was created. 
* @return this table's schema */ public Schema getSchema() { return this.schema; } /** * Get this table's type. */ public TableType getTableType() { return this.tableType; } /** * Gets the table's partition schema. * * This method is new, and not considered stable or suitable for public use. * * @return the table's partition schema. */ @InterfaceAudience.LimitedPrivate("Impala") @InterfaceStability.Unstable public PartitionSchema getPartitionSchema() { return partitionSchema; } /** * Get this table's name. * @return this table's name */ public String getName() { return this.name; } /** * Get this table's keyspace. * @return this table's keyspace. */ public String getKeyspace() { return this.keyspace; } /** * Get this table's unique identifier. * @return this table's tableId */ public String getTableId() { return tableId; } /** * Get the async client that created this instance. * @return an async yb java client. */ public AsyncYBClient getAsyncClient() { return this.client; } /** * Get all the tablets for this table. This may query the master multiple times if there * are a lot of tablets. * @param deadline deadline in milliseconds for this method to finish * @return a list containing the metadata and locations for each of the tablets in the * table * @throws Exception */ public List<LocatedTablet> getTabletsLocations( long deadline) throws Exception { return getTabletsLocations(null, null, deadline); } /** * Asynchronously get all the tablets for this table. * @param deadline max time spent in milliseconds for the deferred result of this method to * get called back, if deadline is reached, the deferred result will get erred back * @return a {@link Deferred} object that yields a list containing the metadata and * locations for each of the tablets in the table */ public Deferred<List<LocatedTablet>> asyncGetTabletsLocations( long deadline) throws Exception { return asyncGetTabletsLocations(null, null, deadline); } /** * Get all or some tablets for this table. This may query the master multiple times if there * are a lot of tablets. * This method blocks until it gets all the tablets. * @param startKey where to start in the table, pass null to start at the beginning * @param endKey where to stop in the table, pass null to get all the tablets until the end of * the table * @param deadline deadline in milliseconds for this method to finish * @return a list containing the metadata and locations for each of the tablets in the * table * @throws Exception */ public List<LocatedTablet> getTabletsLocations( byte[] startKey, byte[] endKey, long deadline) throws Exception { return client.syncLocateTable(tableId, startKey, endKey, deadline); } /** * Asynchronously get all or some tablets for this table. 
* @param startKey where to start in the table, pass null to start at the beginning * @param endKey where to stop in the table, pass null to get all the tablets until the end of * the table * @param deadline max time spent in milliseconds for the deferred result of this method to * get called back, if deadline is reached, the deferred result will get erred back * @return a {@link Deferred} object that yields a list containing the metadata and locations * for each of the tablets in the table */ public Deferred<List<LocatedTablet>> asyncGetTabletsLocations( byte[] startKey, byte[] endKey, long deadline) throws Exception { return client.locateTable(tableId, startKey, endKey, deadline); } /** * Loop through all replicas in the table and store a mapping from tserver placement uuid to * a list of lists, containing the live replica count per ts, followed by the read * replica count per ts. If there are two placement uuids, live and readOnly, and two * tservers in each uuid, and RF=1 for both live and readOnly with 8 tablets * in the table, the resulting map would look like this: * "live" : { [[1, 1], [0, 0]] }, "readOnly" : { [[0, 0], [1, 1]] }. * The first list in the map corresponds to live replica counts per tserver, and the * second list corresponds to read only replica counts. * @param deadline deadline in milliseconds for getTabletsLocations rpc. * @return a map from placement uuid to a list of lists of integers. */ public Map<String, List<List<Integer>>> getMemberTypeCountsForEachTSType(long deadline) throws Exception { // Intermediate map which contains an internal map from ts uuid to live and // read replica counts. Map<String, Map<String, List<Integer>>> intermediateMap = new HashMap<String, Map<String, List<Integer>>>(); List<LocatedTablet> tablets = getTabletsLocations(deadline); for (LocatedTablet tablet : tablets) { for (LocatedTablet.Replica replica : tablet.getReplicas()) { String placementUuid = replica.getTsPlacementUuid(); Map<String, List<Integer>> tsMap; if (intermediateMap.containsKey(placementUuid)) { tsMap = intermediateMap.get(placementUuid); } else { tsMap = new HashMap<String, List<Integer>>(); } String tsUuid = replica.getTsUuid(); List<Integer> liveReadOnlyCounts; if (tsMap.containsKey(tsUuid)) { liveReadOnlyCounts = tsMap.get(tsUuid); } else { liveReadOnlyCounts = new ArrayList<>(Arrays.asList(0, 0)); } if (replica.getMemberType().equals(OBSERVER) || replica.getMemberType().equals(PRE_OBSERVER)) { // This is a read-only member. int currCount = liveReadOnlyCounts.get(1); liveReadOnlyCounts.set(1, currCount + 1); } else { int currCount = liveReadOnlyCounts.get(0); liveReadOnlyCounts.set(0, currCount + 1); } tsMap.put(tsUuid, liveReadOnlyCounts); intermediateMap.put(placementUuid, tsMap); } } // Now, convert our internal map into the return map by getting rid of ts uuid.
Map<String, List<List<Integer>>> returnMap = new HashMap<String, List<List<Integer>>>(); for (Map.Entry<String, Map<String, List<Integer>>> placementEntry : intermediateMap.entrySet()) { List<List<Integer>> newEntry = new ArrayList<List<Integer>>(); List<Integer> liveCounts = new ArrayList<Integer>(); List<Integer> readOnlyCounts = new ArrayList<Integer>(); for (Map.Entry<String, List<Integer>> tsEntry : placementEntry.getValue().entrySet()) { liveCounts.add(tsEntry.getValue().get(0)); readOnlyCounts.add(tsEntry.getValue().get(1)); } Collections.sort(liveCounts); Collections.sort(readOnlyCounts); newEntry.add(liveCounts); newEntry.add(readOnlyCounts); returnMap.put(placementEntry.getKey(), newEntry); } return returnMap; } }
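// A minimal usage sketch for getMemberTypeCountsForEachTSType(), assuming an already-opened
// YBTable (the constructor javadoc points to YBClient#openTable) and an arbitrary 30-second
// deadline; the helper class and method names below are illustrative, not part of the
// yb-client sources.
import java.util.List;
import java.util.Map;

import org.yb.client.YBTable;

public final class ReplicaCountReport {
    /** Prints live and read-only replica counts per placement uuid for the given table. */
    public static void print(YBTable table) throws Exception {
        long deadlineMs = 30_000; // assumed RPC deadline, in milliseconds
        Map<String, List<List<Integer>>> counts = table.getMemberTypeCountsForEachTSType(deadlineMs);
        for (Map.Entry<String, List<List<Integer>>> entry : counts.entrySet()) {
            // Per the javadoc: index 0 holds the sorted live replica counts per tserver,
            // index 1 holds the sorted read-only replica counts.
            List<Integer> liveCounts = entry.getValue().get(0);
            List<Integer> readOnlyCounts = entry.getValue().get(1);
            System.out.println("placement " + entry.getKey()
                + ": live=" + liveCounts + ", readOnly=" + readOnlyCounts);
        }
    }
}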
/* * Copyright (c) 2016, 2017, 2018, 2019 FabricMC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.fabricmc.fabric.impl.transfer.fluid; import java.util.Map; import com.google.common.collect.MapMaker; import com.google.common.primitives.Ints; import net.minecraft.block.BlockState; import net.minecraft.block.Blocks; import net.minecraft.util.math.BlockPos; import net.minecraft.world.World; import net.fabricmc.fabric.api.transfer.v1.fluid.CauldronFluidContent; import net.fabricmc.fabric.api.transfer.v1.fluid.FluidVariant; import net.fabricmc.fabric.api.transfer.v1.storage.StoragePreconditions; import net.fabricmc.fabric.api.transfer.v1.storage.base.SingleSlotStorage; import net.fabricmc.fabric.api.transfer.v1.transaction.TransactionContext; import net.fabricmc.fabric.api.transfer.v1.transaction.base.SnapshotParticipant; /** * Standard implementation of {@code Storage<FluidVariant>}, using cauldron/fluid mappings registered in {@link CauldronFluidContent}. * * <p>Implementation notes: * <ul> * <li>To make sure multiple access to the same cauldron return the same wrapper, we maintain a {@code (World, BlockPos) -> Wrapper} cache.</li> * <li>The wrapper mutates the world directly with setBlockState, but updates are suppressed. * On final commit, a block update is sent by reverting to {@linkplain #lastReleasedSnapshot the initial block state} with updates suppressed, * then setting the final block state again, without suppressing updates.</li> * </ul> */ public class CauldronStorage extends SnapshotParticipant<BlockState> implements SingleSlotStorage<FluidVariant> { // Record is used for convenient constructor, hashcode and equals implementations. private record WorldLocation(World world, BlockPos pos) { } // Weak values to make sure wrappers are cleaned up after use, thread-safe. private static final Map<WorldLocation, CauldronStorage> CAULDRONS = new MapMaker().concurrencyLevel(1).weakValues().makeMap(); public static CauldronStorage get(World world, BlockPos pos) { WorldLocation location = new WorldLocation(world, pos.toImmutable()); return CAULDRONS.computeIfAbsent(location, CauldronStorage::new); } private final WorldLocation location; // this is the last released snapshot, which means it's the first snapshot ever saved when onFinalCommit() is called. private BlockState lastReleasedSnapshot; CauldronStorage(WorldLocation location) { this.location = location; } @Override protected void releaseSnapshot(BlockState snapshot) { lastReleasedSnapshot = snapshot; } // Retrieve the current CauldronFluidContent. private CauldronFluidContent getCurrentContent() { CauldronFluidContent content = CauldronFluidContent.getForBlock(createSnapshot().getBlock()); if (content == null) { throw new IllegalStateException("Unexpected error: no cauldron at location " + location); } return content; } // Called by insert and extract to update the block state. 
private void updateLevel(CauldronFluidContent newContent, int level, TransactionContext transaction) { updateSnapshots(transaction); BlockState newState = newContent.block.getDefaultState(); if (newContent.levelProperty != null) { newState = newState.with(newContent.levelProperty, level); } // Set block state without updates. location.world.setBlockState(location.pos, newState, 0); } @Override public long insert(FluidVariant fluidVariant, long maxAmount, TransactionContext transaction) { StoragePreconditions.notBlankNotNegative(fluidVariant, maxAmount); CauldronFluidContent insertContent = CauldronFluidContent.getForFluid(fluidVariant.getFluid()); if (insertContent != null) { int maxLevelsInserted = Ints.saturatedCast(maxAmount / insertContent.amountPerLevel); if (getAmount() == 0) { // Currently empty, so we can accept any fluid. int levelsInserted = Math.min(maxLevelsInserted, insertContent.maxLevel); if (levelsInserted > 0) { updateLevel(insertContent, levelsInserted, transaction); } return levelsInserted * insertContent.amountPerLevel; } CauldronFluidContent currentContent = getCurrentContent(); if (fluidVariant.isOf(currentContent.fluid)) { // Otherwise we can only accept the same fluid as the current one. int currentLevel = currentContent.currentLevel(createSnapshot()); int levelsInserted = Math.min(maxLevelsInserted, currentContent.maxLevel - currentLevel); if (levelsInserted > 0) { updateLevel(currentContent, currentLevel + levelsInserted, transaction); } return levelsInserted * currentContent.amountPerLevel; } } return 0; } @Override public long extract(FluidVariant fluidVariant, long maxAmount, TransactionContext transaction) { StoragePreconditions.notBlankNotNegative(fluidVariant, maxAmount); CauldronFluidContent currentContent = getCurrentContent(); if (fluidVariant.isOf(currentContent.fluid)) { int maxLevelsExtracted = Ints.saturatedCast(maxAmount / currentContent.amountPerLevel); int currentLevel = currentContent.currentLevel(createSnapshot()); int levelsExtracted = Math.min(maxLevelsExtracted, currentLevel); if (levelsExtracted > 0) { if (levelsExtracted == currentLevel) { // Fully extract -> back to empty cauldron updateSnapshots(transaction); location.world.setBlockState(location.pos, Blocks.CAULDRON.getDefaultState(), 0); } else { // Otherwise just decrease levels updateLevel(currentContent, currentLevel - levelsExtracted, transaction); } } return levelsExtracted * currentContent.amountPerLevel; } return 0; } @Override public boolean isResourceBlank() { return getResource().isBlank(); } @Override public FluidVariant getResource() { return FluidVariant.of(getCurrentContent().fluid); } @Override public long getAmount() { CauldronFluidContent currentContent = getCurrentContent(); return currentContent.currentLevel(createSnapshot()) * currentContent.amountPerLevel; } @Override public long getCapacity() { CauldronFluidContent currentContent = getCurrentContent(); return currentContent.maxLevel * currentContent.amountPerLevel; } @Override public BlockState createSnapshot() { return location.world.getBlockState(location.pos); } @Override public void readSnapshot(BlockState savedState) { location.world.setBlockState(location.pos, savedState, 0); } @Override public void onFinalCommit() { BlockState state = createSnapshot(); BlockState originalState = lastReleasedSnapshot; if (originalState != state) { // Revert change location.world.setBlockState(location.pos, originalState, 0); // Then do the actual change with normal block updates location.world.setBlockState(location.pos, 
state); } } }
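// A minimal usage sketch for CauldronStorage, assuming Transaction.openOuter() from the same
// Fabric transfer API, vanilla Fluids.WATER, and FluidConstants.BUCKET as one bucket expressed
// in droplets; the wrapper class and method below are illustrative, not part of the module above.
import net.fabricmc.fabric.api.transfer.v1.fluid.FluidConstants;
import net.fabricmc.fabric.api.transfer.v1.fluid.FluidVariant;
import net.fabricmc.fabric.api.transfer.v1.transaction.Transaction;
import net.fabricmc.fabric.impl.transfer.fluid.CauldronStorage;
import net.minecraft.fluid.Fluids;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;

public final class CauldronFillExample {
    // Tries to pour up to one bucket of water into the cauldron at pos; returns the accepted amount.
    public static long fillWithWater(World world, BlockPos pos) {
        CauldronStorage storage = CauldronStorage.get(world, pos);

        try (Transaction transaction = Transaction.openOuter()) {
            long inserted = storage.insert(FluidVariant.of(Fluids.WATER), FluidConstants.BUCKET, transaction);

            if (inserted > 0) {
                // onFinalCommit() above re-applies the final state with normal block updates
                // once the outer transaction commits.
                transaction.commit();
            }

            return inserted;
        }
    }
}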
package org.jabref.logic.integrity; import java.util.Optional; import java.util.stream.Stream; import org.jabref.logic.l10n.Localization; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import static org.junit.jupiter.api.Assertions.assertEquals; public class ValidCitationKeyCheckerTest { private final ValidCitationKeyChecker checker = new ValidCitationKeyChecker(); @ParameterizedTest @MethodSource("provideCitationKeys") void citationKeyValidity(Optional<String> optionalArgument, String citationKey) { assertEquals(optionalArgument, checker.checkValue(citationKey)); } private static Stream<Arguments> provideCitationKeys() { return Stream.of( Arguments.of(Optional.of(Localization.lang("empty citation key")), ""), Arguments.of(Optional.empty(), "Seaver2019"), Arguments.of(Optional.of(Localization.lang("Invalid citation key")), "Seaver_2019}") ); } }
<reponame>Jackque/flashback<filename>flashback-core-impl/src/main/java/com/linkedin/flashback/decorator/compression/AbstractCompressor.java<gh_stars>100-1000 /* * Copyright (c) LinkedIn Corporation. All rights reserved. Licensed under the BSD-2 Clause license. * See LICENSE in the project root for license information. */ package com.linkedin.flashback.decorator.compression; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; /** * Abstract compressor class * * @author shfeng */ public abstract class AbstractCompressor { public byte[] compress(byte[] encodedBytes) throws IOException { ByteArrayOutputStream out = new ByteArrayOutputStream(); try (OutputStream stream = getOutputStream(out)) { stream.write(encodedBytes); stream.flush(); } return out.toByteArray(); } abstract protected OutputStream getOutputStream(OutputStream output) throws IOException; }
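// A minimal concrete subclass sketch, assuming GZIP as the target encoding; GzipCompressor here
// is illustrative rather than copied from the Flashback sources. Because compress() above already
// writes, flushes and closes the wrapping stream via try-with-resources, a subclass only has to
// choose the encoding.
import java.io.IOException;
import java.io.OutputStream;
import java.util.zip.GZIPOutputStream;

import com.linkedin.flashback.decorator.compression.AbstractCompressor;

public class GzipCompressor extends AbstractCompressor {
  @Override
  protected OutputStream getOutputStream(OutputStream output) throws IOException {
    // Everything written by compress() is gzip-encoded and forwarded to the underlying stream;
    // closing the wrapper (done by compress()) writes the gzip trailer.
    return new GZIPOutputStream(output);
  }
}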
<filename>contrib/libs/zstd06/common/zstd.h #include <contrib/libs/zstd06/renames.h> /* zstd - standard compression library Header File Copyright (C) 2014-2016, <NAME>. BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. You can contact the author at : - zstd source repository : https://github.com/Cyan4973/zstd */ #ifndef ZSTD_H #define ZSTD_H #if defined (__cplusplus) extern "C" { #endif /*-************************************* * Dependencies ***************************************/ #include <stddef.h> /* size_t */ /*-*************************************************************** * Export parameters *****************************************************************/ /*! * ZSTD_DLL_EXPORT : * Enable exporting of functions when building a Windows DLL */ #if defined(_WIN32) && defined(ZSTD_DLL_EXPORT) && (ZSTD_DLL_EXPORT==1) # define ZSTDLIB_API __declspec(dllexport) #else # define ZSTDLIB_API #endif /* ************************************* * Version ***************************************/ #define ZSTD_VERSION_MAJOR 0 #define ZSTD_VERSION_MINOR 6 #define ZSTD_VERSION_RELEASE 2 #define ZSTD_LIB_VERSION ZSTD_VERSION_MAJOR.ZSTD_VERSION_MINOR.ZSTD_VERSION_RELEASE #define ZSTD_QUOTE(str) #str #define ZSTD_EXPAND_AND_QUOTE(str) ZSTD_QUOTE(str) #define ZSTD_VERSION_STRING ZSTD_EXPAND_AND_QUOTE(ZSTD_LIB_VERSION) #define ZSTD_VERSION_NUMBER (ZSTD_VERSION_MAJOR *100*100 + ZSTD_VERSION_MINOR *100 + ZSTD_VERSION_RELEASE) ZSTDLIB_API unsigned ZSTD_versionNumber (void); /* ************************************* * Simple functions ***************************************/ /*! ZSTD_compress() : Compresses `srcSize` bytes from buffer `src` into buffer `dst` of size `dstCapacity`. Destination buffer must be already allocated. Compression runs faster if `dstCapacity` >= `ZSTD_compressBound(srcSize)`. @return : the number of bytes written into `dst`, or an error code if it fails (which can be tested using ZSTD_isError()) */ ZSTDLIB_API size_t ZSTD_compress( void* dst, size_t dstCapacity, const void* src, size_t srcSize, int compressionLevel); /*! ZSTD_decompress() : `compressedSize` : is the _exact_ size of the compressed blob, otherwise decompression will fail. `dstCapacity` must be large enough, equal or larger than originalSize. 
@return : the number of bytes decompressed into `dst` (<= `dstCapacity`), or an errorCode if it fails (which can be tested using ZSTD_isError()) */ ZSTDLIB_API size_t ZSTD_decompress( void* dst, size_t dstCapacity, const void* src, size_t compressedSize); /* ************************************* * Helper functions ***************************************/ ZSTDLIB_API size_t ZSTD_compressBound(size_t srcSize); /*!< maximum compressed size (worst case scenario) */ /* Error Management */ ZSTDLIB_API unsigned ZSTD_isError(size_t code); /*!< tells if a `size_t` function result is an error code */ ZSTDLIB_API const char* ZSTD_getErrorName(size_t code); /*!< provides readable string for an error code */ /* ************************************* * Explicit memory management ***************************************/ /** Compression context */ typedef struct ZSTD_CCtx_s ZSTD_CCtx; /*< incomplete type */ ZSTDLIB_API ZSTD_CCtx* ZSTD_createCCtx(void); ZSTDLIB_API size_t ZSTD_freeCCtx(ZSTD_CCtx* cctx); /*!< @return : errorCode */ /** ZSTD_compressCCtx() : Same as ZSTD_compress(), but requires an already allocated ZSTD_CCtx (see ZSTD_createCCtx()) */ ZSTDLIB_API size_t ZSTD_compressCCtx(ZSTD_CCtx* ctx, void* dst, size_t dstCapacity, const void* src, size_t srcSize, int compressionLevel); /** Decompression context */ typedef struct ZSTD_DCtx_s ZSTD_DCtx; ZSTDLIB_API ZSTD_DCtx* ZSTD_createDCtx(void); ZSTDLIB_API size_t ZSTD_freeDCtx(ZSTD_DCtx* dctx); /*!< @return : errorCode */ /** ZSTD_decompressDCtx() : * Same as ZSTD_decompress(), but requires an already allocated ZSTD_DCtx (see ZSTD_createDCtx()) */ ZSTDLIB_API size_t ZSTD_decompressDCtx(ZSTD_DCtx* ctx, void* dst, size_t dstCapacity, const void* src, size_t srcSize); /*-*********************** * Dictionary API *************************/ /*! ZSTD_compress_usingDict() : * Compression using a pre-defined Dictionary content (see dictBuilder). * Note : dict can be NULL, in which case, it's equivalent to ZSTD_compressCCtx() */ ZSTDLIB_API size_t ZSTD_compress_usingDict(ZSTD_CCtx* ctx, void* dst, size_t dstCapacity, const void* src, size_t srcSize, const void* dict,size_t dictSize, int compressionLevel); /*! ZSTD_decompress_usingDict() : * Decompression using a pre-defined Dictionary content (see dictBuilder). * Dictionary must be identical to the one used during compression, otherwise regenerated data will be corrupted. * Note : dict can be NULL, in which case, it's equivalent to ZSTD_decompressDCtx() */ ZSTDLIB_API size_t ZSTD_decompress_usingDict(ZSTD_DCtx* dctx, void* dst, size_t dstCapacity, const void* src, size_t srcSize, const void* dict,size_t dictSize); #if defined (__cplusplus) } #endif #endif /* ZSTD_H */
<gh_stars>100-1000 // Copyright 2018-current Getnamo. All Rights Reserved #include "SocketIONative.h" #include "SIOMessageConvert.h" #include "CULambdaRunnable.h" #include "SIOJConvert.h" #include "sio_client.h" #include "sio_message.h" #include "sio_socket.h" FSocketIONative::FSocketIONative() { PrivateClient = nullptr; AddressAndPort = TEXT("http://localhost:3000"); //default to 127.0.0.1 SessionId = TEXT("Invalid"); LastSessionId = TEXT("None"); bIsConnected = false; MaxReconnectionAttempts = -1; ReconnectionDelay = 5000; bCallbackOnGameThread = true; PrivateClient = MakeShareable(new sio::client); ClearCallbacks(); } void FSocketIONative::Connect(const FString& InAddressAndPort, const TSharedPtr<FJsonObject>& Query /*= nullptr*/, const TSharedPtr<FJsonObject>& Headers /*= nullptr*/, const FString& Path) { std::string StdAddressString = USIOMessageConvert::StdString(InAddressAndPort); if (InAddressAndPort.IsEmpty()) { StdAddressString = USIOMessageConvert::StdString(AddressAndPort); } //Connect to the server on a background thread so it never blocks FCULambdaRunnable::RunLambdaOnBackGroundThread([&, StdAddressString, Query, Headers] { std::map<std::string, std::string> QueryMap = {}; std::map<std::string, std::string> HeadersMap = {}; //fill the headers and query if they're not null if (Headers.IsValid()) { HeadersMap = USIOMessageConvert::JsonObjectToStdStringMap(Headers); } if (Query.IsValid()) { QueryMap = USIOMessageConvert::JsonObjectToStdStringMap(Query); } PrivateClient->set_reconnect_attempts(MaxReconnectionAttempts); PrivateClient->set_reconnect_delay(ReconnectionDelay); //close and reconnect if different url if(PrivateClient->opened()) { if (PrivateClient->get_url() != StdAddressString) { //sync close to re-open PrivateClient->sync_close(); } else { //we're already connected to the correct endpoint, ignore request UE_LOG(SocketIO, Warning, TEXT("Attempted to connect to %s when we're already connected. Request ignored."), UTF8_TO_TCHAR(StdAddressString.c_str())); return; } } PrivateClient->connect(StdAddressString, QueryMap, HeadersMap); }); } void FSocketIONative::Connect(const FString& InAddressAndPort) { TSharedPtr<FJsonObject> Query = MakeShareable(new FJsonObject); TSharedPtr<FJsonObject> Headers = MakeShareable(new FJsonObject); Connect(InAddressAndPort, Query, Headers); } void FSocketIONative::JoinNamespace(const FString& Namespace) { //just referencing the namespace will join it PrivateClient->socket(USIOMessageConvert::StdString(Namespace)); } void FSocketIONative::LeaveNamespace(const FString& Namespace) { PrivateClient->socket(USIOMessageConvert::StdString(Namespace))->close(); } void FSocketIONative::Disconnect() { if (OnDisconnectedCallback) { OnDisconnectedCallback(ESIOConnectionCloseReason::CLOSE_REASON_NORMAL); } bIsConnected = false; ClearCallbacks(); PrivateClient->close(); } void FSocketIONative::SyncDisconnect() { if (OnDisconnectedCallback) { OnDisconnectedCallback(ESIOConnectionCloseReason::CLOSE_REASON_NORMAL); } bIsConnected = false; ClearCallbacks(); PrivateClient->sync_close(); } void FSocketIONative::ClearCallbacks() { PrivateClient->clear_socket_listeners(); SetupInternalCallbacks(); //if clear socket listeners cleared our internal callbacks. 
reset them EventFunctionMap.Empty(); OnConnectedCallback = nullptr; OnDisconnectedCallback = nullptr; OnNamespaceConnectedCallback = nullptr; OnNamespaceDisconnectedCallback = nullptr; OnReconnectionCallback = nullptr; OnFailCallback = nullptr; } void FSocketIONative::Emit(const FString& EventName, const TSharedPtr<FJsonValue>& Message /*= nullptr*/, TFunction< void(const TArray<TSharedPtr<FJsonValue>>&)> CallbackFunction /*= nullptr*/, const FString& Namespace /*= FString(TEXT("/"))*/) { TFunction<void(const sio::message::list&)> RawCallback = nullptr; //Only bind the raw callback if we pass in a callback ourselves; if (CallbackFunction) { RawCallback = [&, CallbackFunction](const sio::message::list& MessageList) { TArray<TSharedPtr<FJsonValue>> ValueArray; for (uint32 i = 0; i < MessageList.size(); i++) { auto ItemMessagePtr = MessageList[i]; ValueArray.Add(USIOMessageConvert::ToJsonValue(ItemMessagePtr)); } if (CallbackFunction) { CallbackFunction(ValueArray); } }; } EmitRaw( EventName, USIOMessageConvert::ToSIOMessage(Message), RawCallback, Namespace); } void FSocketIONative::Emit(const FString& EventName, const TSharedPtr<FJsonObject>& ObjectMessage /*= nullptr*/, TFunction< void(const TArray<TSharedPtr<FJsonValue>>&)> CallbackFunction /*= nullptr*/, const FString& Namespace /*= FString(TEXT("/"))*/) { Emit(EventName, MakeShareable(new FJsonValueObject(ObjectMessage)), CallbackFunction, Namespace); } void FSocketIONative::Emit(const FString& EventName, const FString& StringMessage /*= FString()*/, TFunction< void(const TArray<TSharedPtr<FJsonValue>>&)> CallbackFunction /*= nullptr*/, const FString& Namespace /*= FString(TEXT("/"))*/) { Emit(EventName, MakeShareable(new FJsonValueString(StringMessage)), CallbackFunction, Namespace); } void FSocketIONative::Emit(const FString& EventName, double NumberMessage, TFunction< void(const TArray<TSharedPtr<FJsonValue>>&)> CallbackFunction /*= nullptr*/, const FString& Namespace /*= FString(TEXT("/"))*/) { Emit(EventName, MakeShareable(new FJsonValueNumber(NumberMessage)), CallbackFunction, Namespace); } void FSocketIONative::Emit(const FString& EventName, bool BooleanMessage, TFunction< void(const TArray<TSharedPtr<FJsonValue>>&)> CallbackFunction /*= nullptr*/, const FString& Namespace /*= FString(TEXT("/"))*/) { Emit(EventName, MakeShareable(new FJsonValueBoolean(BooleanMessage)), CallbackFunction, Namespace); } void FSocketIONative::Emit(const FString& EventName, const TArray<uint8>& BinaryMessage, TFunction< void(const TArray<TSharedPtr<FJsonValue>>&)> CallbackFunction /*= nullptr*/, const FString& Namespace /*= FString(TEXT("/"))*/) { Emit(EventName, MakeShareable(new FJsonValueBinary(BinaryMessage)), CallbackFunction, Namespace); } void FSocketIONative::Emit(const FString& EventName, const TArray<TSharedPtr<FJsonValue>>& ArrayMessage, TFunction< void(const TArray<TSharedPtr<FJsonValue>>&)> CallbackFunction /*= nullptr*/, const FString& Namespace /*= FString(TEXT("/"))*/) { Emit(EventName, MakeShareable(new FJsonValueArray(ArrayMessage)), CallbackFunction, Namespace); } void FSocketIONative::Emit(const FString& EventName, UStruct* Struct, const void* StructPtr, TFunction< void(const TArray<TSharedPtr<FJsonValue>>&)> CallbackFunction /*= nullptr*/, const FString& Namespace /*= FString(TEXT("/"))*/) { Emit(EventName, USIOJConvert::ToJsonObject(Struct, (void*)StructPtr), CallbackFunction, Namespace); } void FSocketIONative::Emit(const FString& EventName, TFunction< void(const TArray<TSharedPtr<FJsonValue>>&)> CallbackFunction /*= nullptr*/, 
const FString& Namespace /*= TEXT("/")*/) { TSharedPtr<FJsonValue> NoneValue; Emit(EventName, NoneValue, CallbackFunction, Namespace); } void FSocketIONative::Emit(const FString& EventName, const SIO_TEXT_TYPE StringMessage /*= TEXT("")*/, TFunction< void(const TArray<TSharedPtr<FJsonValue>>&)> CallbackFunction /*= nullptr*/, const FString& Namespace /*= TEXT("/")*/) { Emit(EventName, MakeShareable(new FJsonValueString(FString(StringMessage))), CallbackFunction, Namespace); } void FSocketIONative::EmitRaw(const FString& EventName, const sio::message::list& MessageList /*= nullptr*/, TFunction<void(const sio::message::list&)> CallbackFunction /*= nullptr*/, const FString& Namespace /*= FString(TEXT("/"))*/) { std::function<void(sio::message::list const&)> RawCallback = nullptr; //Only have non-null raw callback if we pass in a callback function if (CallbackFunction) { RawCallback = [&, CallbackFunction](const sio::message::list& response) { if (CallbackFunction != nullptr) { //Callback on game thread if (bCallbackOnGameThread) { FCULambdaRunnable::RunShortLambdaOnGameThread([&, CallbackFunction, response] { if (CallbackFunction) { CallbackFunction(response); } }); } else { CallbackFunction(response); } } }; } PrivateClient->socket(USIOMessageConvert::StdString(Namespace))->emit( USIOMessageConvert::StdString(EventName), MessageList, RawCallback); } void FSocketIONative::EmitRawBinary(const FString& EventName, uint8* Data, int32 DataLength, const FString& Namespace /*= FString(TEXT("/"))*/) { PrivateClient->socket(USIOMessageConvert::StdString(Namespace))->emit(USIOMessageConvert::StdString(EventName), std::make_shared<std::string>((char*)Data, DataLength)); } void FSocketIONative::OnEvent(const FString& EventName, TFunction< void(const FString&, const TSharedPtr<FJsonValue>&)> CallbackFunction, const FString& Namespace /*= FString(TEXT("/"))*/, ESIOThreadOverrideOption CallbackThread /*= USE_DEFAULT*/) { //Keep track of all the bound native JsonValue functions FSIOBoundEvent BoundEvent; BoundEvent.Function = CallbackFunction; BoundEvent.Namespace = Namespace; EventFunctionMap.Add(EventName, BoundEvent); OnRawEvent(EventName, [&, CallbackFunction](const FString& Event, const sio::message::ptr& RawMessage) { CallbackFunction(Event, USIOMessageConvert::ToJsonValue(RawMessage)); }, Namespace, CallbackThread); } void FSocketIONative::OnRawEvent(const FString& EventName, TFunction< void(const FString&, const sio::message::ptr&)> CallbackFunction, const FString& Namespace /*= FString(TEXT("/"))*/, ESIOThreadOverrideOption CallbackThread /*= USE_DEFAULT*/) { if (CallbackFunction == nullptr) { PrivateClient->socket(USIOMessageConvert::StdString(Namespace))->off(USIOMessageConvert::StdString(EventName)); } else { //determine thread override option bool bCallbackThisEventOnGameThread = bCallbackOnGameThread; switch (CallbackThread) { case USE_DEFAULT: break; case USE_GAME_THREAD: bCallbackThisEventOnGameThread = true; break; case USE_NETWORK_THREAD: bCallbackThisEventOnGameThread = false; break; default: break; } const TFunction< void(const FString&, const sio::message::ptr&)> SafeFunction = CallbackFunction; //copy the function so it remains in context PrivateClient->socket(USIOMessageConvert::StdString(Namespace))->on( USIOMessageConvert::StdString(EventName), sio::socket::event_listener_aux( [&, SafeFunction, bCallbackThisEventOnGameThread](std::string const& name, sio::message::ptr const& data, bool isAck, sio::message::list &ack_resp) { if (SafeFunction != nullptr) { const FString SafeName = 
USIOMessageConvert::FStringFromStd(name); if (bCallbackThisEventOnGameThread) { FCULambdaRunnable::RunShortLambdaOnGameThread([&, SafeFunction, SafeName, data] { SafeFunction(SafeName, data); }); } else { SafeFunction(SafeName, data); } } })); } } void FSocketIONative::OnBinaryEvent(const FString& EventName, TFunction< void(const FString&, const TArray<uint8>&)> CallbackFunction, const FString& Namespace /*= FString(TEXT("/"))*/) { const TFunction< void(const FString&, const TArray<uint8>&)> SafeFunction = CallbackFunction; //copy the function so it remains in context PrivateClient->socket(USIOMessageConvert::StdString(Namespace))->on( USIOMessageConvert::StdString(EventName), sio::socket::event_listener_aux( [&, SafeFunction](std::string const& name, sio::message::ptr const& data, bool isAck, sio::message::list &ack_resp) { const FString SafeName = USIOMessageConvert::FStringFromStd(name); //Construct raw buffer if (data->get_flag() == sio::message::flag_binary) { TArray<uint8> Buffer; int32 BufferSize = data->get_binary()->size(); auto MessageBuffer = data->get_binary(); Buffer.Append((uint8*)(MessageBuffer->data()), BufferSize); if (bCallbackOnGameThread) { FCULambdaRunnable::RunShortLambdaOnGameThread([&, SafeFunction, SafeName, Buffer] { SafeFunction(SafeName, Buffer); }); } else { SafeFunction(SafeName, Buffer); } } else { UE_LOG(SocketIO, Warning, TEXT("Non-binary message received to binary message lambda, check server message data!")); } })); } void FSocketIONative::UnbindEvent(const FString& EventName, const FString& Namespace /*= TEXT("/")*/) { OnRawEvent(EventName, nullptr, Namespace); EventFunctionMap.Remove(EventName); } void FSocketIONative::SetupInternalCallbacks() { PrivateClient->set_open_listener(sio::client::con_listener([&]() { //too early to get session id here so we defer the connection event until we connect to a namespace })); PrivateClient->set_close_listener(sio::client::close_listener([&](sio::client::close_reason const& reason) { bIsConnected = false; ESIOConnectionCloseReason DisconnectReason = (ESIOConnectionCloseReason)reason; FString DisconnectReasonString = USIOJConvert::EnumToString(TEXT("ESIOConnectionCloseReason"), DisconnectReason); if (VerboseLog) { UE_LOG(SocketIO, Log, TEXT("SocketIO Disconnected %s reason: %s"), *SessionId, *DisconnectReasonString); } LastSessionId = SessionId; SessionId = TEXT("Invalid"); if (OnDisconnectedCallback) { if (bCallbackOnGameThread) { FCULambdaRunnable::RunShortLambdaOnGameThread([&, DisconnectReason] { if (OnDisconnectedCallback) { OnDisconnectedCallback(DisconnectReason); } }); } else { OnDisconnectedCallback(DisconnectReason); } } })); PrivateClient->set_socket_open_listener(sio::client::socket_listener([&](std::string const& nsp) { //Special case, we have a latent connection after already having been disconnected if (!PrivateClient.IsValid()) { return; } if (!bIsConnected) { bIsConnected = true; SessionId = USIOMessageConvert::FStringFromStd(PrivateClient->get_sessionid()); if (VerboseLog) { UE_LOG(SocketIO, Log, TEXT("SocketIO Connected with session: %s"), *SessionId); } if (OnConnectedCallback) { if (bCallbackOnGameThread) { const FString SafeSessionId = SessionId; FCULambdaRunnable::RunShortLambdaOnGameThread([&, SafeSessionId] { if (OnConnectedCallback) { OnConnectedCallback(SessionId); } }); } else { OnConnectedCallback(SessionId); } } } const FString Namespace = USIOMessageConvert::FStringFromStd(nsp); if (VerboseLog) { UE_LOG(SocketIO, Log, TEXT("SocketIO %s connected to namespace: %s"), *SessionId, 
*Namespace); } if (OnNamespaceConnectedCallback) { if (bCallbackOnGameThread) { FCULambdaRunnable::RunShortLambdaOnGameThread([&, Namespace] { if (OnNamespaceConnectedCallback) { OnNamespaceConnectedCallback(Namespace); } }); } else { OnNamespaceConnectedCallback(Namespace); } } })); PrivateClient->set_socket_close_listener(sio::client::socket_listener([&](std::string const& nsp) { const FString Namespace = USIOMessageConvert::FStringFromStd(nsp); FString NamespaceSession = SessionId; if (NamespaceSession.Equals(TEXT("Invalid"))) { NamespaceSession = LastSessionId; } if (VerboseLog) { UE_LOG(SocketIO, Log, TEXT("SocketIO %s disconnected from namespace: %s"), *NamespaceSession, *Namespace); } if (OnNamespaceDisconnectedCallback) { if (bCallbackOnGameThread) { FCULambdaRunnable::RunShortLambdaOnGameThread([&, Namespace] { if (OnNamespaceDisconnectedCallback) { OnNamespaceDisconnectedCallback(Namespace); } }); } else { OnNamespaceDisconnectedCallback(Namespace); } } })); PrivateClient->set_fail_listener(sio::client::con_listener([&]() { if (VerboseLog) { UE_LOG(SocketIO, Log, TEXT("SocketIO failed to connect.")); } if (OnFailCallback) { if (bCallbackOnGameThread) { FCULambdaRunnable::RunShortLambdaOnGameThread([&] { if (OnFailCallback) { OnFailCallback(); } }); } else { OnFailCallback(); } } })); PrivateClient->set_reconnect_listener(sio::client::reconnect_listener([&](unsigned num, unsigned delay) { bIsConnected = false; if (VerboseLog) { UE_LOG(SocketIO, Log, TEXT("SocketIO %s appears to have lost connection, reconnecting attempt %d with delay %d"), *SessionId, num, delay); } if (OnReconnectionCallback) { if (bCallbackOnGameThread) { FCULambdaRunnable::RunShortLambdaOnGameThread([&, num, delay] { if (OnReconnectionCallback) { OnReconnectionCallback(num, delay); } }); } else { OnReconnectionCallback(num, delay); } } })); }
<filename>challenges/CableGrindLlama/pov_33/pov.c #include "libpov.h" int main(int cgc_argc, char *cgc_argv[]) { cgc_negotiate_type1(0x0, 0x0, 0); do { //*** writing data static unsigned char write_00000_00000[] = "\x7f\x01\x00\x00"; static unsigned int write_00000_00000_len = 4; unsigned char *write_00000 = NULL; unsigned int write_00000_len = 0; write_00000 = cgc_append_buf(write_00000, &write_00000_len, write_00000_00000, write_00000_00000_len); if (write_00000_len > 0) { cgc_transmit_all(1, write_00000, write_00000_len); } cgc_free(write_00000); } while (0); do { //*** writing data static unsigned char write_00001_00000[] = "\x01\x00\x00\x00\x00\x00\x00\x00\x67\x01\x00\x00\x5f\x01\x00\x00" "\x00\x00\x00\x00\x10\x01\x10\x10\x00\x00\x00\x00\x5f\x01\x00\x00" "\x73\x20\x30\xe6\xc9\x3c\x67\xde\x0f\x47\xb1\x13\xbc\x8d\x2c\x6d" "\x42\x30\x6e\xf0\x0f\x00\x00\x00\x01\x00\x7f\x01\x00\x7a\x01\x00" "\x73\xbb\x51\x2a\x9c\x99\x7b\x65\x5e\xd8\x02\x99\x2c\x3f\x53\x49" "\x75\x01\x00\xb1\x23\xe6\xf5\x76\x4e\x95\xc0\x1b\x3f\x24\x9d\x95" "\x01\x00\xca\x5b\x9c\x8f\x88\x76\xd0\x12\xe7\xd4\xd4\x05\xca\x01" "\xc8\x36\x28\xcc\x9b\x98\xd1\xb1\x94\x21\x8f\x59\x7a\x41\x00\xff" "\x7f\x0c\x8c\x29\x27\x17\x22\x80\x70\x4e\x5a\xaa\x35\x89\x89\x16" "\x24\x33\xfc\xc0\x34\xe7\x3b\xb0\x8c\x59\xdd\x58\x70\xf7\xcd\x5d" "\x9a\x38\x52\x1b\xff\x31\x95\x20\x2f\x99\x75\x0c\x6a\xd7\x24\x9b" "\xff\xe5\xa2\x67\x77\x15\x3b\x5d\x83\x9c\xda\x9a\x48\xad\x10\xc1" "\xa5\x2e\x3c\xc5\xea\x10\x9d\x71\xb8\x77\x2c\x87\xcd\x8f\x29\x15" "\x37\x49\x1f\x6a\xe6\x1a\xbb\x87\x56\xae\x68\x4d\xa6\xad\xd1\xdb" "\x1a\xe2\x82\x1a\x77\xad\x83\x38\x5a\xef\x76\x85\x87\x0b\x4d\xaf" "\x81\x1d\xbb\x47\xb3\x2f\x86\x0f\xed\xa2\x8d\x22\x0e\xbb\x19\x02" "\xe0\x07\xbd\x9f\x95\xda\xa0\x48\x47\xf8\x37\xcc\x53\x9f\xea\x6b" "\xf5\x6d\x06\xc5\x7f\x45\xa0\x02\x99\x1d\x7a\x72\x72\x91\xe6\x67" "\x61\x77\x1a\x41\xe3\x33\x89\xd2\xf4\x00\xc0\x86\x5f\x58\x3b\x90" "\x30\xde\xe7\x29\x81\x9b\x5e\x39\x5f\xcc\x3b\x82\xc0\xae\xe4\xe9" "\x27\x02\x5b\xad\xbe\x86\x5a\xcd\xcc\x02\x1d\x94\xbc\x66\x88\x84" "\x7d\x1e\xd8\x15\x3f\x31\x45\xd7\x11\x14\x11\x4b\xb4\xa5\x25\xeb" "\x88\xc7\x27\x71\x9e\x8c\x75\x41\x56\xc8\xd3\xb1\xd3\x89\x03\x85" "\x47\x47\x1f\x41\x59\xc1\xa8\xa2\x12\x02\x95\x28\xef\x8e\xf5"; static unsigned int write_00001_00000_len = 383; unsigned char *write_00001 = NULL; unsigned int write_00001_len = 0; write_00001 = cgc_append_buf(write_00001, &write_00001_len, write_00001_00000, write_00001_00000_len); if (write_00001_len > 0) { cgc_transmit_all(1, write_00001, write_00001_len); } cgc_free(write_00001); } while (0); do { unsigned char *read_00000; unsigned int read_00000_len; unsigned int read_00000_ptr = 0; //**** delimited read static unsigned char read_00000_delim[] = "\x0a"; read_00000 = NULL; read_00000_len = 0; int read_00000_res = cgc_delimited_read(0, &read_00000, &read_00000_len, read_00000_delim, 1); if (read_00000_res) {} //silence unused variable warning //**** read match data static unsigned char match_00000_00000[] = "\x53\x74\x61\x72\x74\x69\x6e\x67\x20\x64\x69\x73\x73\x65\x63\x74" "\x69\x6f\x6e\x2e\x2e\x2e"; read_00000_ptr += cgc_data_match(read_00000 + read_00000_ptr, read_00000_len - read_00000_ptr, match_00000_00000, 22); cgc_free(read_00000); if (read_00000_ptr) {} //silence unused variable warning if any } while (0); do { unsigned char *read_00001; unsigned int read_00001_len; unsigned int read_00001_ptr = 0; //**** delimited read static unsigned char read_00001_delim[] = "\x3d\x3d\x3d\x3d\x0a"; read_00001 = NULL; read_00001_len = 0; int read_00001_res 
= cgc_delimited_read(0, &read_00001, &read_00001_len, read_00001_delim, 5); if (read_00001_res) {} //silence unused variable warning /* read match pcre: .*New Packet */ static char read_00001_00000_regex[] = "\x2e\x2a\x4e\x65\x77\x20\x50\x61\x63\x6b\x65\x74"; static match_result read_00001_00000_match; pcre *read_00001_00000_pcre = cgc_init_regex(read_00001_00000_regex); if (read_00001_00000_pcre != NULL) { int rc = cgc_regex_match(read_00001_00000_pcre, 0, read_00001 + read_00001_ptr, read_00001_len - read_00001_ptr, &read_00001_00000_match); if (rc > 0) { read_00001_ptr += read_00001_00000_match.match_end - read_00001_00000_match.match_start; } else { //this is a pov so what does this even mean? //why would we quit on failed match, just keep sending stuff. } pcre_free(read_00001_00000_pcre); } else { //this is a pov so what does this even mean? //why would we quit on failed regex compile, just keep sending stuff. } cgc_free(read_00001); if (read_00001_ptr) {} //silence unused variable warning if any } while (0); do { unsigned char *read_00002; unsigned int read_00002_len; unsigned int read_00002_ptr = 0; //**** delimited read static unsigned char read_00002_delim[] = "\x3d\x3d\x3d\x0a"; read_00002 = NULL; read_00002_len = 0; int read_00002_res = cgc_delimited_read(0, &read_00002, &read_00002_len, read_00002_delim, 4); if (read_00002_res) {} //silence unused variable warning /* read match pcre: .*===llama */ static char read_00002_00000_regex[] = "\x2e\x2a\x3d\x3d\x3d\x6c\x6c\x61\x6d\x61"; static match_result read_00002_00000_match; pcre *read_00002_00000_pcre = cgc_init_regex(read_00002_00000_regex); if (read_00002_00000_pcre != NULL) { int rc = cgc_regex_match(read_00002_00000_pcre, 0, read_00002 + read_00002_ptr, read_00002_len - read_00002_ptr, &read_00002_00000_match); if (rc > 0) { read_00002_ptr += read_00002_00000_match.match_end - read_00002_00000_match.match_start; } else { //this is a pov so what does this even mean? //why would we quit on failed match, just keep sending stuff. } pcre_free(read_00002_00000_pcre); } else { //this is a pov so what does this even mean? //why would we quit on failed regex compile, just keep sending stuff. } cgc_free(read_00002); if (read_00002_ptr) {} //silence unused variable warning if any } while (0); do { unsigned char *read_00003; unsigned int read_00003_len; unsigned int read_00003_ptr = 0; //**** delimited read static unsigned char read_00003_delim[] = "\x3d\x3d\x3d\x0a"; read_00003 = NULL; read_00003_len = 0; int read_00003_res = cgc_delimited_read(0, &read_00003, &read_00003_len, read_00003_delim, 4); if (read_00003_res) {} //silence unused variable warning /* read match pcre: .*===llamapycnite */ static char read_00003_00000_regex[] = "\x2e\x2a\x3d\x3d\x3d\x6c\x6c\x61\x6d\x61\x70\x79\x63\x6e\x69\x74" "\x65"; static match_result read_00003_00000_match; pcre *read_00003_00000_pcre = cgc_init_regex(read_00003_00000_regex); if (read_00003_00000_pcre != NULL) { int rc = cgc_regex_match(read_00003_00000_pcre, 0, read_00003 + read_00003_ptr, read_00003_len - read_00003_ptr, &read_00003_00000_match); if (rc > 0) { read_00003_ptr += read_00003_00000_match.match_end - read_00003_00000_match.match_start; } else { //this is a pov so what does this even mean? //why would we quit on failed match, just keep sending stuff. } pcre_free(read_00003_00000_pcre); } else { //this is a pov so what does this even mean? //why would we quit on failed regex compile, just keep sending stuff. 
} cgc_free(read_00003); if (read_00003_ptr) {} //silence unused variable warning if any } while (0); do { unsigned char *read_00004; unsigned int read_00004_len; unsigned int read_00004_ptr = 0; //**** delimited read static unsigned char read_00004_delim[] = "\x3d\x3d\x3d\x0a"; read_00004 = NULL; read_00004_len = 0; int read_00004_res = cgc_delimited_read(0, &read_00004, &read_00004_len, read_00004_delim, 4); if (read_00004_res) {} //silence unused variable warning /* read match pcre: .*Dissection finished */ static char read_00004_00000_regex[] = "\x2e\x2a\x44\x69\x73\x73\x65\x63\x74\x69\x6f\x6e\x20\x66\x69\x6e" "\x69\x73\x68\x65\x64"; static match_result read_00004_00000_match; pcre *read_00004_00000_pcre = cgc_init_regex(read_00004_00000_regex); if (read_00004_00000_pcre != NULL) { int rc = cgc_regex_match(read_00004_00000_pcre, 0, read_00004 + read_00004_ptr, read_00004_len - read_00004_ptr, &read_00004_00000_match); if (rc > 0) { read_00004_ptr += read_00004_00000_match.match_end - read_00004_00000_match.match_start; } else { //this is a pov so what does this even mean? //why would we quit on failed match, just keep sending stuff. } pcre_free(read_00004_00000_pcre); } else { //this is a pov so what does this even mean? //why would we quit on failed regex compile, just keep sending stuff. } cgc_free(read_00004); if (read_00004_ptr) {} //silence unused variable warning if any } while (0); }
<gh_stars>1000+ /* * Copyright (c) 2011 The Native Client Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #include <assert.h> #include <pthread.h> #include <stdio.h> #include <stdlib.h> #include <sys/mman.h> void *mapping; void *thread_func(void *unused_arg) { fprintf(stderr, "child thread has started.\n"); while (1) { /* * This checks whether mmap() with MAP_FIXED is atomic. On * Windows, mmap() must temporarily unmap and then re-map a page. * If this is observable by untrusted code, then the memory access * below can fault if it occurs while the address is temporarily * unmapped. * See http://code.google.com/p/nativeclient/issues/detail?id=1848 * * This is technically a stress test: since this is * non-deterministic, we are not guaranteed to detect the problem * if it exists. However, this is such a tight loop that if we * are running on a multicore system, we are almost certain to * detect the problem. */ (*(volatile int *) mapping)++; } return NULL; } int main(void) { mapping = mmap(NULL, 0x10000, PROT_READ | PROT_WRITE, MAP_ANONYMOUS | MAP_PRIVATE, -1, 0); assert(mapping != MAP_FAILED); pthread_t tid; int rc = pthread_create(&tid, NULL, thread_func, NULL); assert(rc == 0); /* * To increase the chance of detecting a problem, spin until we can * see that our thread has really been scheduled. */ fprintf(stderr, "waiting for child thread...\n"); while (*(volatile int *) mapping == 0) { /* Nothing */ } for (int index = 0; index < 1000; index++) { fprintf(stderr, "mmap call #%i\n", index); void *result = mmap(mapping, 0x10000, PROT_READ | PROT_WRITE, MAP_ANONYMOUS | MAP_PRIVATE | MAP_FIXED, -1, 0); assert(result == mapping); /* * Sanity check: Spin until we see that our thread has touched the * page. This checks that the thread has been correctly * unsuspended on Windows. On failure, this will hang. */ fprintf(stderr, "checking for write to page...\n"); int value = *(volatile int *) mapping; while (*(volatile int *) mapping == value) { /* Nothing */ } } return 0; }
859
1,694
<reponame>CrackerCat/iWeChat<gh_stars>1000+ // // Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 17 2017 16:24:48). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by <NAME>. // #import "MMUIViewController.h" @class MMTableViewInfo; @protocol BindPhoneSuccessViewControllerDelegate; @interface BindPhoneSuccessViewController : MMUIViewController { id <BindPhoneSuccessViewControllerDelegate> _delegate; MMTableViewInfo *_tableViewInfo; } @property(retain, nonatomic) MMTableViewInfo *tableViewInfo; // @synthesize tableViewInfo=_tableViewInfo; @property(nonatomic) __weak id <BindPhoneSuccessViewControllerDelegate> delegate; // @synthesize delegate=_delegate; - (void).cxx_destruct; - (void)showBindedView; - (void)initTableView; - (void)didReceiveMemoryWarning; - (void)viewDidLoad; - (void)viewDidLayoutSubviews; @end
304
2,293
#!/usr/bin/python # # Copyright (C) 2007, 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Contains extensions to Atom objects used with Blogger.""" __author__ = 'api.jscudder (<NAME>)' import atom import gdata import re LABEL_SCHEME = 'http://www.blogger.com/atom/ns#' THR_NAMESPACE = 'http://purl.org/syndication/thread/1.0' class BloggerEntry(gdata.GDataEntry): """Adds convenience methods inherited by all Blogger entries.""" blog_name_pattern = re.compile('(http://)(\w*)') blog_id_pattern = re.compile('(tag:blogger.com,1999:blog-)(\w*)') blog_id2_pattern = re.compile('tag:blogger.com,1999:user-(\d+)\.blog-(\d+)') def GetBlogId(self): """Extracts the Blogger id of this blog. This method is useful when contructing URLs by hand. The blog id is often used in blogger operation URLs. This should not be confused with the id member of a BloggerBlog. The id element is the Atom id XML element. The blog id which this method returns is a part of the Atom id. Returns: The blog's unique id as a string. """ if self.id.text: match = self.blog_id_pattern.match(self.id.text) if match: return match.group(2) else: return self.blog_id2_pattern.match(self.id.text).group(2) return None def GetBlogName(self): """Finds the name of this blog as used in the 'alternate' URL. An alternate URL is in the form 'http://blogName.blogspot.com/'. For an entry representing the above example, this method would return 'blogName'. Returns: The blog's URL name component as a string. """ for link in self.link: if link.rel == 'alternate': return self.blog_name_pattern.match(link.href).group(2) return None class BlogEntry(BloggerEntry): """Describes a blog entry in the feed listing a user's blogs.""" def BlogEntryFromString(xml_string): return atom.CreateClassFromXMLString(BlogEntry, xml_string) class BlogFeed(gdata.GDataFeed): """Describes a feed of a user's blogs.""" _children = gdata.GDataFeed._children.copy() _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [BlogEntry]) def BlogFeedFromString(xml_string): return atom.CreateClassFromXMLString(BlogFeed, xml_string) class BlogPostEntry(BloggerEntry): """Describes a blog post entry in the feed of a blog's posts.""" post_id_pattern = re.compile('(tag:blogger.com,1999:blog-)(\w*)(.post-)(\w*)') def AddLabel(self, label): """Adds a label to the blog post. The label is represented by an Atom category element, so this method is shorthand for appending a new atom.Category object. Args: label: str """ self.category.append(atom.Category(scheme=LABEL_SCHEME, term=label)) def GetPostId(self): """Extracts the postID string from the entry's Atom id. Returns: A string of digits which identify this post within the blog. 
""" if self.id.text: return self.post_id_pattern.match(self.id.text).group(4) return None def BlogPostEntryFromString(xml_string): return atom.CreateClassFromXMLString(BlogPostEntry, xml_string) class BlogPostFeed(gdata.GDataFeed): """Describes a feed of a blog's posts.""" _children = gdata.GDataFeed._children.copy() _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [BlogPostEntry]) def BlogPostFeedFromString(xml_string): return atom.CreateClassFromXMLString(BlogPostFeed, xml_string) class InReplyTo(atom.AtomBase): _tag = 'in-reply-to' _namespace = THR_NAMESPACE _attributes = atom.AtomBase._attributes.copy() _attributes['href'] = 'href' _attributes['ref'] = 'ref' _attributes['source'] = 'source' _attributes['type'] = 'type' def __init__(self, href=None, ref=None, source=None, type=None, extension_elements=None, extension_attributes=None, text=None): self.href = href self.ref = ref self.source = source self.type = type self.extension_elements = extension_elements or [] self.extension_attributes = extension_attributes or {} self.text = text def InReplyToFromString(xml_string): return atom.CreateClassFromXMLString(InReplyTo, xml_string) class CommentEntry(BloggerEntry): """Describes a blog post comment entry in the feed of a blog post's comments.""" _children = BloggerEntry._children.copy() _children['{%s}in-reply-to' % THR_NAMESPACE] = ('in_reply_to', InReplyTo) comment_id_pattern = re.compile('.*-(\w*)$') def __init__(self, author=None, category=None, content=None, contributor=None, atom_id=None, link=None, published=None, rights=None, source=None, summary=None, control=None, title=None, updated=None, in_reply_to=None, extension_elements=None, extension_attributes=None, text=None): BloggerEntry.__init__(self, author=author, category=category, content=content, contributor=contributor, atom_id=atom_id, link=link, published=published, rights=rights, source=source, summary=summary, control=control, title=title, updated=updated, extension_elements=extension_elements, extension_attributes=extension_attributes, text=text) self.in_reply_to = in_reply_to def GetCommentId(self): """Extracts the commentID string from the entry's Atom id. Returns: A string of digits which identify this post within the blog. """ if self.id.text: return self.comment_id_pattern.match(self.id.text).group(1) return None def CommentEntryFromString(xml_string): return atom.CreateClassFromXMLString(CommentEntry, xml_string) class CommentFeed(gdata.GDataFeed): """Describes a feed of a blog post's comments.""" _children = gdata.GDataFeed._children.copy() _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [CommentEntry]) def CommentFeedFromString(xml_string): return atom.CreateClassFromXMLString(CommentFeed, xml_string)
2,244
1,533
#include ""
3
1,670
<filename>autobahn/twisted/test/test_choosereactor.py ############################################################################### # # The MIT License (MIT) # # Copyright (c) Crossbar.io Technologies GmbH # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ############################################################################### import sys from unittest.mock import Mock import twisted.internet from twisted.trial import unittest from autobahn.twisted import choosereactor class ChooseReactorTests(unittest.TestCase): def patch_reactor(self, name, new_reactor): """ Patch ``name`` so that Twisted will grab a fake reactor instead of a real one. """ if hasattr(twisted.internet, name): self.patch(twisted.internet, name, new_reactor) else: def _cleanup(): delattr(twisted.internet, name) setattr(twisted.internet, name, new_reactor) def patch_modules(self): """ Patch ``sys.modules`` so that Twisted believes there is no installed reactor. """ old_modules = dict(sys.modules) new_modules = dict(sys.modules) del new_modules["twisted.internet.reactor"] def _cleanup(): sys.modules = old_modules self.addCleanup(_cleanup) sys.modules = new_modules def test_unknown(self): """ ``install_optimal_reactor`` will use the default reactor if it is unable to detect the platform it is running on. """ reactor_mock = Mock() self.patch_reactor("selectreactor", reactor_mock) self.patch(sys, "platform", "unknown") # Emulate that a reactor has not been installed self.patch_modules() choosereactor.install_optimal_reactor() reactor_mock.install.assert_called_once_with() def test_mac(self): """ ``install_optimal_reactor`` will install KQueueReactor on Darwin (OS X). """ reactor_mock = Mock() self.patch_reactor("kqreactor", reactor_mock) self.patch(sys, "platform", "darwin") # Emulate that a reactor has not been installed self.patch_modules() choosereactor.install_optimal_reactor() reactor_mock.install.assert_called_once_with() def test_win(self): """ ``install_optimal_reactor`` will install IOCPReactor on Windows. """ if sys.platform != 'win32': raise unittest.SkipTest('unit test requires Windows') reactor_mock = Mock() self.patch_reactor("iocpreactor", reactor_mock) self.patch(sys, "platform", "win32") # Emulate that a reactor has not been installed self.patch_modules() choosereactor.install_optimal_reactor() reactor_mock.install.assert_called_once_with() def test_bsd(self): """ ``install_optimal_reactor`` will install KQueueReactor on BSD. 
""" reactor_mock = Mock() self.patch_reactor("kqreactor", reactor_mock) self.patch(sys, "platform", "freebsd11") # Emulate that a reactor has not been installed self.patch_modules() choosereactor.install_optimal_reactor() reactor_mock.install.assert_called_once_with() def test_linux(self): """ ``install_optimal_reactor`` will install EPollReactor on Linux. """ reactor_mock = Mock() self.patch_reactor("epollreactor", reactor_mock) self.patch(sys, "platform", "linux") # Emulate that a reactor has not been installed self.patch_modules() choosereactor.install_optimal_reactor() reactor_mock.install.assert_called_once_with()
1,797
7,409
import logging import time from typing import Sequence, cast from celery import shared_task from posthog.models import Action from posthog.utils import is_clickhouse_enabled logger = logging.getLogger(__name__) @shared_task(ignore_result=True) def calculate_action(action_id: int) -> None: if is_clickhouse_enabled(): # In EE, actions are not precalculated return start_time = time.time() action: Action = Action.objects.get(pk=action_id) action.calculate_events() total_time = time.time() - start_time logger.info(f"Calculating action {action.pk} took {total_time:.2f} seconds") def calculate_actions_from_last_calculation() -> None: if is_clickhouse_enabled(): # In EE, actions are not precalculated return start_time_overall = time.time() for action in cast(Sequence[Action], Action.objects.filter(is_calculating=False, deleted=False)): start_time = time.time() action.calculate_events(start=action.last_calculated_at) total_time = time.time() - start_time logger.info(f"Calculating action {action.pk} took {total_time:.2f} seconds") total_time_overall = time.time() - start_time_overall logger.info(f"Calculated new event-action pairs in {total_time_overall:.2f} s")
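A minimal usage sketch for the task above: a Celery shared_task like calculate_action is normally enqueued for a worker rather than called inline. The import path and the action id are assumptions for illustration.

from posthog.tasks.calculate_action import calculate_action  # assumed module path

calculate_action.delay(42)                              # enqueue for a worker immediately
calculate_action.apply_async(args=(42,), countdown=60)  # or schedule it 60s from now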
462
678
<reponame>bzxy/cydia<filename>iOSOpenDev/frameworks/AirPortAssistant.framework/Headers/StepByStepUIViewController_ExtendWired_Config.h /** * This header is generated by class-dump-z 0.2b. * * Source: /System/Library/PrivateFrameworks/AirPortAssistant.framework/AirPortAssistant */ #import <AirPortAssistant/StepByStepUIViewController_ExtendWireless_Config.h> __attribute__((visibility("hidden"))) @interface StepByStepUIViewController_ExtendWired_Config : StepByStepUIViewController_ExtendWireless_Config { } - (void)viewDidLoad; // 0x17231 @end
183
16,989
/* * Copyright 2019 The Bazel Authors. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.devtools.build.android.desugar.langmodel; import com.google.auto.value.AutoValue; import com.google.common.collect.ImmutableSet; import java.util.Collection; import org.objectweb.asm.Opcodes; /** * Used to track the declaration and invocation information of a class member, including fields, * constructors and methods. */ @AutoValue abstract class ClassMemberTrackReason { abstract boolean hasDeclReason(); abstract int ownerAccess(); abstract int memberAccess(); abstract ImmutableSet<MemberUseKind> useAccesses(); public static ClassMemberTrackReasonBuilder builder() { return new AutoValue_ClassMemberTrackReason.Builder() .setHasDeclReason(false) .setOwnerAccess(0) .setMemberAccess(0); } abstract ClassMemberTrackReasonBuilder toBuilder(); final boolean hasInterfaceDeclReason() { return hasDeclReason() && (ownerAccess() & Opcodes.ACC_INTERFACE) != 0; } final boolean hasMemberUseReason() { return !useAccesses().isEmpty(); } /** The builder for {@link ClassMemberTrackReason}. */ @AutoValue.Builder abstract static class ClassMemberTrackReasonBuilder { abstract ClassMemberTrackReasonBuilder setHasDeclReason(boolean value); abstract ClassMemberTrackReasonBuilder setOwnerAccess(int value); abstract ClassMemberTrackReasonBuilder setMemberAccess(int value); abstract ClassMemberTrackReasonBuilder setUseAccesses(Collection<MemberUseKind> value); abstract ImmutableSet.Builder<MemberUseKind> useAccessesBuilder(); final ClassMemberTrackReasonBuilder setDeclAccess(int ownerAccess, int memberAccess) { return setHasDeclReason(true).setOwnerAccess(ownerAccess).setMemberAccess(memberAccess); } final ClassMemberTrackReasonBuilder addUseAccess(int invokeOpcode) { useAccessesBuilder().add(MemberUseKind.fromValue(invokeOpcode)); return this; } final ClassMemberTrackReasonBuilder addAllUseAccesses(Collection<MemberUseKind> values) { useAccessesBuilder().addAll(values); return this; } final ClassMemberTrackReasonBuilder mergeFrom(ClassMemberTrackReason otherReason) { if (otherReason.hasDeclReason()) { setDeclAccess(otherReason.ownerAccess(), otherReason.memberAccess()); } addAllUseAccesses(otherReason.useAccesses()); return this; } abstract ClassMemberTrackReason build(); } }
875
320
<filename>MoPubSDKTests/MPAdViewOverlayDelegateMock.h<gh_stars>100-1000 // // MPAdViewOverlayDelegateMock.h // // Copyright 2018-2021 Twitter, Inc. // Licensed under the MoPub SDK License Agreement // http://www.mopub.com/legal/sdk-license-agreement/ // #import <Foundation/Foundation.h> #import "MPAdViewOverlay.h" NS_ASSUME_NONNULL_BEGIN @interface MPAdViewOverlayDelegateMock : NSObject <MPAdViewOverlayDelegate> @property (nonatomic, copy, nullable) void (^overlayDidTriggerEventBlock)(MPAdViewOverlay * _Nullable overlay, MPVideoEvent event); @property (nonatomic, copy, nullable) void (^overlayDidFinishCountdownBlock)(MPAdViewOverlay * _Nullable overlay); @end NS_ASSUME_NONNULL_END
246
1,380
<gh_stars>1000+ /* * sophia database * sphia.org * * Copyright (c) <NAME> * BSD License */ #include <sophia.h> #include <libss.h> #include <libsf.h> #include <libsr.h> #include <libsv.h> #include <libsd.h> #include <libst.h> static void secondary_index_test_unique0(void) { void *env = sp_env(); t( env != NULL ); t( sp_setstring(env, "sophia.path", st_r.conf->sophia_dir, 0) == 0 ); t( sp_setint(env, "scheduler.threads", 0) == 0 ); t( sp_setstring(env, "log.path", st_r.conf->log_dir, 0) == 0 ); /* unique */ t( sp_setstring(env, "db", "primary", 0) == 0 ); t( sp_setint(env, "db.primary.compaction.cache", 0) == 0 ); t( sp_setstring(env, "db.primary.scheme", "a", 0) == 0 ); t( sp_setstring(env, "db.primary.scheme.a", "u32,key(0)", 0) == 0 ); t( sp_setstring(env, "db.primary.scheme", "b", 0) == 0 ); t( sp_setstring(env, "db.primary.scheme.b", "u32", 0) == 0 ); t( sp_setint(env, "db.primary.sync", 0) == 0 ); /* unique */ t( sp_setstring(env, "db", "secondary", 0) == 0 ); t( sp_setint(env, "db.secondary.compaction.cache", 0) == 0 ); t( sp_setstring(env, "db.secondary.scheme", "a", 0) == 0 ); t( sp_setstring(env, "db.secondary.scheme.a", "u32", 0) == 0 ); t( sp_setstring(env, "db.secondary.scheme", "b", 0) == 0 ); t( sp_setstring(env, "db.secondary.scheme.b", "u32,key(0)", 0) == 0 ); t( sp_setint(env, "db.secondary.sync", 0) == 0 ); void *primary = sp_getobject(env, "db.primary"); void *secondary = sp_getobject(env, "db.secondary"); t( primary != NULL ); t( secondary != NULL ); t( sp_open(env) == 0 ); void *tx; void *po, *so; uint32_t a, b; tx = sp_begin(env); a = 0; b = 3; po = sp_document(primary); sp_setstring(po, "a", &a, sizeof(a)); sp_setstring(po, "b", &b, sizeof(b)); t( sp_set(tx, po) == 0 ); so = sp_document(secondary); sp_setstring(po, "a", &a, sizeof(a)); sp_setstring(po, "b", &b, sizeof(b)); t( sp_set(tx, so) == 0 ); t( sp_commit(tx) == 0 ); tx = sp_begin(env); a = 1; b = 2; po = sp_document(primary); sp_setstring(po, "a", &a, sizeof(a)); sp_setstring(po, "b", &b, sizeof(b)); t( sp_set(tx, po) == 0 ); so = sp_document(secondary); sp_setstring(po, "a", &a, sizeof(a)); sp_setstring(po, "b", &b, sizeof(b)); t( sp_set(tx, so) == 0 ); t( sp_commit(tx) == 0 ); tx = sp_begin(env); a = 2; b = 1; po = sp_document(primary); sp_setstring(po, "a", &a, sizeof(a)); sp_setstring(po, "b", &b, sizeof(b)); t( sp_set(tx, po) == 0 ); so = sp_document(secondary); sp_setstring(po, "a", &a, sizeof(a)); sp_setstring(po, "b", &b, sizeof(b)); t( sp_set(tx, so) == 0 ); t( sp_commit(tx) == 0 ); tx = sp_begin(env); a = 3; b = 0; po = sp_document(primary); sp_setstring(po, "a", &a, sizeof(a)); sp_setstring(po, "b", &b, sizeof(b)); t( sp_set(tx, po) == 0 ); so = sp_document(secondary); sp_setstring(po, "a", &a, sizeof(a)); sp_setstring(po, "b", &b, sizeof(b)); t( sp_set(tx, so) == 0 ); t( sp_commit(tx) == 0 ); uint32_t current_a = 0; uint32_t current_b = 3; void *cur = sp_cursor(env); po = sp_document(primary); sp_setstring(po, "order", ">=", 0); while ((po = sp_get(cur, po))) { t( *(uint32_t*)sp_getstring(po, "a", NULL) == current_a ); t( *(uint32_t*)sp_getstring(po, "b", NULL) == current_b ); current_a++; current_b--; } sp_destroy(cur); current_a = 3; current_b = 0; cur = sp_cursor(env); so = sp_document(secondary); sp_setstring(so, "order", ">=", 0); while ((so = sp_get(cur, so))) { t( *(uint32_t*)sp_getstring(so, "a", NULL) == current_a ); t( *(uint32_t*)sp_getstring(so, "b", NULL) == current_b ); current_a--; current_b++; } sp_destroy(cur); sp_destroy(env); } static void secondary_index_test_nonunique0(void) { 
void *env = sp_env(); t( env != NULL ); t( sp_setstring(env, "sophia.path", st_r.conf->sophia_dir, 0) == 0 ); t( sp_setint(env, "scheduler.threads", 0) == 0 ); t( sp_setstring(env, "log.path", st_r.conf->log_dir, 0) == 0 ); /* unique */ t( sp_setstring(env, "db", "primary", 0) == 0 ); t( sp_setint(env, "db.primary.compaction.cache", 0) == 0 ); t( sp_setstring(env, "db.primary.scheme", "a", 0) == 0 ); t( sp_setstring(env, "db.primary.scheme.a", "u32,key(0)", 0) == 0 ); t( sp_setstring(env, "db.primary.scheme", "b", 0) == 0 ); t( sp_setstring(env, "db.primary.scheme.b", "u32", 0) == 0 ); t( sp_setint(env, "db.primary.sync", 0) == 0 ); /* non-unique */ t( sp_setstring(env, "db", "secondary", 0) == 0 ); t( sp_setint(env, "db.secondary.compaction.cache", 0) == 0 ); t( sp_setstring(env, "db.secondary.scheme", "a", 0) == 0 ); t( sp_setstring(env, "db.secondary.scheme.a", "u32,key(1)", 0) == 0 ); t( sp_setstring(env, "db.secondary.scheme", "b", 0) == 0 ); t( sp_setstring(env, "db.secondary.scheme.b", "u32,key(0)", 0) == 0 ); t( sp_setint(env, "db.secondary.sync", 0) == 0 ); void *primary = sp_getobject(env, "db.primary"); void *secondary = sp_getobject(env, "db.secondary"); t( primary != NULL ); t( secondary != NULL ); t( sp_open(env) == 0 ); void *tx; void *po, *so; uint32_t a, b; tx = sp_begin(env); a = 0; b = 0; po = sp_document(primary); sp_setstring(po, "a", &a, sizeof(a)); sp_setstring(po, "b", &b, sizeof(b)); t( sp_set(tx, po) == 0 ); so = sp_document(secondary); sp_setstring(po, "a", &a, sizeof(a)); sp_setstring(po, "b", &b, sizeof(b)); t( sp_set(tx, so) == 0 ); t( sp_commit(tx) == 0 ); tx = sp_begin(env); a = 1; b = 0; po = sp_document(primary); sp_setstring(po, "a", &a, sizeof(a)); sp_setstring(po, "b", &b, sizeof(b)); t( sp_set(tx, po) == 0 ); so = sp_document(secondary); sp_setstring(po, "a", &a, sizeof(a)); sp_setstring(po, "b", &b, sizeof(b)); t( sp_set(tx, so) == 0 ); t( sp_commit(tx) == 0 ); tx = sp_begin(env); a = 2; b = 0; po = sp_document(primary); sp_setstring(po, "a", &a, sizeof(a)); sp_setstring(po, "b", &b, sizeof(b)); t( sp_set(tx, po) == 0 ); so = sp_document(secondary); sp_setstring(po, "a", &a, sizeof(a)); sp_setstring(po, "b", &b, sizeof(b)); t( sp_set(tx, so) == 0 ); t( sp_commit(tx) == 0 ); tx = sp_begin(env); a = 3; b = 0; po = sp_document(primary); sp_setstring(po, "a", &a, sizeof(a)); sp_setstring(po, "b", &b, sizeof(b)); t( sp_set(tx, po) == 0 ); so = sp_document(secondary); sp_setstring(po, "a", &a, sizeof(a)); sp_setstring(po, "b", &b, sizeof(b)); t( sp_set(tx, so) == 0 ); t( sp_commit(tx) == 0 ); t( sp_setint(env, "db.primary.compaction.compact", 0) == 0 ); t( sp_setint(env, "db.secondary.compaction.compact", 0) == 0 ); uint32_t current_a = 0; uint32_t current_b = 0; void *cur = sp_cursor(env); po = sp_document(primary); sp_setstring(po, "order", ">=", 0); while ((po = sp_get(cur, po))) { t( *(uint32_t*)sp_getstring(po, "a", NULL) == current_a ); t( *(uint32_t*)sp_getstring(po, "b", NULL) == current_b ); current_a++; } sp_destroy(cur); current_a = 0; current_b = 0; cur = sp_cursor(env); so = sp_document(secondary); sp_setstring(so, "order", ">=", 0); while ((so = sp_get(cur, so))) { t( *(uint32_t*)sp_getstring(so, "a", NULL) == current_a ); t( *(uint32_t*)sp_getstring(so, "b", NULL) == current_b ); current_a++; } sp_destroy(cur); sp_destroy(env); } stgroup *secondary_index_group(void) { stgroup *group = st_group("secondary_index"); st_groupadd(group, st_test("unique", secondary_index_test_unique0)); st_groupadd(group, st_test("nonunique", 
secondary_index_test_nonunique0)); return group; }
3,390
602
package io.stargate.db.query; import java.util.List; import java.util.Objects; public interface BoundQuery { /** The type of query this is. */ QueryType type(); /** The query and values used to obtain this bound query. */ BoundQuery.Source<?> source(); /** * A CQL query string representation of this query, with bind markers for the values of {@link * #values()}. */ default String queryString() { return source().query().queryStringForPreparation(); } /** * The values of this bound query, corresponding to the bind markers for {@link #queryString()}. * * <p>Please note that those values may or may not be equals to {@code bounded().query().values()} * because, as specified in {@link Query#queryStringForPreparation()}, a {@link Query} is allowed * to include some additional values (that the ones in {@link Query#bindMarkers()}) in the bound * queries it produces. */ List<TypedValue> values(); final class Source<Q extends Query<?>> { private final Q sourceQuery; private final List<TypedValue> sourceValues; public Source(Q boundedQuery, List<TypedValue> sourceValues) { this.sourceQuery = boundedQuery; this.sourceValues = sourceValues; } public Q query() { return sourceQuery; } public List<TypedValue> values() { return sourceValues; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (!(o instanceof Source)) { return false; } Source<?> source = (Source<?>) o; return sourceQuery.equals(source.sourceQuery) && sourceValues.equals(source.sourceValues); } @Override public int hashCode() { return Objects.hash(sourceQuery, sourceValues); } @Override public String toString() { return String.format("%s with values=%s", sourceQuery, sourceValues); } } }
652
839
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cxf.management.jmx.export; import javax.management.JMException; import javax.management.ObjectName; import org.apache.cxf.management.ManagedComponent; import org.apache.cxf.management.annotation.ManagedAttribute; import org.apache.cxf.management.annotation.ManagedNotification; import org.apache.cxf.management.annotation.ManagedNotifications; import org.apache.cxf.management.annotation.ManagedOperation; import org.apache.cxf.management.annotation.ManagedOperationParameter; import org.apache.cxf.management.annotation.ManagedOperationParameters; import org.apache.cxf.management.annotation.ManagedResource; @ManagedResource(componentName = "AnnotationTest", description = "My Managed Bean", persistPolicy = "OnUpdate", currencyTimeLimit = 15, log = false, logFile = "jmx.log", persistPeriod = 200, persistLocation = "/local/work", persistName = "bar.jmx") @ManagedNotifications({@ManagedNotification(name = "My Notification", notificationTypes = {"type.foo", "type.bar" }) }) public class AnnotationTestInstrumentation implements ManagedComponent { private String name; private String nickName; private int age; private boolean isSuperman; @ManagedAttribute(description = "The Age Attribute", currencyTimeLimit = 15) public int getAge() { return age; } public void setAge(int a) { this.age = a; } @ManagedOperation(currencyTimeLimit = 30) public long myOperation() { return 1L; } @ManagedAttribute(description = "The Name Attribute", currencyTimeLimit = 20, defaultValue = "bar", persistPolicy = "OnUpdate") public void setName(String n) { this.name = n; } @ManagedAttribute(defaultValue = "bar", persistPeriod = 300) public String getName() { return name; } @ManagedAttribute(defaultValue = "barasd", description = "The Nick Name Attribute") public String getNickName() { return this.nickName; } public void setNickName(String n) { this.nickName = n; } @ManagedAttribute(description = "The Is Superman Attribute") public void setSuperman(boolean superman) { this.isSuperman = superman; } public boolean isSuperman() { return isSuperman; } @ManagedOperation(description = "Add Two Numbers Together") @ManagedOperationParameters({@ManagedOperationParameter( name = "x", description = "Left operand"), @ManagedOperationParameter( name = "y", description = "Right operand") }) public int add(int x, int y) { return x + y; } public ObjectName getObjectName() throws JMException { return new ObjectName("org.apache.cxf:type=AnnotationTestInstrumentation"); } }
1,371
493
<filename>src/com/jdh/microcraft/entity/particle/EntitySmashParticle.java package com.jdh.microcraft.entity.particle; import com.jdh.microcraft.Global; import com.jdh.microcraft.gfx.Renderer; import com.jdh.microcraft.level.Level; public class EntitySmashParticle extends EntityParticle { private static final int SPRITE_X = 12, SPRITE_Y = 2; private final int color; public EntitySmashParticle(Level level, int x, int y, int color) { super(level, x, y); this.color = color; } public static void spawn(Level level, int x, int y, int color, int min, int max) { int n = min + Global.random.nextInt(max - min + 1); for (int i = 0; i < n; i++) { level.addEntity(new EntitySmashParticle(level, x, y, color)); } } @Override public void render() { Global.random.setSeed(this.id); Renderer.render( SPRITE_X, SPRITE_Y, this.getRenderX(), this.getRenderY(), color, (Global.random.nextBoolean() ? Renderer.FLIP_X : Renderer.FLIP_NONE) | (Global.random.nextBoolean() ? Renderer.FLIP_Y : Renderer.FLIP_NONE) ); } }
494
2,151
<filename>third_party/libyuv/include/libyuv/planar_functions.h /* * Copyright (c) 2016, Alliance for Open Media. All rights reserved * * This source code is subject to the terms of the BSD 2 Clause License and * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License * was not distributed with this source code in the LICENSE file, you can * obtain it at www.aomedia.org/license/software. If the Alliance for Open * Media Patent License 1.0 was not distributed with this source code in the * PATENTS file, you can obtain it at www.aomedia.org/license/patent. */ #ifndef INCLUDE_LIBYUV_PLANAR_FUNCTIONS_H_ // NOLINT #define INCLUDE_LIBYUV_PLANAR_FUNCTIONS_H_ #include "libyuv/basic_types.h" // TODO(fbarchard): Remove the following headers includes. #include "libyuv/convert.h" #include "libyuv/convert_argb.h" #ifdef __cplusplus namespace libyuv { extern "C" { #endif // Copy a plane of data. LIBYUV_API void CopyPlane(const uint8* src_y, int src_stride_y, uint8* dst_y, int dst_stride_y, int width, int height); LIBYUV_API void CopyPlane_16(const uint16* src_y, int src_stride_y, uint16* dst_y, int dst_stride_y, int width, int height); // Set a plane of data to a 32 bit value. LIBYUV_API void SetPlane(uint8* dst_y, int dst_stride_y, int width, int height, uint32 value); // Copy I400. Supports inverting. LIBYUV_API int I400ToI400(const uint8* src_y, int src_stride_y, uint8* dst_y, int dst_stride_y, int width, int height); #define J400ToJ400 I400ToI400 // Copy I422 to I422. #define I422ToI422 I422Copy LIBYUV_API int I422Copy(const uint8* src_y, int src_stride_y, const uint8* src_u, int src_stride_u, const uint8* src_v, int src_stride_v, uint8* dst_y, int dst_stride_y, uint8* dst_u, int dst_stride_u, uint8* dst_v, int dst_stride_v, int width, int height); // Copy I444 to I444. #define I444ToI444 I444Copy LIBYUV_API int I444Copy(const uint8* src_y, int src_stride_y, const uint8* src_u, int src_stride_u, const uint8* src_v, int src_stride_v, uint8* dst_y, int dst_stride_y, uint8* dst_u, int dst_stride_u, uint8* dst_v, int dst_stride_v, int width, int height); // Convert YUY2 to I422. LIBYUV_API int YUY2ToI422(const uint8* src_yuy2, int src_stride_yuy2, uint8* dst_y, int dst_stride_y, uint8* dst_u, int dst_stride_u, uint8* dst_v, int dst_stride_v, int width, int height); // Convert UYVY to I422. LIBYUV_API int UYVYToI422(const uint8* src_uyvy, int src_stride_uyvy, uint8* dst_y, int dst_stride_y, uint8* dst_u, int dst_stride_u, uint8* dst_v, int dst_stride_v, int width, int height); LIBYUV_API int YUY2ToNV12(const uint8* src_yuy2, int src_stride_yuy2, uint8* dst_y, int dst_stride_y, uint8* dst_uv, int dst_stride_uv, int width, int height); LIBYUV_API int UYVYToNV12(const uint8* src_uyvy, int src_stride_uyvy, uint8* dst_y, int dst_stride_y, uint8* dst_uv, int dst_stride_uv, int width, int height); // Convert I420 to I400. (calls CopyPlane ignoring u/v). LIBYUV_API int I420ToI400(const uint8* src_y, int src_stride_y, const uint8* src_u, int src_stride_u, const uint8* src_v, int src_stride_v, uint8* dst_y, int dst_stride_y, int width, int height); // Alias #define J420ToJ400 I420ToI400 #define I420ToI420Mirror I420Mirror // I420 mirror. LIBYUV_API int I420Mirror(const uint8* src_y, int src_stride_y, const uint8* src_u, int src_stride_u, const uint8* src_v, int src_stride_v, uint8* dst_y, int dst_stride_y, uint8* dst_u, int dst_stride_u, uint8* dst_v, int dst_stride_v, int width, int height); // Alias #define I400ToI400Mirror I400Mirror // I400 mirror. A single plane is mirrored horizontally. 
// Pass negative height to achieve 180 degree rotation. LIBYUV_API int I400Mirror(const uint8* src_y, int src_stride_y, uint8* dst_y, int dst_stride_y, int width, int height); // Alias #define ARGBToARGBMirror ARGBMirror // ARGB mirror. LIBYUV_API int ARGBMirror(const uint8* src_argb, int src_stride_argb, uint8* dst_argb, int dst_stride_argb, int width, int height); // Convert NV12 to RGB565. LIBYUV_API int NV12ToRGB565(const uint8* src_y, int src_stride_y, const uint8* src_uv, int src_stride_uv, uint8* dst_rgb565, int dst_stride_rgb565, int width, int height); // Convert NV21 to RGB565. LIBYUV_API int NV21ToRGB565(const uint8* src_y, int src_stride_y, const uint8* src_uv, int src_stride_uv, uint8* dst_rgb565, int dst_stride_rgb565, int width, int height); // I422ToARGB is in convert_argb.h // Convert I422 to BGRA. LIBYUV_API int I422ToBGRA(const uint8* src_y, int src_stride_y, const uint8* src_u, int src_stride_u, const uint8* src_v, int src_stride_v, uint8* dst_bgra, int dst_stride_bgra, int width, int height); // Convert I422 to ABGR. LIBYUV_API int I422ToABGR(const uint8* src_y, int src_stride_y, const uint8* src_u, int src_stride_u, const uint8* src_v, int src_stride_v, uint8* dst_abgr, int dst_stride_abgr, int width, int height); // Convert I422 to RGBA. LIBYUV_API int I422ToRGBA(const uint8* src_y, int src_stride_y, const uint8* src_u, int src_stride_u, const uint8* src_v, int src_stride_v, uint8* dst_rgba, int dst_stride_rgba, int width, int height); // Draw a rectangle into I420. LIBYUV_API int I420Rect(uint8* dst_y, int dst_stride_y, uint8* dst_u, int dst_stride_u, uint8* dst_v, int dst_stride_v, int x, int y, int width, int height, int value_y, int value_u, int value_v); // Draw a rectangle into ARGB. LIBYUV_API int ARGBRect(uint8* dst_argb, int dst_stride_argb, int x, int y, int width, int height, uint32 value); // Convert ARGB to gray scale ARGB. LIBYUV_API int ARGBGrayTo(const uint8* src_argb, int src_stride_argb, uint8* dst_argb, int dst_stride_argb, int width, int height); // Make a rectangle of ARGB gray scale. LIBYUV_API int ARGBGray(uint8* dst_argb, int dst_stride_argb, int x, int y, int width, int height); // Make a rectangle of ARGB Sepia tone. LIBYUV_API int ARGBSepia(uint8* dst_argb, int dst_stride_argb, int x, int y, int width, int height); // Apply a matrix rotation to each ARGB pixel. // matrix_argb is 4 signed ARGB values. -128 to 127 representing -2 to 2. // The first 4 coefficients apply to B, G, R, A and produce B of the output. // The next 4 coefficients apply to B, G, R, A and produce G of the output. // The next 4 coefficients apply to B, G, R, A and produce R of the output. // The last 4 coefficients apply to B, G, R, A and produce A of the output. LIBYUV_API int ARGBColorMatrix(const uint8* src_argb, int src_stride_argb, uint8* dst_argb, int dst_stride_argb, const int8* matrix_argb, int width, int height); // Deprecated. Use ARGBColorMatrix instead. // Apply a matrix rotation to each ARGB pixel. // matrix_argb is 3 signed ARGB values. -128 to 127 representing -1 to 1. // The first 4 coefficients apply to B, G, R, A and produce B of the output. // The next 4 coefficients apply to B, G, R, A and produce G of the output. // The last 4 coefficients apply to B, G, R, A and produce R of the output. LIBYUV_API int RGBColorMatrix(uint8* dst_argb, int dst_stride_argb, const int8* matrix_rgb, int x, int y, int width, int height); // Apply a color table each ARGB pixel. // Table contains 256 ARGB values. 
LIBYUV_API int ARGBColorTable(uint8* dst_argb, int dst_stride_argb, const uint8* table_argb, int x, int y, int width, int height); // Apply a color table each ARGB pixel but preserve destination alpha. // Table contains 256 ARGB values. LIBYUV_API int RGBColorTable(uint8* dst_argb, int dst_stride_argb, const uint8* table_argb, int x, int y, int width, int height); // Apply a luma/color table each ARGB pixel but preserve destination alpha. // Table contains 32768 values indexed by [Y][C] where 7 it 7 bit luma from // RGB (YJ style) and C is an 8 bit color component (R, G or B). LIBYUV_API int ARGBLumaColorTable(const uint8* src_argb, int src_stride_argb, uint8* dst_argb, int dst_stride_argb, const uint8* luma_rgb_table, int width, int height); // Apply a 3 term polynomial to ARGB values. // poly points to a 4x4 matrix. The first row is constants. The 2nd row is // coefficients for b, g, r and a. The 3rd row is coefficients for b squared, // g squared, r squared and a squared. The 4rd row is coefficients for b to // the 3, g to the 3, r to the 3 and a to the 3. The values are summed and // result clamped to 0 to 255. // A polynomial approximation can be dirived using software such as 'R'. LIBYUV_API int ARGBPolynomial(const uint8* src_argb, int src_stride_argb, uint8* dst_argb, int dst_stride_argb, const float* poly, int width, int height); // Quantize a rectangle of ARGB. Alpha unaffected. // scale is a 16 bit fractional fixed point scaler between 0 and 65535. // interval_size should be a value between 1 and 255. // interval_offset should be a value between 0 and 255. LIBYUV_API int ARGBQuantize(uint8* dst_argb, int dst_stride_argb, int scale, int interval_size, int interval_offset, int x, int y, int width, int height); // Copy ARGB to ARGB. LIBYUV_API int ARGBCopy(const uint8* src_argb, int src_stride_argb, uint8* dst_argb, int dst_stride_argb, int width, int height); // Copy ARGB to ARGB. LIBYUV_API int ARGBCopyAlpha(const uint8* src_argb, int src_stride_argb, uint8* dst_argb, int dst_stride_argb, int width, int height); // Copy ARGB to ARGB. LIBYUV_API int ARGBCopyYToAlpha(const uint8* src_y, int src_stride_y, uint8* dst_argb, int dst_stride_argb, int width, int height); typedef void (*ARGBBlendRow)(const uint8* src_argb0, const uint8* src_argb1, uint8* dst_argb, int width); // Get function to Alpha Blend ARGB pixels and store to destination. LIBYUV_API ARGBBlendRow GetARGBBlend(); // Alpha Blend ARGB images and store to destination. // Alpha of destination is set to 255. LIBYUV_API int ARGBBlend(const uint8* src_argb0, int src_stride_argb0, const uint8* src_argb1, int src_stride_argb1, uint8* dst_argb, int dst_stride_argb, int width, int height); // Multiply ARGB image by ARGB image. Shifted down by 8. Saturates to 255. LIBYUV_API int ARGBMultiply(const uint8* src_argb0, int src_stride_argb0, const uint8* src_argb1, int src_stride_argb1, uint8* dst_argb, int dst_stride_argb, int width, int height); // Add ARGB image with ARGB image. Saturates to 255. LIBYUV_API int ARGBAdd(const uint8* src_argb0, int src_stride_argb0, const uint8* src_argb1, int src_stride_argb1, uint8* dst_argb, int dst_stride_argb, int width, int height); // Subtract ARGB image (argb1) from ARGB image (argb0). Saturates to 0. LIBYUV_API int ARGBSubtract(const uint8* src_argb0, int src_stride_argb0, const uint8* src_argb1, int src_stride_argb1, uint8* dst_argb, int dst_stride_argb, int width, int height); // Convert I422 to YUY2. 
LIBYUV_API int I422ToYUY2(const uint8* src_y, int src_stride_y, const uint8* src_u, int src_stride_u, const uint8* src_v, int src_stride_v, uint8* dst_frame, int dst_stride_frame, int width, int height); // Convert I422 to UYVY. LIBYUV_API int I422ToUYVY(const uint8* src_y, int src_stride_y, const uint8* src_u, int src_stride_u, const uint8* src_v, int src_stride_v, uint8* dst_frame, int dst_stride_frame, int width, int height); // Convert unattentuated ARGB to preattenuated ARGB. LIBYUV_API int ARGBAttenuate(const uint8* src_argb, int src_stride_argb, uint8* dst_argb, int dst_stride_argb, int width, int height); // Convert preattentuated ARGB to unattenuated ARGB. LIBYUV_API int ARGBUnattenuate(const uint8* src_argb, int src_stride_argb, uint8* dst_argb, int dst_stride_argb, int width, int height); // Convert MJPG to ARGB. LIBYUV_API int MJPGToARGB(const uint8* sample, size_t sample_size, uint8* argb, int argb_stride, int w, int h, int dw, int dh); // Internal function - do not call directly. // Computes table of cumulative sum for image where the value is the sum // of all values above and to the left of the entry. Used by ARGBBlur. LIBYUV_API int ARGBComputeCumulativeSum(const uint8* src_argb, int src_stride_argb, int32* dst_cumsum, int dst_stride32_cumsum, int width, int height); // Blur ARGB image. // dst_cumsum table of width * (height + 1) * 16 bytes aligned to // 16 byte boundary. // dst_stride32_cumsum is number of ints in a row (width * 4). // radius is number of pixels around the center. e.g. 1 = 3x3. 2=5x5. // Blur is optimized for radius of 5 (11x11) or less. LIBYUV_API int ARGBBlur(const uint8* src_argb, int src_stride_argb, uint8* dst_argb, int dst_stride_argb, int32* dst_cumsum, int dst_stride32_cumsum, int width, int height, int radius); // Multiply ARGB image by ARGB value. LIBYUV_API int ARGBShade(const uint8* src_argb, int src_stride_argb, uint8* dst_argb, int dst_stride_argb, int width, int height, uint32 value); // Interpolate between two ARGB images using specified amount of interpolation // (0 to 255) and store to destination. // 'interpolation' is specified as 8 bit fraction where 0 means 100% src_argb0 // and 255 means 1% src_argb0 and 99% src_argb1. // Internally uses ARGBScale bilinear filtering. // Caveat: This function will write up to 16 bytes beyond the end of dst_argb. LIBYUV_API int ARGBInterpolate(const uint8* src_argb0, int src_stride_argb0, const uint8* src_argb1, int src_stride_argb1, uint8* dst_argb, int dst_stride_argb, int width, int height, int interpolation); #if defined(__pnacl__) || defined(__CLR_VER) || \ (defined(__i386__) && !defined(__SSE2__)) #define LIBYUV_DISABLE_X86 #endif // The following are available on all x86 platforms: #if !defined(LIBYUV_DISABLE_X86) && \ (defined(_M_IX86) || defined(__x86_64__) || defined(__i386__)) #define HAS_ARGBAFFINEROW_SSE2 #endif // Row function for copying pixels from a source with a slope to a row // of destination. Useful for scaling, rotation, mirror, texture mapping. LIBYUV_API void ARGBAffineRow_C(const uint8* src_argb, int src_argb_stride, uint8* dst_argb, const float* uv_dudv, int width); LIBYUV_API void ARGBAffineRow_SSE2(const uint8* src_argb, int src_argb_stride, uint8* dst_argb, const float* uv_dudv, int width); // Shuffle ARGB channel order. e.g. BGRA to ARGB. // shuffler is 16 bytes and must be aligned. 
LIBYUV_API int ARGBShuffle(const uint8* src_bgra, int src_stride_bgra, uint8* dst_argb, int dst_stride_argb, const uint8* shuffler, int width, int height); // Sobel ARGB effect with planar output. LIBYUV_API int ARGBSobelToPlane(const uint8* src_argb, int src_stride_argb, uint8* dst_y, int dst_stride_y, int width, int height); // Sobel ARGB effect. LIBYUV_API int ARGBSobel(const uint8* src_argb, int src_stride_argb, uint8* dst_argb, int dst_stride_argb, int width, int height); // Sobel ARGB effect w/ Sobel X, Sobel, Sobel Y in ARGB. LIBYUV_API int ARGBSobelXY(const uint8* src_argb, int src_stride_argb, uint8* dst_argb, int dst_stride_argb, int width, int height); #ifdef __cplusplus } // extern "C" } // namespace libyuv #endif #endif // INCLUDE_LIBYUV_PLANAR_FUNCTIONS_H_ NOLINT
7,931
432
/* Language-specific hook definitions for C front end. Copyright (C) 1991-2018 Free Software Foundation, Inc. This file is part of GCC. GCC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3, or (at your option) any later version. GCC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with GCC; see the file COPYING3. If not see <http://www.gnu.org/licenses/>. */ #include "config.h" #include "system.h" #include "coretypes.h" #include "c-tree.h" #include "langhooks.h" #include "langhooks-def.h" #include "c-objc-common.h" enum c_language_kind c_language = clk_c; /* Lang hooks common to C and ObjC are declared in c-objc-common.h; consequently, there should be very few hooks below. */ #undef LANG_HOOKS_NAME #define LANG_HOOKS_NAME "GNU C" #undef LANG_HOOKS_INIT #define LANG_HOOKS_INIT c_objc_common_init #undef LANG_HOOKS_INIT_TS #define LANG_HOOKS_INIT_TS c_common_init_ts #if CHECKING_P #undef LANG_HOOKS_RUN_LANG_SELFTESTS #define LANG_HOOKS_RUN_LANG_SELFTESTS selftest::run_c_tests #endif /* #if CHECKING_P */ #undef LANG_HOOKS_GET_SUBSTRING_LOCATION #define LANG_HOOKS_GET_SUBSTRING_LOCATION c_get_substring_location /* Each front end provides its own lang hook initializer. */ struct lang_hooks lang_hooks = LANG_HOOKS_INITIALIZER; #if CHECKING_P namespace selftest { /* Implementation of LANG_HOOKS_RUN_LANG_SELFTESTS for the C frontend. */ void run_c_tests (void) { /* Run selftests shared within the C family. */ c_family_tests (); /* Additional C-specific tests. */ } } // namespace selftest #endif /* #if CHECKING_P */ #include "gtype-c.h"
713
6,097
<filename>RuneFramework/vendor/glm/test/gtc/gtc_type_aligned.cpp<gh_stars>1000+ #include <glm/glm.hpp> #if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE #include <glm/gtc/type_aligned.hpp> #include <glm/gtc/type_precision.hpp> #include <glm/ext/vector_relational.hpp> #include <glm/ext/matrix_relational.hpp> GLM_STATIC_ASSERT(glm::detail::is_aligned<glm::aligned_lowp>::value, "aligned_lowp is not aligned"); GLM_STATIC_ASSERT(glm::detail::is_aligned<glm::aligned_mediump>::value, "aligned_mediump is not aligned"); GLM_STATIC_ASSERT(glm::detail::is_aligned<glm::aligned_highp>::value, "aligned_highp is not aligned"); GLM_STATIC_ASSERT(!glm::detail::is_aligned<glm::packed_highp>::value, "packed_highp is aligned"); GLM_STATIC_ASSERT(!glm::detail::is_aligned<glm::packed_mediump>::value, "packed_mediump is aligned"); GLM_STATIC_ASSERT(!glm::detail::is_aligned<glm::packed_lowp>::value, "packed_lowp is aligned"); struct my_vec4_packed { glm::uint32 a; glm::vec4 b; }; GLM_STATIC_ASSERT(sizeof(my_vec4_packed) == sizeof(glm::uint32) + sizeof(glm::vec4), "glm::vec4 packed is not correct"); struct my_vec4_aligned { glm::uint32 a; glm::aligned_vec4 b; }; GLM_STATIC_ASSERT(sizeof(my_vec4_aligned) == sizeof(glm::aligned_vec4) * 2, "glm::vec4 aligned is not correct"); struct my_dvec4_packed { glm::uint64 a; glm::dvec4 b; }; GLM_STATIC_ASSERT(sizeof(my_dvec4_packed) == sizeof(glm::uint64) + sizeof(glm::dvec4), "glm::dvec4 packed is not correct"); struct my_dvec4_aligned { glm::uint64 a; glm::aligned_dvec4 b; }; //GLM_STATIC_ASSERT(sizeof(my_dvec4_aligned) == sizeof(glm::aligned_dvec4) * 2, "glm::dvec4 aligned is not correct"); struct my_ivec4_packed { glm::uint32 a; glm::ivec4 b; }; GLM_STATIC_ASSERT(sizeof(my_ivec4_packed) == sizeof(glm::uint32) + sizeof(glm::ivec4), "glm::ivec4 packed is not correct"); struct my_ivec4_aligned { glm::uint32 a; glm::aligned_ivec4 b; }; GLM_STATIC_ASSERT(sizeof(my_ivec4_aligned) == sizeof(glm::aligned_ivec4) * 2, "glm::ivec4 aligned is not correct"); struct my_u8vec4_packed { glm::uint32 a; glm::u8vec4 b; }; GLM_STATIC_ASSERT(sizeof(my_u8vec4_packed) == sizeof(glm::uint32) + sizeof(glm::u8vec4), "glm::u8vec4 packed is not correct"); static int test_copy() { int Error = 0; { glm::aligned_ivec4 const a(1, 2, 3, 4); glm::ivec4 const u(a); Error += a.x == u.x ? 0 : 1; Error += a.y == u.y ? 0 : 1; Error += a.z == u.z ? 0 : 1; Error += a.w == u.w ? 0 : 1; } { my_ivec4_aligned a; a.b = glm::ivec4(1, 2, 3, 4); my_ivec4_packed u; u.b = a.b; Error += a.b.x == u.b.x ? 0 : 1; Error += a.b.y == u.b.y ? 0 : 1; Error += a.b.z == u.b.z ? 0 : 1; Error += a.b.w == u.b.w ? 0 : 1; } return Error; } static int test_ctor() { int Error = 0; # if GLM_HAS_CONSTEXPR { constexpr glm::aligned_ivec4 v(1); Error += v.x == 1 ? 0 : 1; Error += v.y == 1 ? 0 : 1; Error += v.z == 1 ? 0 : 1; Error += v.w == 1 ? 0 : 1; } { constexpr glm::packed_ivec4 v(1); Error += v.x == 1 ? 0 : 1; Error += v.y == 1 ? 0 : 1; Error += v.z == 1 ? 0 : 1; Error += v.w == 1 ? 0 : 1; } { constexpr glm::ivec4 v(1); Error += v.x == 1 ? 0 : 1; Error += v.y == 1 ? 0 : 1; Error += v.z == 1 ? 0 : 1; Error += v.w == 1 ? 0 : 1; } # endif//GLM_HAS_CONSTEXPR return Error; } static int test_aligned_ivec4() { int Error = 0; glm::aligned_ivec4 const v(1, 2, 3, 4); Error += glm::all(glm::equal(v, glm::aligned_ivec4(1, 2, 3, 4))) ? 0 : 1; glm::aligned_ivec4 const u = v * 2; Error += glm::all(glm::equal(u, glm::aligned_ivec4(2, 4, 6, 8))) ? 
0 : 1; return Error; } static int test_aligned_mat4() { int Error = 0; glm::aligned_vec4 const u(1.f, 2.f, 3.f, 4.f); Error += glm::all(glm::equal(u, glm::aligned_vec4(1.f, 2.f, 3.f, 4.f), 0.0001f)) ? 0 : 1; glm::aligned_vec4 const v(1, 2, 3, 4); Error += glm::all(glm::equal(v, glm::aligned_vec4(1.f, 2.f, 3.f, 4.f), 0.0001f)) ? 0 : 1; glm::aligned_mat4 const m(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15); glm::aligned_mat4 const t = glm::transpose(m); glm::aligned_mat4 const expected = glm::mat4(0, 4, 8, 12, 1, 5, 9, 13, 2, 6, 10, 14, 3, 7, 11, 15); Error += glm::all(glm::equal(t, expected, 0.0001f)) ? 0 : 1; return Error; } int main() { int Error = 0; Error += test_ctor(); Error += test_copy(); Error += test_aligned_ivec4(); Error += test_aligned_mat4(); return Error; } #else int main() { return 0; } #endif
2,085
370
package com.fastasyncworldedit.core.util.task; import com.fastasyncworldedit.core.Fawe; import java.io.Closeable; import java.util.concurrent.Callable; import java.util.concurrent.Future; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import java.util.function.Supplier; public class AsyncNotifyQueue implements Closeable { private final Lock lock = new ReentrantLock(true); private final Thread.UncaughtExceptionHandler handler; private boolean closed; public AsyncNotifyQueue(Thread.UncaughtExceptionHandler handler) { this.handler = handler; } public Thread.UncaughtExceptionHandler getHandler() { return handler; } public <T> Future<T> run(Runnable task) { return call(() -> { task.run(); return null; }); } public <T> Future<T> supply(Supplier<T> task) { return call(task::get); } public <T> Future<T> call(Callable<T> task) { Future[] self = new Future[1]; Callable<T> wrapped = () -> { if (!closed) { lock.lock(); try { if (!closed) { try { return task.call(); } catch (Throwable e) { handler.uncaughtException(Thread.currentThread(), e); if (self[0] != null) { self[0].cancel(true); } } } } finally { lock.unlock(); } } if (self[0] != null) { self[0].cancel(true); } return null; }; self[0] = Fawe.instance().getQueueHandler().async(wrapped); return self[0]; } @Override public void close() { closed = true; } public boolean isClosed() { return closed; } }
1,041
5,798
<gh_stars>1000+ # Copyright 2018 Google LLC. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================= """Test for the Iris dataset module.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import unittest import numpy as np import iris_data class IrisDataTest(unittest.TestCase): def testLoadData(self): iris_x, iris_y = iris_data.load() self.assertEqual(2, len(iris_x.shape)) self.assertGreater(iris_x.shape[0], 0) self.assertEqual(4, iris_x.shape[1]) self.assertEqual(iris_x.shape[0], iris_y.shape[0]) self.assertEqual(3, iris_y.shape[1]) self.assertTrue( np.allclose(np.ones([iris_y.shape[0], 1]), np.sum(iris_y, axis=1))) if __name__ == '__main__': unittest.main()
443
2,728
<filename>sdk/cognitiveservices/azure-cognitiveservices-knowledge-qnamaker/azure/cognitiveservices/knowledge/qnamaker/models/feedback_record_dto.py # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class FeedbackRecordDTO(Model): """Active learning feedback record. :param user_id: Unique identifier for the user. :type user_id: str :param user_question: The suggested question being provided as feedback. :type user_question: str :param qna_id: The qnaId for which the suggested question is provided as feedback. :type qna_id: int """ _validation = { 'user_question': {'max_length': 1000}, } _attribute_map = { 'user_id': {'key': 'userId', 'type': 'str'}, 'user_question': {'key': 'userQuestion', 'type': 'str'}, 'qna_id': {'key': 'qnaId', 'type': 'int'}, } def __init__(self, **kwargs): super(FeedbackRecordDTO, self).__init__(**kwargs) self.user_id = kwargs.get('user_id', None) self.user_question = kwargs.get('user_question', None) self.qna_id = kwargs.get('qna_id', None)
530
318
/* * Copyright (c) 2002-2021 "Neo4j," * Neo4j Sweden AB [http://neo4j.com] * * This file is part of Neo4j. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.neo4j.ogm.drivers.bolt.response; import org.neo4j.driver.Result; import org.neo4j.driver.summary.SummaryCounters; import org.neo4j.ogm.config.ObjectMapperFactory; import org.neo4j.ogm.response.model.QueryStatisticsModel; import org.neo4j.ogm.result.adapter.ResultAdapter; import com.fasterxml.jackson.databind.ObjectMapper; /** * @author <NAME> * @author <NAME> */ public class StatisticsModelAdapter implements ResultAdapter<Result, QueryStatisticsModel> { protected static final ObjectMapper mapper = ObjectMapperFactory.objectMapper(); @Override public QueryStatisticsModel adapt(Result result) { QueryStatisticsModel queryStatisticsModel = new QueryStatisticsModel(); SummaryCounters stats = result.consume().counters(); queryStatisticsModel.setContains_updates(stats.containsUpdates()); queryStatisticsModel.setNodes_created(stats.nodesCreated()); queryStatisticsModel.setNodes_deleted(stats.nodesDeleted()); queryStatisticsModel.setProperties_set(stats.propertiesSet()); queryStatisticsModel.setRelationships_created(stats.relationshipsCreated()); queryStatisticsModel.setRelationship_deleted(stats.relationshipsDeleted()); queryStatisticsModel.setLabels_added(stats.labelsAdded()); queryStatisticsModel.setLabels_removed(stats.labelsRemoved()); queryStatisticsModel.setIndexes_added(stats.indexesAdded()); queryStatisticsModel.setIndexes_removed(stats.indexesRemoved()); queryStatisticsModel.setConstraints_added(stats.constraintsAdded()); queryStatisticsModel.setConstraints_removed(stats.constraintsRemoved()); return queryStatisticsModel; } }
759
505
import os
import cv2
import glob
import insightface
import imutils
import numpy as np
import tqdm


def eye_aspect_ratio(eye):
    """8-point eye aspect ratio: mean of the three lid-to-lid distances
    divided by the corner-to-corner width. Small values indicate a closed eye."""
    A = np.linalg.norm(eye[1] - eye[7])
    B = np.linalg.norm(eye[2] - eye[6])
    C = np.linalg.norm(eye[3] - eye[5])
    D = np.linalg.norm(eye[0] - eye[4])
    ear = (A + B + C) / (3.0 * D)
    return ear


set_part = 'train'
print("set_part : ", set_part)
root_path = '/media/data4T1/hanson/Landmarks/LaPa'
label_txt = os.path.join(root_path, 'lapa_' + set_part + '_label.txt')

# Landmark indices dropped from the LaPa 106-point annotation.
drop_index = [56, 57, 58, 64, 65, 66, 75, 84]

imgslist = glob.glob(os.path.join(root_path, set_part + '/images', '*.jpg'))

# RetinaFace detector (CPU context, nms threshold 0.4) used to produce face boxes.
retina = insightface.model_zoo.get_model('retinaface_mnet025_v1')
retina.prepare(-1, 0.4)

with open(label_txt, 'w') as wf:
    for img_path in tqdm.tqdm(imgslist):
        landm_path = os.path.join(root_path, set_part + '/landmarks',
                                  img_path.split('/')[-1].replace('.jpg', '.txt'))
        with open(landm_path, 'r') as rf:
            landms = rf.readlines()

        img = cv2.imread(img_path)
        det_img = imutils.resize(img, width=480)
        r = float(img.shape[1]) / 480.

        # Read the landmark file (line 0 is the point count), skipping dropped indices.
        landmark = []
        for i in range(1, len(landms)):
            if i in drop_index:
                continue
            x, y = landms[i].strip().split()
            landmark.append(float(x))
            landmark.append(float(y))
            cv2.circle(img, (int(float(x)), int(float(y))), 1, (255, 0, 0), 1)

        # After dropping, points 60-67 form the left eye contour and 68-75 the right.
        leye = np.array([[landmark[i * 2], landmark[i * 2 + 1]] for i in range(60, 68)])
        reye = np.array([[landmark[i * 2], landmark[i * 2 + 1]] for i in range(68, 76)])
        l_ear = eye_aspect_ratio(leye)
        r_ear = eye_aspect_ratio(reye)

        for k in range(leye.shape[0]):
            x, y = leye[k]
            cv2.circle(img, (int(float(x)), int(float(y))), 1, (0, 255, 0), 2)
            x, y = reye[k]
            cv2.circle(img, (int(float(x)), int(float(y))), 1, (0, 255, 255), 2)

        # Keep only samples where at least one eye looks closed.
        if l_ear > 0.1 and r_ear > 0.1:
            continue

        bboxes, landmarks = retina.detect(det_img, threshold=0.5, scale=1.0)
        if bboxes.shape[0] >= 1:
            # Pick the detection that is both large and close to the image center.
            area = (bboxes[:, 2] - bboxes[:, 0]) * (bboxes[:, 3] - bboxes[:, 1])
            img_center = det_img.shape[0] // 2, det_img.shape[1] // 2
            offsets = np.vstack(
                [(bboxes[:, 0] + bboxes[:, 2]) / 2 - img_center[1],
                 (bboxes[:, 1] + bboxes[:, 3]) / 2 - img_center[0]])
            offset_dist_squared = np.sum(np.power(offsets, 2.0), 0)
            values = area - offset_dist_squared * 2.0  # some extra weight on the centering
            bindex = np.argsort(values)[::-1]
            bindex = bindex[0:1]
            bboxes = bboxes[bindex, :]
            landmarks = landmarks[bindex, :]
        else:
            continue

        # Scale the detection box back to the original image resolution.
        box = [int(bboxes[0][0] * r), int(bboxes[0][1] * r),
               int(bboxes[0][2] * r), int(bboxes[0][3] * r)]
        cv2.rectangle(img, (box[0], box[1]), (box[2], box[3]), (0, 255, 0), 1)

        landmark_str = ' '.join(list(map(str, landmark)))
        attributes_str = '0 0 0 0 0 0'  # six placeholder attribute flags
        path_str = 'LAPA_' + set_part + '/' + img_path.split('/')[-1]
        box_str = ' '.join(list(map(str, box)))
        label = '{} {} {} {}\n'.format(landmark_str, box_str, attributes_str, path_str)
        # if l_ear < 0.2 or r_ear < 0.2:
        wf.write(label)
        # print(label)
        print("left eye : ", l_ear)
        print("right eye : ", r_ear)
        # cv2.imshow('img', img)
        # cv2.waitKey(0)
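# --- Illustrative sketch, not part of the original script ---
# A quick check of the 8-point eye_aspect_ratio() defined above on synthetic
# eye contours, ordered as in the script: indices 0/4 are the horizontal
# corners, 1-3 the upper lid, 5-7 the lower lid. The 40 px width, the
# _synthetic_eye helper, and the open/closed heights are assumptions made
# only for this demonstration; the 0.1 threshold mirrors the filter above.
import numpy as np


def _synthetic_eye(height):
    # Hypothetical helper: an eye 40 px wide whose lid separation is `height`.
    xs = np.array([0, 8, 20, 32, 40, 32, 20, 8], dtype=float)
    ys = np.array([0, -height / 2, -height / 2, -height / 2, 0,
                   height / 2, height / 2, height / 2], dtype=float)
    return np.stack([xs, ys], axis=1)


open_eye = _synthetic_eye(height=12.0)   # EAR = 12 / 40 = 0.30 -> treated as open
closed_eye = _synthetic_eye(height=2.0)  # EAR =  2 / 40 = 0.05 -> treated as closed
print(eye_aspect_ratio(open_eye), eye_aspect_ratio(closed_eye))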
2,373
830
# Copyright 2021 JD.com, Inc., JD AI """ @author: <NAME> @contact: <EMAIL> """ import torch from torch import nn from torch.nn.utils.weight_norm import weight_norm from xmodaler.config import configurable from xmodaler.config import CfgNode as CN from xmodaler.config import kfg from ..layers import ShiftedConvLayer, SoftAttention from .decoder import Decoder from .build import DECODER_REGISTRY import math __all__ = ["TDConvEDDecoder"] @DECODER_REGISTRY.register() class TDConvEDDecoder(nn.Module): @configurable def __init__( self, *, num_hidden_layers: int, hidden_size: int, kernel_sizes: list, # list of int conv_dropout: float, att_embed_size: int, att_embed_dropout: float, use_norm: bool ): super(TDConvEDDecoder, self).__init__() self.num_layers = num_hidden_layers self.hidden_size = hidden_size self.kernel_sizes = kernel_sizes self.conv_dropout = conv_dropout self.att_embed_size = att_embed_size self.att_embed_dropout = att_embed_dropout if use_norm: self.gv_feat_embed = weight_norm(nn.Linear(hidden_size, hidden_size)) self.gv_feat_dropout = nn.Dropout(conv_dropout) if conv_dropout > 0. else None self.wt_gv_embed = weight_norm(nn.Linear(hidden_size * 2, hidden_size)) self.wt_gv_embed_dropout = nn.Dropout(conv_dropout) if conv_dropout > 0. else None self.p_att_feats = weight_norm(nn.Linear(hidden_size, att_embed_size)) self.p_att_feats_dropout = nn.Dropout(conv_dropout) if conv_dropout > 0. else None else: self.gv_feat_embed = nn.Linear(hidden_size, hidden_size) self.gv_feat_dropout = nn.Dropout(conv_dropout) if conv_dropout > 0. else None self.wt_gv_embed = nn.Linear(hidden_size * 2, hidden_size) self.wt_gv_embed_dropout = nn.Dropout(conv_dropout) if conv_dropout > 0. else None self.p_att_feats = nn.Linear(hidden_size, att_embed_size) self.p_att_feats_dropout = nn.Dropout(conv_dropout) if conv_dropout > 0. 
else None self.layers = nn.ModuleList( [ShiftedConvLayer( hidden_size, hidden_size, kernel_size, # list of int stride=1, padding_mode='zeros', # 'zeros' dropout=conv_dropout, use_norm=use_norm) for kernel_size in self.kernel_sizes] ) self.att = SoftAttention( hidden_size = hidden_size, att_embed_size = att_embed_size, att_embed_dropout = att_embed_dropout, use_norm = use_norm ) self._clear_decoding_buffer() @classmethod def from_config(cls, cfg): return { "num_hidden_layers": cfg.MODEL.TDCONVED.DECODER.NUM_HIDDEN_LAYERS, "hidden_size": cfg.MODEL.TDCONVED.DECODER.HIDDEN_SIZE, "kernel_sizes": cfg.MODEL.TDCONVED.DECODER.KERNEL_SIZES, # list of int "conv_dropout": cfg.MODEL.TDCONVED.DECODER.DROPOUT, "att_embed_size": cfg.MODEL.TDCONVED.DECODER.ATT_EMBED_SIZE, "att_embed_dropout": cfg.MODEL.TDCONVED.DECODER.ATT_EMBED_DROPOUT, "use_norm": cfg.MODEL.TDCONVED.DECODER.USE_NORM } @classmethod def add_config(cls, cfg): cfg.MODEL.TDCONVED.DECODER = CN() cfg.MODEL.TDCONVED.DECODER.NUM_HIDDEN_LAYERS = 2 cfg.MODEL.TDCONVED.DECODER.HIDDEN_SIZE = 512 cfg.MODEL.TDCONVED.DECODER.KERNEL_SIZES = [3, 3] cfg.MODEL.TDCONVED.DECODER.DROPOUT = 0.5 cfg.MODEL.TDCONVED.DECODER.ATT_EMBED_SIZE = 256 cfg.MODEL.TDCONVED.DECODER.ATT_EMBED_DROPOUT = 0.5 cfg.MODEL.TDCONVED.DECODER.USE_NORM = True def preprocess(self, batched_inputs): att_feats = batched_inputs[kfg.ATT_FEATS] batch_size, num_frames, hidden_size = att_feats.size() att_masks = batched_inputs[kfg.ATT_MASKS].view(batch_size, num_frames) # [batch, num_frames] ext_att_masks = batched_inputs[kfg.EXT_ATT_MASKS] # 4-D p_att_feats = self.p_att_feats(att_feats) if self.p_att_feats_dropout is not None: p_att_feats = self.p_att_feats_dropout(p_att_feats) gv_feat = torch.sum(att_feats * att_masks.unsqueeze(-1), 1) / torch.sum(att_masks.unsqueeze(-1), 1) gv_feat = self.gv_feat_embed(gv_feat) if self.gv_feat_dropout is not None: gv_feat = self.gv_feat_dropout(gv_feat) if self.training: self._clear_decoding_buffer() wt = batched_inputs[kfg.G_TOKENS_IDS] # [batch, max_len] seq_len = wt.size(1) # expand along time batched_inputs.update( { kfg.P_ATT_FEATS: p_att_feats.unsqueeze(1).expand(batch_size, seq_len, num_frames, self.att_embed_size) .contiguous().view(-1, num_frames, self.att_embed_size), kfg.GLOBAL_FEATS: gv_feat.unsqueeze(1).expand(batch_size, seq_len, hidden_size), kfg.ATT_FEATS: att_feats.unsqueeze(1).expand(batch_size, seq_len, num_frames, hidden_size) .contiguous().view(-1, num_frames, hidden_size), kfg.EXT_ATT_MASKS: ext_att_masks.expand(batch_size, seq_len, 1, num_frames) .contiguous().view(-1, num_frames) } ) else: self._init_decoding_buffer(batch_size) wt = batched_inputs[kfg.G_TOKENS_TYPE] # [batch, max_len] seq_len = wt.size(1) # expand along time batched_inputs.update( { kfg.P_ATT_FEATS: p_att_feats.unsqueeze(1).expand(batch_size, seq_len, num_frames, self.att_embed_size), kfg.GLOBAL_FEATS: gv_feat.unsqueeze(1).expand(batch_size, seq_len, hidden_size), kfg.ATT_FEATS: att_feats.unsqueeze(1).expand(batch_size, seq_len, num_frames, hidden_size), kfg.EXT_ATT_MASKS: ext_att_masks.expand(batch_size, seq_len, 1, num_frames) } ) ''' batched_inputs.update( { kfg.P_ATT_FEATS: p_att_feats.unsqueeze(1).tile(1, seq_len, 1, 1).view(-1, num_frames, self.att_embed_size), kfg.GLOBAL_FEATS: gv_feat.unsqueeze(1).tile(1, seq_len, 1), kfg.ATT_FEATS: att_feats.unsqueeze(1).tile(1, seq_len, 1, 1).view(-1, num_frames, dimension), kfg.EXT_ATT_MASKS: ext_att_masks.tile(1, seq_len, 1, 1).view(-1, num_frames) } ) ''' return batched_inputs def _init_decoding_buffer(self, batch_size): 
self.pred_token_embed = torch.zeros(batch_size, 0, self.hidden_size, dtype=torch.long).cuda() def _clear_decoding_buffer(self): self.pred_token_embed = None def forward(self, batched_inputs): wt = batched_inputs[kfg.G_TOKEN_EMBED] att_feats = batched_inputs[kfg.ATT_FEATS] ext_att_masks = batched_inputs[kfg.EXT_ATT_MASKS] p_att_feats = batched_inputs[kfg.P_ATT_FEATS] global_feats = batched_inputs[kfg.GLOBAL_FEATS] history_states = batched_inputs.get(kfg.HISTORY_STATES, None) if self.training: cur_input_embed = torch.cat([wt, global_feats], axis=-1) cur_att_feats = att_feats cur_att_masks = ext_att_masks cur_p_att_feats = p_att_feats history_states = [None] * (self.num_layers + 1) else: time_step = batched_inputs[kfg.TIME_STEP] batch_size = att_feats.size(0) beam_size = wt.size(0) // batch_size if wt.dim() == 2: # [batch * beam, 1, hidden_size] wt = wt.unsqueeze(1) # init history_states if kfg.HISTORY_STATES not in batched_inputs: shape = list(wt.size()) # [batch * beam, 1, hidden_size] shape[1] = 0 history_states = [wt.new(torch.Size(shape))] * (self.num_layers + 1) # additional one for input layer batched_inputs[kfg.HISTORY_STATES] = history_states # input of current time step max_seq_len, num_frames, hidden_size = att_feats.size(-3), att_feats.size(-2), att_feats.size(-1) cur_global_feats = (global_feats[:, time_step:time_step+1, :]).unsqueeze(1).expand(batch_size, beam_size, 1, hidden_size) cur_global_feats = cur_global_feats.view(-1, 1, hidden_size) cur_input_embed = torch.cat([wt, cur_global_feats], axis=-1) # [batch * beam * time, num_frames, hidden] cur_att_feats = (att_feats[:, :time_step+1, :, :]).unsqueeze(1).expand(batch_size, beam_size, time_step+1, num_frames, hidden_size) \ .contiguous().view(-1, num_frames, hidden_size) # [batch * beam * time, num_frames], -inf cur_att_masks = (ext_att_masks[:, :time_step+1, :, :]).unsqueeze(1).expand(batch_size, beam_size, time_step+1, 1, num_frames) \ .contiguous().view(-1, num_frames) # [batch * beam * time, num_frames, att_embed_size] cur_p_att_feats = (p_att_feats[:, :time_step+1, ]).unsqueeze(1).expand(batch_size, beam_size, time_step+1, num_frames, self.att_embed_size) \ .contiguous().view(-1, num_frames, self.att_embed_size) cur_input_embed = self.wt_gv_embed(cur_input_embed) if self.wt_gv_embed_dropout is not None: cur_input_embed = self.wt_gv_embed_dropout(cur_input_embed) if history_states[0] is not None: # for test input_embed = torch.cat([history_states[0], cur_input_embed], axis=1) history_states[0] = input_embed # update the history states else: input_embed = cur_input_embed layer_outputs = [] layer_input = input_embed for idx, layer_module in enumerate(self.layers): layer_output = layer_module(layer_input) layer_output = (layer_output + layer_input) * math.sqrt(0.5) # residual connection layer_outputs.append(layer_output) if history_states[idx+1] is not None: # update the new hidden state for current step history_states[idx+1] = torch.cat([history_states[idx+1], layer_output[:, -1:, :]], axis=1) layer_input = layer_output # attention batch_size = layer_output.size(0) hidden_states = layer_output.view(-1, self.hidden_size) # [batch * beam * time_step, hidden_size] att_outputs = self.att(hidden_states, cur_att_feats, cur_p_att_feats, cur_att_masks) att_outputs = att_outputs.view(batch_size, -1, self.hidden_size) layer_output = (layer_output + att_outputs) * math.sqrt(0.5) if not self.training: return { kfg.G_HIDDEN_STATES: layer_output[:, -1, :], kfg.HISTORY_STATES: history_states } else: return { kfg.G_HIDDEN_STATES: 
layer_output }
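# --- Illustrative sketch, not part of the xmodaler decoder above ---
# A small numerical check of why the residual connections in TDConvEDDecoder
# multiply by math.sqrt(0.5): summing two roughly independent, unit-variance
# tensors doubles the variance, and the sqrt(0.5) factor restores it, keeping
# activations on a stable scale as layers stack. The random tensors below are
# stand-ins chosen only for this demonstration.
import math
import torch

torch.manual_seed(0)
x = torch.randn(100_000)  # stand-in for the layer input
y = torch.randn(100_000)  # stand-in for the layer output, assumed ~independent

plain_sum = x + y
scaled_sum = (x + y) * math.sqrt(0.5)

print("var(x + y)               ~", plain_sum.var().item())   # close to 2.0
print("var((x + y) * sqrt(0.5)) ~", scaled_sum.var().item())  # close to 1.0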
6,218
2,151
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_BROWSER_UI_BROWSER_DIALOGS_H_ #define CHROME_BROWSER_UI_BROWSER_DIALOGS_H_ #include <string> #include <utility> #include <vector> #include "base/callback.h" #include "base/memory/scoped_refptr.h" #include "base/optional.h" #include "base/strings/string16.h" #include "build/build_config.h" #include "chrome/browser/ui/bookmarks/bookmark_editor.h" #include "content/public/browser/content_browser_client.h" #include "content/public/browser/resource_request_info.h" #include "third_party/skia/include/core/SkColor.h" #include "ui/gfx/native_widget_types.h" #if defined(OS_CHROMEOS) #include "chrome/browser/chromeos/apps/intent_helper/apps_navigation_types.h" #endif // OS_CHROMEOS class Browser; class LoginHandler; class Profile; class WebShareTarget; struct WebApplicationInfo; namespace base { class FilePath; } namespace content { class BrowserContext; class ColorChooser; class WebContents; } namespace extensions { class Extension; } namespace net { class AuthChallengeInfo; } namespace payments { class PaymentRequest; class PaymentRequestDialog; } namespace safe_browsing { class ChromeCleanerController; class ChromeCleanerDialogController; class ChromeCleanerRebootDialogController; class SettingsResetPromptController; } namespace task_manager { class TaskManagerTableModel; } namespace ui { class WebDialogDelegate; struct SelectedFileInfo; } namespace chrome { // Shows or hides the Task Manager. |browser| can be NULL when called from Ash. // Returns a pointer to the underlying TableModel, which can be ignored, or used // for testing. task_manager::TaskManagerTableModel* ShowTaskManager(Browser* browser); void HideTaskManager(); #if !defined(OS_MACOSX) // Creates and shows an HTML dialog with the given delegate and context. // The window is automatically destroyed when it is closed. // Returns the created window. // // Make sure to use the returned window only when you know it is safe // to do so, i.e. before OnDialogClosed() is called on the delegate. gfx::NativeWindow ShowWebDialog(gfx::NativeView parent, content::BrowserContext* context, ui::WebDialogDelegate* delegate); #endif // !defined(OS_MACOSX) #if defined(OS_CHROMEOS) // Creates and shows an HTML dialog with the given delegate and browser context. // The dialog is placed in the ash window hierarchy in the given container. The // window is automatically destroyed when it is closed. // Returns the created window. // See ash/public/cpp/shell_window_ids.h for |container_id| values. The window // is destroyed when it is closed. See also chrome::ShowWebDialog(). // |is_minimal_style| means whether the title area of the dialog should be hide. gfx::NativeWindow ShowWebDialogInContainer(int container_id, content::BrowserContext* context, ui::WebDialogDelegate* delegate, bool is_minimal_style = false); #endif // defined(OS_CHROMEOS) // Shows the create chrome app shortcut dialog box. // |close_callback| may be null. void ShowCreateChromeAppShortcutsDialog( gfx::NativeWindow parent_window, Profile* profile, const extensions::Extension* app, const base::Callback<void(bool /* created */)>& close_callback); // Callback used to indicate whether a user has accepted the installation of a // web app. The boolean parameter is true when the user accepts the dialog. The // WebApplicationInfo parameter contains the information about the app, // possibly modified by the user. 
using AppInstallationAcceptanceCallback = base::OnceCallback<void(bool, const WebApplicationInfo&)>; // Shows the Bookmark App bubble. // See Extension::InitFromValueFlags::FROM_BOOKMARK for a description of // bookmark apps. // // |web_app_info| is the WebApplicationInfo being converted into an app. void ShowBookmarkAppDialog(content::WebContents* web_contents, const WebApplicationInfo& web_app_info, AppInstallationAcceptanceCallback callback); // Shows the PWA installation confirmation bubble. // // |web_app_info| is the WebApplicationInfo to be installed. void ShowPWAInstallDialog(content::WebContents* web_contents, const WebApplicationInfo& web_app_info, AppInstallationAcceptanceCallback callback); // Shows a color chooser that reports to the given WebContents. content::ColorChooser* ShowColorChooser(content::WebContents* web_contents, SkColor initial_color); #if defined(OS_MACOSX) // Bridging methods that show/hide the toolkit-views based Task Manager on Mac. task_manager::TaskManagerTableModel* ShowTaskManagerViews(Browser* browser); void HideTaskManagerViews(); // Show the Views "Chrome Update" dialog. void ShowUpdateChromeDialogViews(gfx::NativeWindow parent); #endif // OS_MACOSX #if defined(TOOLKIT_VIEWS) // Creates a toolkit-views based LoginHandler (e.g. HTTP-Auth dialog). scoped_refptr<LoginHandler> CreateLoginHandlerViews( net::AuthChallengeInfo* auth_info, content::ResourceRequestInfo::WebContentsGetter web_contents_getter, LoginAuthRequiredCallback auth_required_callback); // Shows the toolkit-views based BookmarkEditor. void ShowBookmarkEditorViews(gfx::NativeWindow parent_window, Profile* profile, const BookmarkEditor::EditDetails& details, BookmarkEditor::Configuration configuration); payments::PaymentRequestDialog* CreatePaymentRequestDialog( payments::PaymentRequest* request); // Used to return the target the user picked or nullptr if the user cancelled // the share. using WebShareTargetPickerCallback = base::OnceCallback<void(const WebShareTarget*)>; // Shows the dialog to choose a share target app. |targets| is a list of app // title and manifest URL pairs that will be shown in a list. If the user picks // a target, this calls |callback| with the manifest URL of the chosen target, // or supplies null if the user cancelled the share. void ShowWebShareTargetPickerDialog(gfx::NativeWindow parent_window, std::vector<WebShareTarget> targets, WebShareTargetPickerCallback callback); #endif // TOOLKIT_VIEWS // Values used in the Dialog.Creation UMA metric. Each value represents a // different type of dialog box. // These values are written to logs. New enum values can be added, but existing // enums must never be renumbered or deleted and reused. 
enum class DialogIdentifier { UNKNOWN = 0, TRANSLATE = 1, BOOKMARK = 2, BOOKMARK_EDITOR = 3, DESKTOP_MEDIA_PICKER = 4, OUTDATED_UPGRADE = 5, ONE_CLICK_SIGNIN = 6, PROFILE_SIGNIN_CONFIRMATION = 7, HUNG_RENDERER = 8, SESSION_CRASHED = 9, CONFIRM_BUBBLE = 10, UPDATE_RECOMMENDED = 11, CRYPTO_PASSWORD = 12, SAFE_BROWSING_DOWNLOAD_FEEDBACK = 13, FIRST_RUN = 14, NETWORK_SHARE_PROFILE_WARNING = 15, CONFLICTING_MODULE = 16, CRITICAL_NOTIFICATION = 17, IME_WARNING = 18, TOOLBAR_ACTIONS_BAR = 19, GLOBAL_ERROR = 20, EXTENSION_INSTALL = 21, EXTENSION_UNINSTALL = 22, EXTENSION_INSTALLED = 23, PAYMENT_REQUEST = 24, SAVE_CARD = 25, CARD_UNMASK = 26, SIGN_IN = 27, SIGN_IN_SYNC_CONFIRMATION = 28, SIGN_IN_ERROR = 29, SIGN_IN_EMAIL_CONFIRMATION = 30, PROFILE_CHOOSER = 31, ACCOUNT_CHOOSER = 32, ARC_APP = 33, AUTO_SIGNIN_FIRST_RUN = 34, BOOKMARK_APP_CONFIRMATION = 35, CHOOSER_UI = 36, CHOOSER = 37, COLLECTED_COOKIES = 38, CONSTRAINED_WEB = 39, CONTENT_SETTING_CONTENTS = 40, CREATE_CHROME_APPLICATION_SHORTCUT = 41, DOWNLOAD_DANGER_PROMPT = 42, DOWNLOAD_IN_PROGRESS = 43, ECHO = 44, ENROLLMENT = 45, EXTENSION = 46, EXTENSION_POPUP_AURA = 47, EXTERNAL_PROTOCOL = 48, EXTERNAL_PROTOCOL_CHROMEOS = 49, FIRST_RUN_DIALOG = 50, HOME_PAGE_UNDO = 51, IDLE_ACTION_WARNING = 52, IMPORT_LOCK = 53, INTENT_PICKER = 54, INVERT = 55, JAVA_SCRIPT = 56, JAVA_SCRIPT_APP_MODAL_X11 = 57, LOGIN_HANDLER = 58, MANAGE_PASSWORDS = 59, MEDIA_GALLERIES = 60, MULTIPROFILES_INTRO = 61, MULTIPROFILES_SESSION_ABORTED = 62, NATIVE_CONTAINER = 63, NETWORK_CONFIG = 64, PERMISSIONS = 65, PLATFORM_KEYS_CERTIFICATE_SELECTOR = 66, PLATFORM_VERIFICATION = 67, PROXIMITY_AUTH_ERROR = 68, REQUEST_PIN = 69, SSL_CLIENT_CERTIFICATE_SELECTOR = 70, SIMPLE_MESSAGE_BOX = 71, TAB_MODAL_CONFIRM = 72, TASK_MANAGER = 73, TELEPORT_WARNING = 74, USER_MANAGER = 75, USER_MANAGER_PROFILE = 76, VALIDATION_MESSAGE = 77, WEB_SHARE_TARGET_PICKER = 78, ZOOM = 79, LOCK_SCREEN_NOTE_APP_TOAST = 80, PWA_CONFIRMATION = 81, RELAUNCH_RECOMMENDED = 82, CROSTINI_INSTALLER = 83, RELAUNCH_REQUIRED = 84, UNITY_SYNC_CONSENT_BUMP = 85, CROSTINI_UNINSTALLER = 86, DOWNLOAD_OPEN_CONFIRMATION = 87, MAX_VALUE }; // Record an UMA metric counting the creation of a dialog box of this type. void RecordDialogCreation(DialogIdentifier identifier); #if defined(OS_WIN) // Shows the settings reset prompt dialog asking the user if they want to reset // some of their settings. void ShowSettingsResetPrompt( Browser* browser, safe_browsing::SettingsResetPromptController* controller); // Shows the Chrome Cleanup dialog asking the user if they want to clean their // system from unwanted software. This is called when unwanted software has been // detected on the system. void ShowChromeCleanerPrompt( Browser* browser, safe_browsing::ChromeCleanerDialogController* dialog_controller, safe_browsing::ChromeCleanerController* cleaner_controller); // Shows the Chrome Cleanup reboot dialog asking the user if they want to // restart their computer once a cleanup has finished. This is called when the // Chrome Cleanup ends in a reboot required state. void ShowChromeCleanerRebootPrompt( Browser* browser, safe_browsing::ChromeCleanerRebootDialogController* dialog_controller); #endif // OS_WIN } // namespace chrome #if defined(OS_CHROMEOS) // TODO(djacobo): Find a better place for IntentPickerResponse. // This callback informs the launch name and type of the app selected by the // user, along with the reason why the Bubble was closed and whether the // decision should be persisted. 
When the reason is ERROR or DIALOG_DEACTIVATED, // the values of the launch name, app type, and persistence boolean are all // ignored. using IntentPickerResponse = base::OnceCallback<void(const std::string&, chromeos::AppType, chromeos::IntentPickerCloseReason, bool should_persist)>; #endif // OS_CHROMEOS void ShowFolderUploadConfirmationDialog( const base::FilePath& path, base::OnceCallback<void(const std::vector<ui::SelectedFileInfo>&)> callback, std::vector<ui::SelectedFileInfo> selected_files, content::WebContents* web_contents); #endif // CHROME_BROWSER_UI_BROWSER_DIALOGS_H_
4,060
925
# Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import logging import os import subprocess import sys import tempfile from telemetry.core import exceptions from telemetry.core import util from telemetry.internal.backends.mandoline import mandoline_browser_backend class DesktopMandolineBackend( mandoline_browser_backend.MandolineBrowserBackend): """The backend for controlling a locally-executed browser instance, on Linux or Windows. """ # It is OK to have abstract methods. pylint: disable=W0223 def __init__(self, desktop_platform_backend, browser_options, executable, browser_directory): super(DesktopMandolineBackend, self).__init__( desktop_platform_backend, browser_options=browser_options) # Initialize fields so that an explosion during init doesn't break in Close. self._proc = None self._tmp_output_file = None self._executable = executable if not self._executable: raise Exception('Cannot create browser, no executable found!') self._browser_directory = browser_directory def HasBrowserFinishedLaunching(self): # In addition to the functional check performed by the base class, quickly # check if the browser process is still alive. if not self.IsBrowserRunning(): raise exceptions.ProcessGoneException( "Return code: %d" % self._proc.returncode) return super(DesktopMandolineBackend, self).HasBrowserFinishedLaunching() def GetBrowserStartupArgs(self): args = super(DesktopMandolineBackend, self).GetBrowserStartupArgs() if self.browser_options.use_devtools_active_port: raise NotImplementedError() else: self._port = util.GetUnreservedAvailableLocalPort() logging.info('Requested remote debugging port: %d' % self._port) args.append('--remote-debugging-port=%i' % self._port) return args def Start(self): assert not self._proc, 'Must call Close() before Start()' args = [self._executable] args.extend(self.GetBrowserStartupArgs()) if self.browser_options.startup_url: # TODO(yzshen): For now "about:blank" is not supported yet. if self.browser_options.startup_url != "about:blank": args.append(self.browser_options.startup_url) env = os.environ.copy() logging.debug('Starting Mandoline %s', args) if self._tmp_output_file: # Close the previous temp output file, if it exists. Please note that # Close() doesn't do this because GetStandardOutput() needs to access this # file and it may be called after Close(). self._tmp_output_file.close() self._tmp_output_file = None if not self.browser_options.show_stdout: self._tmp_output_file = tempfile.NamedTemporaryFile() self._proc = subprocess.Popen( args, stdout=self._tmp_output_file, stderr=subprocess.STDOUT, env=env) else: self._proc = subprocess.Popen(args, env=env) try: self._WaitForBrowserToComeUp() self._InitDevtoolsClientBackend() except: self.Close() raise @property def pid(self): if self._proc: return self._proc.pid return None @property def browser_directory(self): return self._browser_directory @property def profile_directory(self): raise NotImplementedError() def IsBrowserRunning(self): return self._proc and self._proc.poll() == None def GetStandardOutput(self): if not self._tmp_output_file: if self.browser_options.show_stdout: # This can happen in the case that loading the mandoline binary fails. # We print rather than using logging here, because that makes a # recursive call to this function. 
print >> sys.stderr, "Can't get standard output with --show-stdout" return '' try: self._tmp_output_file.flush() self._tmp_output_file.seek(0) return self._tmp_output_file.read() except IOError: return '' def GetStackTrace(self): return ('Retrieving stack trace from the browser is not yet supported. ' 'Returning browser stdout:\n' + self.GetStandardOutput()) def __del__(self): self.Close() def _TryCooperativeShutdown(self): if self.browser.platform.IsCooperativeShutdownSupported(): if self.browser.platform.CooperativelyShutdown(self._proc, "mandoline"): try: util.WaitFor(lambda: not self.IsBrowserRunning(), timeout=5) logging.info('Successfully shut down browser cooperatively') except exceptions.TimeoutException as e: logging.warning('Failed to cooperatively shutdown. ' + 'Proceeding to terminate: ' + str(e)) def Close(self): super(DesktopMandolineBackend, self).Close() if self.IsBrowserRunning(): self._TryCooperativeShutdown() if self.IsBrowserRunning(): self._proc.kill() self._proc = None
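# --- Illustrative sketch, not part of telemetry ---
# The capture pattern used by Start()/GetStandardOutput() above, written as a
# standalone Python 3 snippet: the child's stdout is redirected into a
# NamedTemporaryFile and read back later, even after the process has exited.
# The echo command is only a placeholder for the real browser executable.
import subprocess
import tempfile

tmp_output_file = tempfile.NamedTemporaryFile()
proc = subprocess.Popen(['echo', 'hello from the child process'],
                        stdout=tmp_output_file, stderr=subprocess.STDOUT)
proc.wait()

tmp_output_file.flush()
tmp_output_file.seek(0)
print(tmp_output_file.read().decode())  # -> hello from the child process
tmp_output_file.close()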
1,739
5,813
<filename>core/src/main/java/org/apache/druid/java/util/common/parsers/JSONPathFieldSpec.java /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.java.util.common.parsers; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import java.util.Objects; public class JSONPathFieldSpec { private final JSONPathFieldType type; private final String name; private final String expr; @JsonCreator public JSONPathFieldSpec( @JsonProperty("type") JSONPathFieldType type, @JsonProperty("name") String name, @JsonProperty("expr") String expr ) { this.type = type; this.name = Preconditions.checkNotNull(name, "Missing 'name' in field spec"); // If expr is null and type is root, use the name as the expr too. if (expr == null && type == JSONPathFieldType.ROOT) { this.expr = name; } else { this.expr = Preconditions.checkNotNull(expr, "Missing 'expr' for field[%s]", name); } } @JsonProperty public JSONPathFieldType getType() { return type; } @JsonProperty public String getName() { return name; } @JsonProperty public String getExpr() { return expr; } @JsonCreator public static JSONPathFieldSpec fromString(String name) { return JSONPathFieldSpec.createRootField(name); } public static JSONPathFieldSpec createNestedField(String name, String expr) { return new JSONPathFieldSpec(JSONPathFieldType.PATH, name, expr); } public static JSONPathFieldSpec createJqField(String name, String expr) { return new JSONPathFieldSpec(JSONPathFieldType.JQ, name, expr); } public static JSONPathFieldSpec createRootField(String name) { return new JSONPathFieldSpec(JSONPathFieldType.ROOT, name, null); } @Override public boolean equals(final Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } final JSONPathFieldSpec that = (JSONPathFieldSpec) o; return type == that.type && Objects.equals(name, that.name) && Objects.equals(expr, that.expr); } @Override public int hashCode() { return Objects.hash(type, name, expr); } @Override public String toString() { return "JSONPathFieldSpec{" + "type=" + type + ", name='" + name + '\'' + ", expr='" + expr + '\'' + '}'; } }
1,117